Dataset dump of Python source files from the Odoo 15.0 branch (ext: py, lang: PYTHON); each file below is delimited by a line giving its size.
--- File 1 | py | PYTHON | branch 15.0 | size 4,738 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import _, models, SUPERUSER_ID
from odoo.exceptions import AccessError, MissingError, UserError
from odoo.http import request
from odoo.tools import consteq
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
def _check_attachments_access(self, attachment_tokens):
"""This method relies on access rules/rights and therefore it should not be called from a sudo env."""
self = self.sudo(False)
attachment_tokens = attachment_tokens or ([None] * len(self))
if len(attachment_tokens) != len(self):
raise UserError(_("An access token must be provided for each attachment."))
for attachment, access_token in zip(self, attachment_tokens):
try:
attachment_sudo = attachment.with_user(SUPERUSER_ID).exists()
if not attachment_sudo:
raise MissingError(_("The attachment %s does not exist.", attachment.id))
try:
attachment.check('write')
except AccessError:
if not access_token or not attachment_sudo.access_token or not consteq(attachment_sudo.access_token, access_token):
message_sudo = self.env['mail.message'].sudo().search([('attachment_ids', 'in', attachment_sudo.ids)], limit=1)
if not message_sudo or not message_sudo.is_current_user_or_guest_author:
raise
except (AccessError, MissingError):
raise UserError(_("The attachment %s does not exist or you do not have the rights to access it.", attachment.id))
def _post_add_create(self):
""" Overrides behaviour when the attachment is created through the controller
"""
super(IrAttachment, self)._post_add_create()
for record in self:
record.register_as_main_attachment(force=False)
def register_as_main_attachment(self, force=True):
""" Registers this attachment as the main one of the model it is
attached to.
"""
self.ensure_one()
if not self.res_model:
return
related_record = self.env[self.res_model].browse(self.res_id)
if not related_record.check_access_rights('write', raise_exception=False):
return
        # message_main_attachment_id can be empty, hence the falsy check below;
        # we also make sure the field exists on the model before writing it
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if force or not related_record.message_main_attachment_id:
                # Ignore AccessError: if the user cannot modify the document,
                # just don't set the value
try:
related_record.message_main_attachment_id = self
except AccessError:
pass
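    # Illustrative usage sketch (hypothetical id, not part of the original file):
    #   attachment = env['ir.attachment'].browse(42)
    #   attachment.register_as_main_attachment(force=False)
    # With force=False the main attachment is only set when the related record
    # has a message_main_attachment_id field and it is still empty.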
def _delete_and_notify(self):
for attachment in self:
if attachment.res_model == 'mail.channel' and attachment.res_id:
target = self.env['mail.channel'].browse(attachment.res_id)
else:
target = self.env.user.partner_id
self.env['bus.bus']._sendone(target, 'ir.attachment/delete', {
'id': attachment.id,
})
self.unlink()
def _attachment_format(self, commands=False):
safari = request and request.httprequest.user_agent and request.httprequest.user_agent.browser == 'safari'
res_list = []
for attachment in self:
res = {
'checksum': attachment.checksum,
'id': attachment.id,
'filename': attachment.name,
'name': attachment.name,
'mimetype': 'application/octet-stream' if safari and attachment.mimetype and 'video' in attachment.mimetype else attachment.mimetype,
}
if attachment.res_id and issubclass(self.pool[attachment.res_model], self.pool['mail.thread']):
main_attachment = self.env[attachment.res_model].sudo().browse(attachment.res_id).message_main_attachment_id
res['is_main'] = attachment == main_attachment
if commands:
res['originThread'] = [('insert', {
'id': attachment.res_id,
'model': attachment.res_model,
})]
else:
res.update({
'res_id': attachment.res_id,
'res_model': attachment.res_model,
})
res_list.append(res)
return res_list
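    # Sketch of the shape returned by _attachment_format() for one attachment
    # (values are hypothetical, keys mirror the dict built above):
    #   {'checksum': '...', 'id': 42, 'filename': 'report.pdf', 'name': 'report.pdf',
    #    'mimetype': 'application/pdf', 'res_id': 7, 'res_model': 'res.partner'}
    # When commands=True, res_id/res_model are replaced by an 'originThread'
    # insert command as built in the loop above.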
--- File 2 | py | PYTHON | branch 15.0 | size 6,556 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class MailActivityType(models.Model):
""" Activity Types are used to categorize activities. Each type is a different
kind of activity e.g. call, mail, meeting. An activity can be generic i.e.
available for all models using activities; or specific to a model in which
case res_model field should be used. """
_name = 'mail.activity.type'
_description = 'Activity Type'
_rec_name = 'name'
_order = 'sequence, id'
def _get_model_selection(self):
return [
(model.model, model.name)
for model in self.env['ir.model'].sudo().search(
['&', ('is_mail_thread', '=', True), ('transient', '=', False)])
]
name = fields.Char('Name', required=True, translate=True)
summary = fields.Char('Default Summary', translate=True)
sequence = fields.Integer('Sequence', default=10)
active = fields.Boolean(default=True)
create_uid = fields.Many2one('res.users', index=True)
delay_count = fields.Integer(
'Schedule', default=0,
        help='Number of days/weeks/months before executing the action. It allows planning the action deadline.')
delay_unit = fields.Selection([
('days', 'days'),
('weeks', 'weeks'),
('months', 'months')], string="Delay units", help="Unit of delay", required=True, default='days')
delay_label = fields.Char(compute='_compute_delay_label')
delay_from = fields.Selection([
('current_date', 'after completion date'),
('previous_activity', 'after previous activity deadline')], string="Delay Type", help="Type of delay", required=True, default='previous_activity')
icon = fields.Char('Icon', help="Font awesome icon e.g. fa-tasks")
decoration_type = fields.Selection([
('warning', 'Alert'),
('danger', 'Error')], string="Decoration Type",
help="Change the background color of the related activities of this type.")
res_model = fields.Selection(selection=_get_model_selection, string="Model",
help='Specify a model if the activity should be specific to a model'
' and not available when managing activities for other models.')
triggered_next_type_id = fields.Many2one(
'mail.activity.type', string='Trigger', compute='_compute_triggered_next_type_id',
inverse='_inverse_triggered_next_type_id', store=True, readonly=False,
domain="['|', ('res_model', '=', False), ('res_model', '=', res_model)]", ondelete='restrict',
help="Automatically schedule this activity once the current one is marked as done.")
chaining_type = fields.Selection([
('suggest', 'Suggest Next Activity'), ('trigger', 'Trigger Next Activity')
], string="Chaining Type", required=True, default="suggest")
suggested_next_type_ids = fields.Many2many(
'mail.activity.type', 'mail_activity_rel', 'activity_id', 'recommended_id', string='Suggest',
domain="['|', ('res_model', '=', False), ('res_model', '=', res_model)]",
compute='_compute_suggested_next_type_ids', inverse='_inverse_suggested_next_type_ids', store=True, readonly=False,
help="Suggest these activities once the current one is marked as done.")
previous_type_ids = fields.Many2many(
'mail.activity.type', 'mail_activity_rel', 'recommended_id', 'activity_id',
domain="['|', ('res_model', '=', False), ('res_model', '=', res_model)]",
string='Preceding Activities')
category = fields.Selection([
('default', 'None'),
('upload_file', 'Upload Document'),
('phonecall', 'Phonecall')
], default='default', string='Action',
        help='Actions may trigger specific behavior like opening the calendar view or automatically marking the activity as done when a document is uploaded')
mail_template_ids = fields.Many2many('mail.template', string='Email templates')
default_user_id = fields.Many2one("res.users", string="Default User")
default_note = fields.Html(string="Default Note", translate=True)
#Fields for display purpose only
initial_res_model = fields.Selection(selection=_get_model_selection, string='Initial model', compute="_compute_initial_res_model", store=False,
help='Technical field to keep track of the model at the start of editing to support UX related behaviour')
    res_model_change = fields.Boolean(string="Model has changed", help="Technical field for UX related behaviour", default=False, store=False)
@api.onchange('res_model')
def _onchange_res_model(self):
self.mail_template_ids = self.sudo().mail_template_ids.filtered(lambda template: template.model_id.model == self.res_model)
self.res_model_change = self.initial_res_model and self.initial_res_model != self.res_model
def _compute_initial_res_model(self):
for activity_type in self:
activity_type.initial_res_model = activity_type.res_model
@api.depends('delay_unit', 'delay_count')
def _compute_delay_label(self):
selection_description_values = {
e[0]: e[1] for e in self._fields['delay_unit']._description_selection(self.env)}
for activity_type in self:
unit = selection_description_values[activity_type.delay_unit]
activity_type.delay_label = '%s %s' % (activity_type.delay_count, unit)
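    # Example of the computed label (hypothetical values, not part of the original
    # file): with delay_count=3 and delay_unit='days', delay_label becomes '3 days',
    # using the translated selection label for the unit.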
@api.depends('chaining_type')
def _compute_suggested_next_type_ids(self):
"""suggested_next_type_ids and triggered_next_type_id should be mutually exclusive"""
for activity_type in self:
if activity_type.chaining_type == 'trigger':
activity_type.suggested_next_type_ids = False
def _inverse_suggested_next_type_ids(self):
for activity_type in self:
if activity_type.suggested_next_type_ids:
activity_type.chaining_type = 'suggest'
@api.depends('chaining_type')
def _compute_triggered_next_type_id(self):
"""suggested_next_type_ids and triggered_next_type_id should be mutually exclusive"""
for activity_type in self:
if activity_type.chaining_type == 'suggest':
activity_type.triggered_next_type_id = False
def _inverse_triggered_next_type_id(self):
for activity_type in self:
if activity_type.triggered_next_type_id:
activity_type.chaining_type = 'trigger'
else:
activity_type.chaining_type = 'suggest'
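    # Illustrative sketch of the chaining exclusivity implemented above
    # (hypothetical records): setting a trigger activity forces
    # chaining_type='trigger' and clears suggested_next_type_ids, while setting
    # suggestions forces chaining_type='suggest' and clears triggered_next_type_id.
    #   call_type.write({'triggered_next_type_id': followup_type.id})
    #   # => call_type.chaining_type == 'trigger'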
--- File 3 | py | PYTHON | branch 15.0 | size 3,569 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResUsersSettings(models.Model):
_name = 'res.users.settings'
_description = 'User Settings'
user_id = fields.Many2one('res.users', string="User", required=True, readonly=True, ondelete='cascade')
is_discuss_sidebar_category_channel_open = fields.Boolean(string="Is discuss sidebar category channel open?", default=True)
is_discuss_sidebar_category_chat_open = fields.Boolean(string="Is discuss sidebar category chat open?", default=True)
# RTC
    push_to_talk_key = fields.Char(string="Push-To-Talk shortcut", help="String formatted to represent a key with modifiers following this pattern: shift.ctrl.alt.key, e.g.: truthy.1.true.b")
use_push_to_talk = fields.Boolean(string="Use the push to talk feature", default=False)
voice_active_duration = fields.Integer(string="Duration of voice activity in ms", help="How long the audio broadcast will remain active after passing the volume threshold")
volume_settings_ids = fields.One2many('res.users.settings.volumes', 'user_setting_id', string="Volumes of other partners")
_sql_constraints = [
        ('unique_user_id', 'UNIQUE(user_id)', 'One user should only have one set of user settings.')
]
@api.model
def _find_or_create_for_user(self, user):
settings = user.sudo().res_users_settings_ids
if not settings:
settings = self.sudo().create({'user_id': user.id})
return settings
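    # Illustrative usage sketch (not part of the original file):
    #   settings = env['res.users.settings']._find_or_create_for_user(env.user)
    # returns the existing settings record for the user, or creates one with sudo.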
def _res_users_settings_format(self):
self.ensure_one()
res = self._read_format(fnames=[name for name, field in self._fields.items() if name == 'id' or not field.automatic])[0]
res.pop('volume_settings_ids')
volume_settings = self.volume_settings_ids._discuss_users_settings_volume_format()
res.update({
'volume_settings': [('insert', volume_settings)] if volume_settings else [],
})
return res
def set_res_users_settings(self, new_settings):
self.ensure_one()
changed_settings = {}
for setting in new_settings.keys():
if setting in self._fields and new_settings[setting] != self[setting]:
changed_settings[setting] = new_settings[setting]
self.write(changed_settings)
self.env['bus.bus']._sendone(self.user_id.partner_id, 'res.users.settings/changed', changed_settings)
def set_volume_setting(self, partner_id, volume, guest_id=None):
"""
Saves the volume of a guest or a partner.
Either partner_id or guest_id must be specified.
:param float volume: the selected volume between 0 and 1
:param int partner_id:
:param int guest_id:
"""
self.ensure_one()
volume_setting = self.env['res.users.settings.volumes'].search([
('user_setting_id', '=', self.id), ('partner_id', '=', partner_id), ('guest_id', '=', guest_id)
])
if volume_setting:
volume_setting.volume = volume
else:
volume_setting = self.env['res.users.settings.volumes'].create({
'user_setting_id': self.id,
'volume': volume,
'partner_id': partner_id,
'guest_id': guest_id,
})
self.env['bus.bus']._sendone(self.user_id.partner_id, 'res.users.settings/volumes_update', {
'volumeSettings': [('insert', volume_setting._discuss_users_settings_volume_format())],
})
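    # Illustrative usage sketch (hypothetical ids, not part of the original file):
    #   settings.set_volume_setting(partner_id=7, volume=0.5)
    # creates or updates the res.users.settings.volumes record and notifies the
    # user's partner over the bus.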
--- File 4 | py | PYTHON | branch 15.0 | size 5,407 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools
class MailMessageSubtype(models.Model):
""" Class holding subtype definition for messages. Subtypes allow to tune
the follower subscription, allowing only some subtypes to be pushed
on the Wall. """
_name = 'mail.message.subtype'
_description = 'Message subtypes'
_order = 'sequence, id'
name = fields.Char(
'Message Type', required=True, translate=True,
        help='Message subtype gives a more precise type to the message, '
             'especially for system notifications. For example, it can be '
             'a notification related to a new record (New), or to a stage '
             'change in a process (Stage change). Message subtypes allow users to '
             'precisely tune the notifications they want to receive on their wall.')
description = fields.Text(
'Description', translate=True,
help='Description that will be added in the message posted for this '
'subtype. If void, the name will be added instead.')
internal = fields.Boolean(
'Internal Only',
        help='Messages with internal subtypes will be visible only to employees, aka members of the base_user group')
parent_id = fields.Many2one(
'mail.message.subtype', string='Parent', ondelete='set null',
help='Parent subtype, used for automatic subscription. This field is not '
'correctly named. For example on a project, the parent_id of project '
'subtypes refers to task-related subtypes.')
relation_field = fields.Char(
'Relation field',
help='Field used to link the related model to the subtype model when '
'using automatic subscription on a related document. The field '
'is used to compute getattr(related_document.relation_field).')
res_model = fields.Char('Model', help="Model the subtype applies to. If False, this subtype applies to all models.")
default = fields.Boolean('Default', default=True, help="Activated by default when subscribing.")
sequence = fields.Integer('Sequence', default=1, help="Used to order subtypes.")
hidden = fields.Boolean('Hidden', help="Hide the subtype in the follower options")
@api.model_create_multi
def create(self, vals_list):
self.clear_caches()
return super(MailMessageSubtype, self).create(vals_list)
def write(self, vals):
self.clear_caches()
return super(MailMessageSubtype, self).write(vals)
def unlink(self):
self.clear_caches()
return super(MailMessageSubtype, self).unlink()
@tools.ormcache('model_name')
def _get_auto_subscription_subtypes(self, model_name):
""" Return data related to auto subscription based on subtype matching.
Here model_name indicates child model (like a task) on which we want to
make subtype matching based on its parents (like a project).
        Example with tasks and project:
* generic: discussion, res_model = False
* task: new, res_model = project.task
* project: task_new, parent_id = new, res_model = project.project, field = project_id
Returned data
* child_ids: all subtypes that are generic or related to task (res_model = False or model_name)
* def_ids: default subtypes ids (either generic or task specific)
* all_int_ids: all internal-only subtypes ids (generic or task or project)
* parent: dict(parent subtype id, child subtype id), i.e. {task_new.id: new.id}
* relation: dict(parent_model, relation_fields), i.e. {'project.project': ['project_id']}
"""
child_ids, def_ids = list(), list()
all_int_ids = list()
parent, relation = dict(), dict()
subtypes = self.sudo().search([
'|', '|', ('res_model', '=', False),
('res_model', '=', model_name),
('parent_id.res_model', '=', model_name)
])
for subtype in subtypes:
if not subtype.res_model or subtype.res_model == model_name:
child_ids += subtype.ids
if subtype.default:
def_ids += subtype.ids
elif subtype.relation_field:
parent[subtype.id] = subtype.parent_id.id
relation.setdefault(subtype.res_model, set()).add(subtype.relation_field)
# required for backward compatibility
if subtype.internal:
all_int_ids += subtype.ids
return child_ids, def_ids, all_int_ids, parent, relation
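    # Sketch of a possible return value for model_name='project.task', following
    # the task/project example in the docstring (ids are hypothetical):
    #   child_ids   = [1, 2]          # generic + task subtypes
    #   def_ids     = [1]             # default ones among child_ids
    #   all_int_ids = [5]             # internal-only subtypes
    #   parent      = {9: 2}          # {task_new.id: new.id}
    #   relation    = {'project.project': {'project_id'}}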
@api.model
def default_subtypes(self, model_name):
""" Retrieve the default subtypes (all, internal, external) for the given model. """
subtype_ids, internal_ids, external_ids = self._default_subtypes(model_name)
return self.browse(subtype_ids), self.browse(internal_ids), self.browse(external_ids)
@tools.ormcache('self.env.uid', 'self.env.su', 'model_name')
def _default_subtypes(self, model_name):
domain = [('default', '=', True),
'|', ('res_model', '=', model_name), ('res_model', '=', False)]
subtypes = self.search(domain)
internal = subtypes.filtered('internal')
return subtypes.ids, internal.ids, (subtypes - internal).ids
--- File 5 | py | PYTHON | branch 15.0 | size 16,481 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import logging
from odoo import _, api, fields, models, tools, Command
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
class MailTemplate(models.Model):
"Templates for sending email"
_name = "mail.template"
_inherit = ['mail.render.mixin']
_description = 'Email Templates'
_order = 'name'
_unrestricted_rendering = True
@api.model
def default_get(self, fields):
res = super(MailTemplate, self).default_get(fields)
if res.get('model'):
res['model_id'] = self.env['ir.model']._get(res.pop('model')).id
return res
# description
name = fields.Char('Name', translate=True)
model_id = fields.Many2one('ir.model', 'Applies to', help="The type of document this template can be used with")
model = fields.Char('Related Document Model', related='model_id.model', index=True, store=True, readonly=True)
subject = fields.Char('Subject', translate=True, help="Subject (placeholders may be used here)")
email_from = fields.Char('From',
help="Sender address (placeholders may be used here). If not set, the default "
"value will be the author's email alias if configured, or email address.")
# recipients
use_default_to = fields.Boolean(
'Default recipients',
help="Default recipients of the record:\n"
"- partner (using id on a partner or the partner_id field) OR\n"
"- email (using email_from or email field)")
email_to = fields.Char('To (Emails)', help="Comma-separated recipient addresses (placeholders may be used here)")
partner_to = fields.Char('To (Partners)',
help="Comma-separated ids of recipient partners (placeholders may be used here)")
email_cc = fields.Char('Cc', help="Carbon copy recipients (placeholders may be used here)")
reply_to = fields.Char('Reply To', help="Email address to which replies will be redirected when sending emails in mass; only used when the reply is not logged in the original discussion thread.")
# content
body_html = fields.Html('Body', render_engine='qweb', translate=True, sanitize=False)
attachment_ids = fields.Many2many('ir.attachment', 'email_template_attachment_rel', 'email_template_id',
'attachment_id', 'Attachments',
help="You may attach files to this template, to be added to all "
"emails created from this template")
report_name = fields.Char('Report Filename', translate=True,
help="Name to use for the generated report file (may contain placeholders)\n"
"The extension can be omitted and will then come from the report type.")
report_template = fields.Many2one('ir.actions.report', 'Optional report to print and attach')
# options
mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing Mail Server', readonly=False,
help="Optional preferred server for outgoing mails. If not set, the highest "
"priority one will be used.")
    scheduled_date = fields.Char('Scheduled Date', help="If set, the queue manager will send the email after the date. If not set, the email will be sent as soon as possible. You can use dynamic expressions.")
auto_delete = fields.Boolean(
'Auto Delete', default=True,
help="This option permanently removes any track of email after it's been sent, including from the Technical menu in the Settings, in order to preserve storage space of your Odoo database.")
# contextual action
ref_ir_act_window = fields.Many2one('ir.actions.act_window', 'Sidebar action', readonly=True, copy=False,
help="Sidebar action to make this template available on records "
"of the related document model")
# access
can_write = fields.Boolean(compute='_compute_can_write',
help='The current user can edit the template.')
# Overrides of mail.render.mixin
@api.depends('model')
def _compute_render_model(self):
for template in self:
template.render_model = template.model
@api.depends_context('uid')
def _compute_can_write(self):
writable_templates = self._filter_access_rules('write')
for template in self:
template.can_write = template in writable_templates
# ------------------------------------------------------------
# CRUD
# ------------------------------------------------------------
def _fix_attachment_ownership(self):
for record in self:
record.attachment_ids.write({'res_model': record._name, 'res_id': record.id})
return self
@api.model_create_multi
def create(self, values_list):
return super().create(values_list)\
._fix_attachment_ownership()
def write(self, vals):
super().write(vals)
self._fix_attachment_ownership()
return True
def unlink(self):
self.unlink_action()
return super(MailTemplate, self).unlink()
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
default = dict(default or {},
name=_("%s (copy)", self.name))
return super(MailTemplate, self).copy(default=default)
def unlink_action(self):
for template in self:
if template.ref_ir_act_window:
template.ref_ir_act_window.unlink()
return True
def create_action(self):
ActWindow = self.env['ir.actions.act_window']
view = self.env.ref('mail.email_compose_message_wizard_form')
for template in self:
button_name = _('Send Mail (%s)', template.name)
action = ActWindow.create({
'name': button_name,
'type': 'ir.actions.act_window',
'res_model': 'mail.compose.message',
'context': "{'default_composition_mode': 'mass_mail', 'default_template_id' : %d, 'default_use_template': True}" % (template.id),
'view_mode': 'form,tree',
'view_id': view.id,
'target': 'new',
'binding_model_id': template.model_id.id,
})
template.write({'ref_ir_act_window': action.id})
return True
# ------------------------------------------------------------
# MESSAGE/EMAIL VALUES GENERATION
# ------------------------------------------------------------
def generate_recipients(self, results, res_ids):
"""Generates the recipients of the template. Default values can ben generated
instead of the template values if requested by template or context.
Emails (email_to, email_cc) can be transformed into partners if requested
in the context. """
self.ensure_one()
if self.use_default_to or self._context.get('tpl_force_default_to'):
records = self.env[self.model].browse(res_ids).sudo()
default_recipients = records._message_get_default_recipients()
for res_id, recipients in default_recipients.items():
results[res_id].pop('partner_to', None)
results[res_id].update(recipients)
records_company = None
if self._context.get('tpl_partners_only') and self.model and results and 'company_id' in self.env[self.model]._fields:
records = self.env[self.model].browse(results.keys()).read(['company_id'])
records_company = {rec['id']: (rec['company_id'][0] if rec['company_id'] else None) for rec in records}
for res_id, values in results.items():
partner_ids = values.get('partner_ids', list())
if self._context.get('tpl_partners_only'):
mails = tools.email_split(values.pop('email_to', '')) + tools.email_split(values.pop('email_cc', ''))
Partner = self.env['res.partner']
if records_company:
Partner = Partner.with_context(default_company_id=records_company[res_id])
for mail in mails:
partner = Partner.find_or_create(mail)
partner_ids.append(partner.id)
partner_to = values.pop('partner_to', '')
if partner_to:
# placeholders could generate '', 3, 2 due to some empty field values
tpl_partner_ids = [int(pid) for pid in partner_to.split(',') if pid]
partner_ids += self.env['res.partner'].sudo().browse(tpl_partner_ids).exists().ids
results[res_id]['partner_ids'] = partner_ids
return results
def generate_email(self, res_ids, fields):
"""Generates an email from the template for given the given model based on
records given by res_ids.
:param res_id: id of the record to use for rendering the template (model
is taken from template definition)
:returns: a dict containing all relevant fields for creating a new
mail.mail entry, with one extra key ``attachments``, in the
format [(report_name, data)] where data is base64 encoded.
"""
self.ensure_one()
multi_mode = True
if isinstance(res_ids, int):
res_ids = [res_ids]
multi_mode = False
results = dict()
for lang, (template, template_res_ids) in self._classify_per_lang(res_ids).items():
for field in fields:
generated_field_values = template._render_field(
field, template_res_ids,
options={'render_safe': field == 'subject'},
post_process=(field == 'body_html')
)
for res_id, field_value in generated_field_values.items():
results.setdefault(res_id, dict())[field] = field_value
# compute recipients
if any(field in fields for field in ['email_to', 'partner_to', 'email_cc']):
results = template.generate_recipients(results, template_res_ids)
# update values for all res_ids
for res_id in template_res_ids:
values = results[res_id]
if values.get('body_html'):
values['body'] = tools.html_sanitize(values['body_html'])
# technical settings
values.update(
mail_server_id=template.mail_server_id.id or False,
auto_delete=template.auto_delete,
model=template.model,
res_id=res_id or False,
attachment_ids=[attach.id for attach in template.attachment_ids],
)
# Add report in attachments: generate once for all template_res_ids
if template.report_template:
for res_id in template_res_ids:
attachments = []
report_name = template._render_field('report_name', [res_id])[res_id]
report = template.report_template
report_service = report.report_name
if report.report_type in ['qweb-html', 'qweb-pdf']:
result, format = report._render_qweb_pdf([res_id])
else:
res = report._render([res_id])
if not res:
raise UserError(_('Unsupported report type %s found.', report.report_type))
result, format = res
# TODO in trunk, change return format to binary to match message_post expected format
result = base64.b64encode(result)
if not report_name:
report_name = 'report.' + report_service
ext = "." + format
if not report_name.endswith(ext):
report_name += ext
attachments.append((report_name, result))
results[res_id]['attachments'] = attachments
return multi_mode and results or results[res_ids[0]]
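    # Illustrative usage sketch (hypothetical template and ids, not part of the
    # original file):
    #   values = template.generate_email(7, ['subject', 'body_html', 'email_to'])
    # With an int res_id a single values dict is returned; with a list of ids a
    # dict {res_id: values} is returned instead.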
# ------------------------------------------------------------
# EMAIL
# ------------------------------------------------------------
def _send_check_access(self, res_ids):
records = self.env[self.model].browse(res_ids)
records.check_access_rights('read')
records.check_access_rule('read')
def send_mail(self, res_id, force_send=False, raise_exception=False, email_values=None, notif_layout=False):
""" Generates a new mail.mail. Template is rendered on record given by
res_id and model coming from template.
:param int res_id: id of the record to render the template
:param bool force_send: send email immediately; otherwise use the mail
queue (recommended);
:param dict email_values: update generated mail with those values to further
customize the mail;
:param str notif_layout: optional notification layout to encapsulate the
generated email;
:returns: id of the mail.mail that was created """
# Grant access to send_mail only if access to related document
self.ensure_one()
self._send_check_access([res_id])
Attachment = self.env['ir.attachment'] # TDE FIXME: should remove default_type from context
# create a mail_mail based on values, without attachments
values = self.generate_email(res_id, ['subject', 'body_html', 'email_from', 'email_to', 'partner_to', 'email_cc', 'reply_to', 'scheduled_date'])
values['recipient_ids'] = [Command.link(pid) for pid in values.get('partner_ids', list())]
values['attachment_ids'] = [Command.link(aid) for aid in values.get('attachment_ids', list())]
values.update(email_values or {})
attachment_ids = values.pop('attachment_ids', [])
attachments = values.pop('attachments', [])
# add a protection against void email_from
if 'email_from' in values and not values.get('email_from'):
values.pop('email_from')
# encapsulate body
if notif_layout and values['body_html']:
try:
template = self.env.ref(notif_layout, raise_if_not_found=True)
except ValueError:
_logger.warning('QWeb template %s not found when sending template %s. Sending without layouting.' % (notif_layout, self.name))
else:
record = self.env[self.model].browse(res_id)
model = self.env['ir.model']._get(record._name)
if self.lang:
lang = self._render_lang([res_id])[res_id]
template = template.with_context(lang=lang)
model = model.with_context(lang=lang)
template_ctx = {
'message': self.env['mail.message'].sudo().new(dict(body=values['body_html'], record_name=record.display_name)),
'model_description': model.display_name,
'company': 'company_id' in record and record['company_id'] or self.env.company,
'record': record,
}
body = template._render(template_ctx, engine='ir.qweb', minimal_qcontext=True)
values['body_html'] = self.env['mail.render.mixin']._replace_local_links(body)
mail = self.env['mail.mail'].sudo().create(values)
# manage attachments
for attachment in attachments:
attachment_data = {
'name': attachment[0],
'datas': attachment[1],
'type': 'binary',
'res_model': 'mail.message',
'res_id': mail.mail_message_id.id,
}
attachment_ids.append((4, Attachment.create(attachment_data).id))
if attachment_ids:
mail.write({'attachment_ids': attachment_ids})
if force_send:
mail.send(raise_exception=raise_exception)
return mail.id # TDE CLEANME: return mail + api.returns ?
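    # Illustrative usage sketch (hypothetical xmlid and record id, not part of the
    # original file):
    #   template = env.ref('mail.some_template_xmlid')   # assumed xmlid
    #   mail_id = template.send_mail(7, force_send=True)
    # The returned value is the id of the mail.mail record created above.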
--- File 6 | py | PYTHON | branch 15.0 | size 1,006 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class MailShortcode(models.Model):
""" Shortcode
        Canned Responses, allowing the user to define shortcuts in their messages. Should be applied before storing the message in the database.
        Emojis allow replacing text with an image for visual effect. Should be applied when the message is displayed (only for final rendering).
These shortcodes are global and are available for every user.
"""
_name = 'mail.shortcode'
_description = 'Canned Response / Shortcode'
source = fields.Char('Shortcut', required=True, index=True, help="The shortcut which must be replaced in the Chat Messages")
substitution = fields.Text('Substitution', required=True, index=True, help="The escaped html code replacing the shortcut")
description = fields.Char('Description')
message_ids = fields.Many2one('mail.message', string="Messages", store=False)
--- File 7 | py | PYTHON | branch 15.0 | size 584 ---
# -*- coding: utf-8 -*-
from odoo import fields, models
class BusPresence(models.Model):
_inherit = ['bus.presence']
guest_id = fields.Many2one('mail.guest', 'Guest', ondelete='cascade')
def init(self):
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS bus_presence_guest_unique ON %s (guest_id) WHERE guest_id IS NOT NULL" % self._table)
_sql_constraints = [
("partner_or_guest_exists", "CHECK((user_id IS NOT NULL AND guest_id IS NULL) OR (user_id IS NULL AND guest_id IS NOT NULL))", "A bus presence must have a user or a guest."),
]
--- File 8 | py | PYTHON | branch 15.0 | size 28,692 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import babel
import copy
import logging
import re
from lxml import html
from markupsafe import Markup
from werkzeug import urls
from odoo import _, api, fields, models, tools
from odoo.addons.base.models.qweb import QWebCodeFound
from odoo.exceptions import UserError, AccessError
from odoo.tools import is_html_empty, safe_eval
from odoo.tools.rendering_tools import convert_inline_template_to_qweb, parse_inline_template, render_inline_template, template_env_globals
_logger = logging.getLogger(__name__)
def format_date(env, date, pattern=False, lang_code=False):
try:
return tools.format_date(env, date, date_format=pattern, lang_code=lang_code)
except babel.core.UnknownLocaleError:
return date
def format_datetime(env, dt, tz=False, dt_format='medium', lang_code=False):
try:
return tools.format_datetime(env, dt, tz=tz, dt_format=dt_format, lang_code=lang_code)
except babel.core.UnknownLocaleError:
return dt
def format_time(env, time, tz=False, time_format='medium', lang_code=False):
try:
return tools.format_time(env, time, tz=tz, time_format=time_format, lang_code=lang_code)
except babel.core.UnknownLocaleError:
return time
class MailRenderMixin(models.AbstractModel):
_name = 'mail.render.mixin'
_description = 'Mail Render Mixin'
# If True, we trust the value on the model for rendering
# If False, we need the group "Template Editor" to render the model fields
_unrestricted_rendering = False
# language for rendering
lang = fields.Char(
'Language',
help="Optional translation language (ISO code) to select when sending out an email. "
"If not set, the english version will be used. This should usually be a placeholder expression "
"that provides the appropriate language, e.g. {{ object.partner_id.lang }}.")
# rendering context
render_model = fields.Char("Rendering Model", compute='_compute_render_model', store=False)
# expression builder
model_object_field = fields.Many2one(
'ir.model.fields', string="Field", store=False,
help="Select target field from the related document model.\n"
"If it is a relationship field you will be able to select "
"a target field at the destination of the relationship.")
sub_object = fields.Many2one(
'ir.model', 'Sub-model', readonly=True, store=False,
help="When a relationship field is selected as first field, "
"this field shows the document model the relationship goes to.")
sub_model_object_field = fields.Many2one(
'ir.model.fields', 'Sub-field', store=False,
help="When a relationship field is selected as first field, "
"this field lets you select the target field within the "
"destination document model (sub-model).")
null_value = fields.Char('Default Value', store=False, help="Optional value to use if the target field is empty")
copyvalue = fields.Char(
'Placeholder Expression', store=False,
help="Final placeholder expression, to be copy-pasted in the desired template field.")
def _compute_render_model(self):
""" Give the target model for rendering. Void by default as models
inheriting from ``mail.render.mixin`` should define how to find this
model. """
self.render_model = False
@api.onchange('model_object_field', 'sub_model_object_field', 'null_value')
def _onchange_dynamic_placeholder(self):
""" Generate the dynamic placeholder """
if self.model_object_field:
if self.model_object_field.ttype in ['many2one', 'one2many', 'many2many']:
model = self.env['ir.model']._get(self.model_object_field.relation)
if model:
self.sub_object = model.id
sub_field_name = self.sub_model_object_field.name
self.copyvalue = self._build_expression(self.model_object_field.name,
sub_field_name, self.null_value or False)
else:
self.sub_object = False
self.sub_model_object_field = False
self.copyvalue = self._build_expression(self.model_object_field.name, False, self.null_value or False)
else:
self.sub_object = False
self.copyvalue = False
self.sub_model_object_field = False
self.null_value = False
@api.model
def _build_expression(self, field_name, sub_field_name, null_value):
"""Returns a placeholder expression for use in a template field,
based on the values provided in the placeholder assistant.
:param field_name: main field name
:param sub_field_name: sub field name (M2O)
:param null_value: default value if the target value is empty
:return: final placeholder expression """
expression = ''
if field_name:
expression = "{{ object." + field_name
if sub_field_name:
expression += "." + sub_field_name
if null_value:
expression += " or '''%s'''" % null_value
expression += " }}"
return expression
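    # Example output of _build_expression (values are hypothetical):
    #   _build_expression('partner_id', 'name', 'Anonymous')
    #   => "{{ object.partner_id.name or '''Anonymous''' }}"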
# ------------------------------------------------------------
# ORM
# ------------------------------------------------------------
def _valid_field_parameter(self, field, name):
# allow specifying rendering options directly from field when using the render mixin
return name in ['render_engine', 'render_options'] or super()._valid_field_parameter(field, name)
@api.model_create_multi
def create(self, values_list):
record = super().create(values_list)
if self._unrestricted_rendering:
# If the rendering is unrestricted (e.g. mail.template),
# check the user is part of the mail editor group to create a new template if the template is dynamic
record._check_access_right_dynamic_template()
return record
def write(self, vals):
super().write(vals)
if self._unrestricted_rendering:
# If the rendering is unrestricted (e.g. mail.template),
# check the user is part of the mail editor group to modify a template if the template is dynamic
self._check_access_right_dynamic_template()
return True
# ------------------------------------------------------------
# TOOLS
# ------------------------------------------------------------
def _replace_local_links(self, html, base_url=None):
""" Replace local links by absolute links. It is required in various
cases, for example when sending emails on chatter or sending mass
mailings. It replaces
* href of links (mailto will not match the regex)
* src of images (base64 hardcoded data will not match the regex)
* styling using url like background-image: url
        It is done using regexes because it is shorter than using an HTML parser
        to build a potentially complex soup and hoping for a result that
        has not been harmed.
"""
if not html:
return html
wrapper = Markup if isinstance(html, Markup) else str
html = tools.ustr(html)
if isinstance(html, Markup):
wrapper = Markup
def _sub_relative2absolute(match):
            # compute the base url here only if really necessary (i.e. when base_url
            # was not given); the function attribute acts as a cache so it is done only once
if not _sub_relative2absolute.base_url:
_sub_relative2absolute.base_url = self.env["ir.config_parameter"].sudo().get_param("web.base.url")
return match.group(1) + urls.url_join(_sub_relative2absolute.base_url, match.group(2))
_sub_relative2absolute.base_url = base_url
html = re.sub(r"""(<img(?=\s)[^>]*\ssrc=")(/[^/][^"]+)""", _sub_relative2absolute, html)
html = re.sub(r"""(<a(?=\s)[^>]*\shref=")(/[^/][^"]+)""", _sub_relative2absolute, html)
html = re.sub(re.compile(
r"""( # Group 1: element up to url in style
<[^>]+\bstyle=" # Element with a style attribute
[^"]+\burl\( # Style attribute contains "url(" style
(?:&\#34;|'|")?) # url style may start with (escaped) quote: capture it
( # Group 2: url itself
/(?:[^'")]|(?!&\#34;))+ # stop at the first closing quote
)""", re.VERBOSE), _sub_relative2absolute, html)
return wrapper(html)
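    # Illustrative example (hypothetical base url, derived from the regexes above):
    #   _replace_local_links('<img src="/web/image/1">', base_url='https://example.com')
    #   => '<img src="https://example.com/web/image/1">'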
@api.model
def _render_encapsulate(self, layout_xmlid, html, add_context=None, context_record=None):
try:
template = self.env.ref(layout_xmlid, raise_if_not_found=True)
except ValueError:
_logger.warning('QWeb template %s not found when rendering encapsulation template.' % (layout_xmlid))
else:
record_name = context_record.display_name if context_record else ''
model_description = self.env['ir.model']._get(context_record._name).display_name if context_record else False
template_ctx = {
'body': html,
'record_name': record_name,
'model_description': model_description,
'company': context_record['company_id'] if (context_record and 'company_id' in context_record) else self.env.company,
'record': context_record,
}
if add_context:
template_ctx.update(**add_context)
html = template._render(template_ctx, engine='ir.qweb', minimal_qcontext=True)
html = self.env['mail.render.mixin']._replace_local_links(html)
return html
@api.model
def _prepend_preview(self, html, preview):
""" Prepare the email body before sending. Add the text preview at the
        beginning of the mail. The preview text is displayed below the mail
        subject in most mail clients (Gmail, Outlook...).
:param html: html content for which we want to prepend a preview
:param preview: the preview to add before the html content
        :return: html with prepended preview
"""
if preview:
preview = preview.strip()
preview_markup = convert_inline_template_to_qweb(preview)
if preview:
html_preview = Markup("""
<div style="display:none;font-size:1px;height:0px;width:0px;opacity:0;">
{}
</div>
""").format(preview_markup)
return tools.prepend_html_content(html, html_preview)
return html
# ------------------------------------------------------------
# SECURITY
# ------------------------------------------------------------
def _is_dynamic(self):
for template in self.sudo():
for fname, field in template._fields.items():
engine = getattr(field, 'render_engine', 'inline_template')
if engine in ('qweb', 'qweb_view'):
if self._is_dynamic_template_qweb(template[fname]):
return True
else:
if self._is_dynamic_template_inline_template(template[fname]):
return True
return False
@api.model
def _is_dynamic_template_qweb(self, template_src):
if template_src:
try:
node = html.fragment_fromstring(template_src, create_parent='div')
self.env["ir.qweb"]._compile(node, options={'raise_on_code': True})
except QWebCodeFound:
return True
return False
@api.model
def _is_dynamic_template_inline_template(self, template_txt):
if template_txt:
template_instructions = parse_inline_template(str(template_txt))
if len(template_instructions) > 1 or template_instructions[0][1]:
return True
return False
def _check_access_right_dynamic_template(self):
if not self.env.su and not self.env.user.has_group('mail.group_mail_template_editor') and self._is_dynamic():
group = self.env.ref('mail.group_mail_template_editor')
raise AccessError(_('Only users belonging to the "%s" group can modify dynamic templates.', group.name))
# ------------------------------------------------------------
# RENDERING
# ------------------------------------------------------------
@api.model
def _render_eval_context(self):
""" Evaluation context used in all rendering engines. Contains
* ``user``: current user browse record;
* ``ctx```: current context;
* various formatting tools;
"""
render_context = {
'format_date': lambda date, date_format=False, lang_code=False: format_date(self.env, date, date_format, lang_code),
'format_datetime': lambda dt, tz=False, dt_format=False, lang_code=False: format_datetime(self.env, dt, tz, dt_format, lang_code),
'format_time': lambda time, tz=False, time_format=False, lang_code=False: format_time(self.env, time, tz, time_format, lang_code),
'format_amount': lambda amount, currency, lang_code=False: tools.format_amount(self.env, amount, currency, lang_code),
'format_duration': lambda value: tools.format_duration(value),
'user': self.env.user,
'ctx': self._context,
'is_html_empty': is_html_empty,
}
render_context.update(copy.copy(template_env_globals))
return render_context
@api.model
def _render_template_qweb(self, template_src, model, res_ids,
add_context=None, options=None):
""" Render a raw QWeb template.
:param str template_src: raw QWeb template to render;
:param str model: see ``MailRenderMixin._render_template()``;
:param list res_ids: see ``MailRenderMixin._render_template()``;
:param dict add_context: additional context to give to renderer. It
allows to add or update values to base rendering context generated
by ``MailRenderMixin._render_eval_context()``;
:param dict options: options for rendering (not used currently);
:return dict: {res_id: string of rendered template based on record}
        :notice: Experimental. Use at your own risk.
"""
results = dict.fromkeys(res_ids, u"")
if not template_src:
return results
# prepare template variables
variables = self._render_eval_context()
if add_context:
variables.update(**add_context)
is_restricted = not self._unrestricted_rendering and not self.env.is_admin() and not self.env.user.has_group('mail.group_mail_template_editor')
for record in self.env[model].browse(res_ids):
variables['object'] = record
try:
render_result = self.env['ir.qweb']._render(
html.fragment_fromstring(template_src, create_parent='div'),
variables,
raise_on_code=is_restricted,
)
# remove the rendered tag <div> that was added in order to wrap potentially multiples nodes into one.
render_result = render_result[5:-6]
except QWebCodeFound:
group = self.env.ref('mail.group_mail_template_editor')
raise AccessError(_('Only users belonging to the "%s" group can modify dynamic templates.', group.name))
except Exception as e:
_logger.info("Failed to render template : %s", template_src, exc_info=True)
raise UserError(_("Failed to render QWeb template : %s)", e))
results[record.id] = render_result
return results
@api.model
def _render_template_qweb_view(self, template_src, model, res_ids,
add_context=None, options=None):
""" Render a QWeb template based on an ir.ui.view content.
In addition to the generic evaluation context available, some other
variables are added:
* ``object``: record based on which the template is rendered;
:param str template_src: source QWeb template. It should be a string
XmlID allowing to fetch an ``ir.ui.view``;
:param str model: see ``MailRenderMixin._render_template()``;
:param list res_ids: see ``MailRenderMixin._render_template()``;
:param dict add_context: additional context to give to renderer. It
allows to add or update values to base rendering context generated
by ``MailRenderMixin._render_eval_context()``;
:param dict options: options for rendering (not used currently);
:return dict: {res_id: string of rendered template based on record}
"""
# prevent wrong values (rendering on a void record set, ...)
if any(r is None for r in res_ids):
            raise ValueError(_('Template rendering should be called with valid record IDs.'))
view = self.env.ref(template_src, raise_if_not_found=False) or self.env['ir.ui.view']
results = dict.fromkeys(res_ids, u"")
if not view:
return results
# prepare template variables
variables = self._render_eval_context()
if add_context:
variables.update(**add_context)
safe_eval.check_values(variables)
for record in self.env[model].browse(res_ids):
variables['object'] = record
try:
render_result = view._render(variables, engine='ir.qweb', minimal_qcontext=True)
except Exception as e:
_logger.info("Failed to render template : %s (%d)", template_src, view.id, exc_info=True)
raise UserError(_("Failed to render template : %(xml_id)s (%(view_id)d)",
xml_id=template_src,
view_id=view.id))
results[record.id] = render_result
return results
@api.model
def _render_template_inline_template(self, template_txt, model, res_ids,
add_context=None, options=None):
""" Render a string-based template on records given by a model and a list
of IDs, using inline_template.
In addition to the generic evaluation context available, some other
variables are added:
* ``object``: record based on which the template is rendered;
:param str template_txt: template text to render
:param str model: see ``MailRenderMixin._render_template()``;
:param list res_ids: see ``MailRenderMixin._render_template()``;
:param dict add_context: additional context to give to renderer. It
allows to add or update values to base rendering context generated
by ``MailRenderMixin._render_inline_template_eval_context()``;
:param dict options: options for rendering;
:return dict: {res_id: string of rendered template based on record}
"""
# prevent wrong values (rendering on a void record set, ...)
if any(r is None for r in res_ids):
            raise ValueError(_('Template rendering should be called with valid record IDs.'))
results = dict.fromkeys(res_ids, u"")
if not template_txt:
return results
template_instructions = parse_inline_template(str(template_txt))
is_dynamic = len(template_instructions) > 1 or template_instructions[0][1]
if (not self._unrestricted_rendering and is_dynamic and not self.env.is_admin() and
not self.env.user.has_group('mail.group_mail_template_editor')):
group = self.env.ref('mail.group_mail_template_editor')
raise AccessError(_('Only users belonging to the "%s" group can modify dynamic templates.', group.name))
if not is_dynamic:
            # Either the content is raw text without placeholders, or we failed to
            # detect placeholder code. In both cases we skip the rendering and return
            # the raw content, so even if we failed to detect dynamic code,
            # non "mail_template_editor" users will not gain rendering tools available
            # only to users of the template editor group
return {record_id: template_instructions[0][0] for record_id in res_ids}
# prepare template variables
variables = self._render_eval_context()
if add_context:
variables.update(**add_context)
for record in self.env[model].browse(res_ids):
variables['object'] = record
try:
results[record.id] = render_inline_template(template_instructions, variables)
except Exception as e:
_logger.info("Failed to render inline_template: \n%s", str(template_txt), exc_info=True)
raise UserError(_("Failed to render inline_template template : %s)", e))
return results
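    # Illustrative inline_template example (hypothetical record, not part of the
    # original file): rendering the source 'Dear {{ object.name }}' on a partner
    # named 'Ada' yields {partner.id: 'Dear Ada'}; a source without any
    # '{{ ... }}' placeholder is returned unchanged for every res_id.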
@api.model
def _render_template_postprocess(self, rendered):
""" Tool method for post processing. In this method we ensure local
links ('/shop/Basil-1') are replaced by global links ('https://www.
mygarden.com/shop/Basil-1').
:param rendered: result of ``_render_template``;
:return dict: updated version of rendered per record ID;
"""
for res_id, rendered_html in rendered.items():
rendered[res_id] = self._replace_local_links(rendered_html)
return rendered
@api.model
def _render_template(self, template_src, model, res_ids, engine='inline_template',
add_context=None, options=None, post_process=False):
""" Render the given string on records designed by model / res_ids using
the given rendering engine. Possible engine are small_web, qweb, or
qweb_view.
:param str template_src: template text to render or xml id of a qweb view;
:param str model: model name of records on which we want to perform
rendering (aka 'crm.lead');
:param list res_ids: list of ids of records. All should belong to the
Odoo model given by model;
:param string engine: inline_template, qweb or qweb_view;
:param dict add_context: additional context to give to renderer. It
allows to add or update values to base rendering context generated
by ``MailRenderMixin._render_<engine>_eval_context()``;
:param dict options: options for rendering;
:param boolean post_process: perform a post processing on rendered result
(notably html links management). See``_render_template_postprocess``;
:return dict: {res_id: string of rendered template based on record}
"""
if not isinstance(res_ids, (list, tuple)):
            raise ValueError(_('Template rendering should only be called on a list of IDs.'))
if engine not in ('inline_template', 'qweb', 'qweb_view'):
raise ValueError(_('Template rendering supports only inline_template, qweb, or qweb_view (view or raw).'))
if engine == 'qweb_view':
rendered = self._render_template_qweb_view(template_src, model, res_ids,
add_context=add_context, options=options)
elif engine == 'qweb':
rendered = self._render_template_qweb(template_src, model, res_ids,
add_context=add_context, options=options)
else:
rendered = self._render_template_inline_template(template_src, model, res_ids,
add_context=add_context, options=options)
if post_process:
rendered = self._render_template_postprocess(rendered)
return rendered
def _render_lang(self, res_ids, engine='inline_template'):
""" Given some record ids, return the lang for each record based on
lang field of template or through specific context-based key. Lang is
computed by performing a rendering on res_ids, based on self.render_model.
:param list res_ids: list of ids of records. All should belong to the
Odoo model given by model;
:param string engine: inline_template or qweb_view;
:return dict: {res_id: lang code (i.e. en_US)}
"""
self.ensure_one()
if not isinstance(res_ids, (list, tuple)):
raise ValueError(_('Template rendering for language should be called with a list of IDs.'))
rendered_langs = self._render_template(self.lang, self.render_model, res_ids, engine=engine)
return dict(
(res_id, lang)
for res_id, lang in rendered_langs.items()
)
def _classify_per_lang(self, res_ids, engine='inline_template'):
""" Given some record ids, return for computed each lang a contextualized
template and its subset of res_ids.
:param list res_ids: list of ids of records (all belonging to same model
defined by self.render_model)
:param string engine: inline_template, qweb, or qweb_view;
        :return dict: {lang: (template contextualized with lang=lang_code if a specific
          lang was computed, otherwise template; res_ids targeted by that language)}
"""
self.ensure_one()
if self.env.context.get('template_preview_lang'):
lang_to_res_ids = {self.env.context['template_preview_lang']: res_ids}
else:
lang_to_res_ids = {}
for res_id, lang in self._render_lang(res_ids, engine=engine).items():
lang_to_res_ids.setdefault(lang, []).append(res_id)
return dict(
(lang, (self.with_context(lang=lang) if lang else self, lang_res_ids))
for lang, lang_res_ids in lang_to_res_ids.items()
)
def _render_field(self, field, res_ids, engine='inline_template',
compute_lang=False, set_lang=False,
add_context=None, options=None, post_process=False):
""" Given some record ids, render a template located on field on all
records. ``field`` should be a field of self (i.e. ``body_html`` on
``mail.template``). res_ids are record IDs linked to ``model`` field
on self.
:param field: a field name existing on self;
:param list res_ids: list of ids of records (all belonging to same model
defined by ``self.render_model``)
:param string engine: inline_template, qweb, or qweb_view;
:param boolean compute_lang: compute language to render on translated
version of the template instead of default (probably english) one.
Language will be computed based on ``self.lang``;
:param string set_lang: force language for rendering. It should be a
          valid lang code matching an active res.lang. Checked only if
``compute_lang`` is False;
:param dict add_context: additional context to give to renderer;
:param dict options: options for rendering;
:param boolean post_process: perform a post processing on rendered result
(notably html links management). See``_render_template_postprocess``);
:return dict: {res_id: string of rendered template based on record}
"""
if options is None:
options = {}
self.ensure_one()
if compute_lang:
templates_res_ids = self._classify_per_lang(res_ids)
elif set_lang:
templates_res_ids = {set_lang: (self.with_context(lang=set_lang), res_ids)}
else:
templates_res_ids = {self._context.get('lang'): (self, res_ids)}
# rendering options
engine = getattr(self._fields[field], 'render_engine', engine)
options.update(**getattr(self._fields[field], 'render_options', {}))
post_process = options.get('post_process') or post_process
return dict(
(res_id, rendered)
for lang, (template, tpl_res_ids) in templates_res_ids.items()
for res_id, rendered in template._render_template(
template[field], template.render_model, tpl_res_ids, engine=engine,
add_context=add_context, options=options, post_process=post_process
).items()
)
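    # Illustrative usage sketch (hypothetical template and ids, not part of the
    # original file):
    #   bodies = template._render_field('body_html', [7, 8], compute_lang=True)
    # returns {7: '<p>...</p>', 8: '<p>...</p>'}, rendered per record in the
    # language computed from template.lang.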
--- File 9 | py | PYTHON | branch 15.0 | size 970 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class ResGroups(models.Model):
""" Update of res.groups class
        - if adding users to a group, check the mail.channels linked to this
          group and subscribe them. This is done by overriding the write method.
"""
_name = 'res.groups'
_inherit = 'res.groups'
_description = 'Access Groups'
def write(self, vals):
res = super(ResGroups, self).write(vals)
if vals.get('users'):
            # form: {'users': [(3, 10), (3, 3), (4, 10), (4, 3)]} or {'users': [(6, 0, [ids])]}
user_ids = [command[1] for command in vals['users'] if command[0] == 4]
user_ids += [id for command in vals['users'] if command[0] == 6 for id in command[2]]
self.env['mail.channel'].search([('group_ids', 'in', self._ids)])._subscribe_users_automatically()
return res
--- File 10 | py | PYTHON | branch 15.0 | size 5,651 ---
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import _, api, fields, models
_logger = logging.getLogger(__name__)
class AliasMixin(models.AbstractModel):
""" A mixin for models that inherits mail.alias. This mixin initializes the
alias_id column in database, and manages the expected one-to-one
relation between your model and mail aliases.
"""
_name = 'mail.alias.mixin'
_inherits = {'mail.alias': 'alias_id'}
_description = 'Email Aliases Mixin'
ALIAS_WRITEABLE_FIELDS = ['alias_name', 'alias_contact', 'alias_defaults', 'alias_bounced_content']
alias_id = fields.Many2one('mail.alias', string='Alias', ondelete="restrict", required=True)
# --------------------------------------------------
# CRUD
# --------------------------------------------------
@api.model_create_multi
def create(self, vals_list):
""" Create a record with each ``vals`` or ``vals_list`` and create a corresponding alias. """
# prepare all alias values
alias_vals_list, record_vals_list = [], []
for vals in vals_list:
new_alias = not vals.get('alias_id')
if new_alias:
alias_vals, record_vals = self._alias_filter_fields(vals)
alias_vals.update(self._alias_get_creation_values())
alias_vals_list.append(alias_vals)
record_vals_list.append(record_vals)
# create all aliases
alias_ids = []
if alias_vals_list:
alias_ids = iter(self.env['mail.alias'].sudo().create(alias_vals_list).ids)
# update alias values in create vals directly
valid_vals_list = []
record_vals_iter = iter(record_vals_list)
for vals in vals_list:
new_alias = not vals.get('alias_id')
if new_alias:
record_vals = next(record_vals_iter)
record_vals['alias_id'] = next(alias_ids)
valid_vals_list.append(record_vals)
else:
valid_vals_list.append(vals)
records = super(AliasMixin, self).create(valid_vals_list)
for record in records:
record.alias_id.sudo().write(record._alias_get_creation_values())
return records
def write(self, vals):
""" Split writable fields of mail.alias and other fields alias fields will
write with sudo and the other normally """
alias_vals, record_vals = self._alias_filter_fields(vals, filters=self.ALIAS_WRITEABLE_FIELDS)
if record_vals:
super(AliasMixin, self).write(record_vals)
if alias_vals and (record_vals or self.check_access_rights('write', raise_exception=False)):
self.mapped('alias_id').sudo().write(alias_vals)
return True
def unlink(self):
""" Delete the given records, and cascade-delete their corresponding alias. """
aliases = self.mapped('alias_id')
res = super(AliasMixin, self).unlink()
aliases.sudo().unlink()
return res
@api.returns(None, lambda value: value[0])
def copy_data(self, default=None):
data = super(AliasMixin, self).copy_data(default)[0]
for fields_not_writable in set(self.env['mail.alias']._fields.keys()) - set(self.ALIAS_WRITEABLE_FIELDS):
if fields_not_writable in data:
del data[fields_not_writable]
return [data]
def _init_column(self, name):
""" Create aliases for existing rows. """
super(AliasMixin, self)._init_column(name)
if name == 'alias_id':
# as 'mail.alias' records refer to 'ir.model' records, create
# aliases after the reflection of models
self.pool.post_init(self._init_column_alias_id)
def _init_column_alias_id(self):
# both self and the alias model must be present in 'ir.model'
child_ctx = {
'active_test': False, # retrieve all records
'prefetch_fields': False, # do not prefetch fields on records
}
child_model = self.sudo().with_context(child_ctx)
for record in child_model.search([('alias_id', '=', False)]):
# create the alias, and link it to the current record
alias = self.env['mail.alias'].sudo().create(record._alias_get_creation_values())
record.with_context(mail_notrack=True).alias_id = alias
_logger.info('Mail alias created for %s %s (id %s)',
record._name, record.display_name, record.id)
# --------------------------------------------------
# MIXIN TOOL OVERRIDE METHODS
# --------------------------------------------------
def _alias_get_creation_values(self):
""" Return values to create an alias, or to write on the alias after its
creation.
"""
return {
'alias_parent_thread_id': self.id if self.id else False,
'alias_parent_model_id': self.env['ir.model']._get(self._name).id,
}
def _alias_filter_fields(self, values, filters=False):
""" Split the vals dict into two dictionnary of vals, one for alias
field and the other for other fields """
if not filters:
filters = self.env['mail.alias']._fields.keys()
alias_values, record_values = {}, {}
for fname in values.keys():
if fname in filters:
alias_values[fname] = values.get(fname)
else:
record_values[fname] = values.get(fname)
return alias_values, record_values
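    # Illustrative sketch (field values are made up): with the default filters,
    #     record.write({'alias_name': 'jobs', 'name': 'HR'})
    # is split by the helper above into
    #     alias_values  = {'alias_name': 'jobs'}
    #     record_values = {'name': 'HR'}
    # so alias fields can be written with sudo while the others follow normal ACLs.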
| 41.248175 | 5,651 |
| 150,913 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
import base64
import datetime
import dateutil
import email
import email.policy
import hashlib
import hmac
import lxml
import logging
import pytz
import re
import time
import threading
from collections import namedtuple
from email.message import EmailMessage
from email import message_from_string, policy
from lxml import etree
from werkzeug import urls
from xmlrpc import client as xmlrpclib
from markupsafe import Markup
from odoo import _, api, exceptions, fields, models, tools, registry, SUPERUSER_ID, Command
from odoo.exceptions import MissingError
from odoo.osv import expression
from odoo.tools.misc import clean_context, split_every
_logger = logging.getLogger(__name__)
class MailThread(models.AbstractModel):
''' mail_thread model is meant to be inherited by any model that needs to
act as a discussion topic on which messages can be attached. Public
methods are prefixed with ``message_`` in order to avoid name
collisions with methods of the models that will inherit from this class.
``mail.thread`` defines fields used to handle and display the
communication history. ``mail.thread`` also manages followers of
inheriting classes. All features and expected behavior are managed
        by mail.thread. Widgets have been designed for the 7.0 and later
        versions of Odoo.
Inheriting classes are not required to implement any method, as the
default implementation will work for any model. However it is common
to override at least the ``message_new`` and ``message_update``
methods (calling ``super``) to add model-specific behavior at
creation and update of a thread when processing incoming emails.
Options:
- _mail_flat_thread: if set to True, all messages without parent_id
are automatically attached to the first message posted on the
                resource. If set to False, the display of Chatter is done using
threads, and no parent_id is automatically set.
        MailThread features can be somewhat controlled through context keys:
- ``mail_create_nosubscribe``: at create or message_post, do not subscribe
uid to the record thread
- ``mail_create_nolog``: at create, do not log the automatic '<Document>
created' message
- ``mail_notrack``: at create and write, do not perform the value tracking
creating messages
- ``tracking_disable``: at create and write, perform no MailThread features
(auto subscription, tracking, post, ...)
         - ``mail_notify_force_send``: if fewer than 50 email notifications are to be
           sent, send them directly instead of using the queue; True by default
'''
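    # Illustrative sketch of the context keys above (the model is an assumption,
    # any model inheriting mail.thread behaves the same way):
    #     env['res.partner'].with_context(
    #         mail_create_nosubscribe=True,  # do not subscribe the current user
    #         mail_create_nolog=True,        # skip the "<Document> created" log
    #         mail_notrack=True,             # skip field value tracking
    #     ).create({'name': 'Imported contact'})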
_name = 'mail.thread'
_description = 'Email Thread'
    _mail_flat_thread = True  # flatten the discussion history
_mail_post_access = 'write' # access required on the document to post on it
_Attachment = namedtuple('Attachment', ('fname', 'content', 'info'))
message_is_follower = fields.Boolean(
'Is Follower', compute='_compute_message_is_follower', search='_search_message_is_follower')
message_follower_ids = fields.One2many(
'mail.followers', 'res_id', string='Followers', groups='base.group_user')
message_partner_ids = fields.Many2many(
comodel_name='res.partner', string='Followers (Partners)',
compute='_compute_message_partner_ids',
search='_search_message_partner_ids',
groups='base.group_user')
message_ids = fields.One2many(
'mail.message', 'res_id', string='Messages',
domain=lambda self: [('message_type', '!=', 'user_notification')], auto_join=True)
has_message = fields.Boolean(compute="_compute_has_message", search="_search_has_message", store=False)
message_unread = fields.Boolean(
'Unread Messages', compute='_compute_message_unread',
help="If checked, new messages require your attention.")
message_unread_counter = fields.Integer(
'Unread Messages Counter', compute='_compute_message_unread',
help="Number of unread messages")
message_needaction = fields.Boolean(
'Action Needed',
compute='_compute_message_needaction', search='_search_message_needaction',
help="If checked, new messages require your attention.")
message_needaction_counter = fields.Integer(
'Number of Actions', compute='_compute_message_needaction',
help="Number of messages which requires an action")
message_has_error = fields.Boolean(
'Message Delivery error',
compute='_compute_message_has_error', search='_search_message_has_error',
help="If checked, some messages have a delivery error.")
message_has_error_counter = fields.Integer(
'Number of errors', compute='_compute_message_has_error',
help="Number of messages with delivery error")
message_attachment_count = fields.Integer('Attachment Count', compute='_compute_message_attachment_count', groups="base.group_user")
message_main_attachment_id = fields.Many2one(string="Main Attachment", comodel_name='ir.attachment', index=True, copy=False)
@api.depends('message_follower_ids')
def _compute_message_partner_ids(self):
for thread in self:
thread.message_partner_ids = thread.message_follower_ids.mapped('partner_id')
@api.model
def _search_message_partner_ids(self, operator, operand):
"""Search function for message_follower_ids
        Do not use with operator 'not in'. Use message_is_follower instead.
"""
# TOFIX make it work with not in
assert operator != "not in", "Do not search message_follower_ids with 'not in'"
followers = self.env['mail.followers'].sudo().search([
('res_model', '=', self._name),
('partner_id', operator, operand)])
# using read() below is much faster than followers.mapped('res_id')
return [('id', 'in', [res['res_id'] for res in followers.read(['res_id'])])]
@api.depends('message_follower_ids')
def _compute_message_is_follower(self):
followers = self.env['mail.followers'].sudo().search([
('res_model', '=', self._name),
('res_id', 'in', self.ids),
('partner_id', '=', self.env.user.partner_id.id),
])
# using read() below is much faster than followers.mapped('res_id')
following_ids = [res['res_id'] for res in followers.read(['res_id'])]
for record in self:
record.message_is_follower = record.id in following_ids
@api.model
def _search_message_is_follower(self, operator, operand):
followers = self.env['mail.followers'].sudo().search([
('res_model', '=', self._name),
('partner_id', '=', self.env.user.partner_id.id),
])
# Cases ('message_is_follower', '=', True) or ('message_is_follower', '!=', False)
if (operator == '=' and operand) or (operator == '!=' and not operand):
# using read() below is much faster than followers.mapped('res_id')
return [('id', 'in', [res['res_id'] for res in followers.read(['res_id'])])]
else:
# using read() below is much faster than followers.mapped('res_id')
return [('id', 'not in', [res['res_id'] for res in followers.read(['res_id'])])]
def _compute_has_message(self):
self.flush()
self.env.cr.execute("""
SELECT distinct res_id
FROM mail_message mm
WHERE res_id = any(%s)
AND mm.model=%s
""", [self.ids, self._name])
channel_ids = [r[0] for r in self.env.cr.fetchall()]
for record in self:
record.has_message = record.id in channel_ids
def _search_has_message(self, operator, value):
if (operator == '=' and value is True) or (operator == '!=' and value is False):
operator_new = 'inselect'
else:
operator_new = 'not inselect'
return [('id', operator_new, ("SELECT res_id FROM mail_message WHERE model=%s", [self._name]))]
def _compute_message_unread(self):
partner_id = self.env.user.partner_id.id
res = dict.fromkeys(self.ids, 0)
if self.ids:
            # search for unread messages directly in SQL to improve performance
self._cr.execute(""" SELECT msg.res_id FROM mail_message msg
RIGHT JOIN mail_channel_partner cp
ON (cp.channel_id = msg.res_id AND cp.partner_id = %s AND
(cp.seen_message_id IS NULL OR cp.seen_message_id < msg.id))
WHERE msg.model = %s AND msg.res_id = ANY(%s) AND
msg.message_type != 'user_notification' AND
(msg.author_id IS NULL OR msg.author_id != %s) AND
(msg.message_type not in ('notification', 'user_notification') OR msg.model != 'mail.channel')""",
(partner_id, self._name, list(self.ids), partner_id,))
for result in self._cr.fetchall():
res[result[0]] += 1
for record in self:
record.message_unread_counter = res.get(record._origin.id, 0)
record.message_unread = bool(record.message_unread_counter)
def _compute_message_needaction(self):
res = dict.fromkeys(self.ids, 0)
if self.ids:
            # search for needaction messages directly in SQL to improve performance
self._cr.execute(""" SELECT msg.res_id FROM mail_message msg
RIGHT JOIN mail_notification rel
ON rel.mail_message_id = msg.id AND rel.res_partner_id = %s AND (rel.is_read = false OR rel.is_read IS NULL)
WHERE msg.model = %s AND msg.res_id in %s AND msg.message_type != 'user_notification'""",
(self.env.user.partner_id.id, self._name, tuple(self.ids),))
for result in self._cr.fetchall():
res[result[0]] += 1
for record in self:
record.message_needaction_counter = res.get(record._origin.id, 0)
record.message_needaction = bool(record.message_needaction_counter)
@api.model
def _search_message_needaction(self, operator, operand):
return [('message_ids.needaction', operator, operand)]
def _compute_message_has_error(self):
res = {}
if self.ids:
self._cr.execute(""" SELECT msg.res_id, COUNT(msg.res_id) FROM mail_message msg
RIGHT JOIN mail_notification rel
ON rel.mail_message_id = msg.id AND rel.notification_status in ('exception','bounce')
WHERE msg.author_id = %s AND msg.model = %s AND msg.res_id in %s AND msg.message_type != 'user_notification'
GROUP BY msg.res_id""",
(self.env.user.partner_id.id, self._name, tuple(self.ids),))
res.update(self._cr.fetchall())
for record in self:
record.message_has_error_counter = res.get(record._origin.id, 0)
record.message_has_error = bool(record.message_has_error_counter)
@api.model
def _search_message_has_error(self, operator, operand):
message_ids = self.env['mail.message']._search([('has_error', operator, operand), ('author_id', '=', self.env.user.partner_id.id)])
return [('message_ids', 'in', message_ids)]
def _compute_message_attachment_count(self):
read_group_var = self.env['ir.attachment'].read_group([('res_id', 'in', self.ids), ('res_model', '=', self._name)],
fields=['res_id'],
groupby=['res_id'])
attachment_count_dict = dict((d['res_id'], d['res_id_count']) for d in read_group_var)
for record in self:
record.message_attachment_count = attachment_count_dict.get(record.id, 0)
# ------------------------------------------------------------
# CRUD
# ------------------------------------------------------------
@api.model_create_multi
def create(self, vals_list):
""" Chatter override :
- subscribe uid
- subscribe followers of parent
- log a creation message
"""
if self._context.get('tracking_disable'):
threads = super(MailThread, self).create(vals_list)
threads._discard_tracking()
return threads
threads = super(MailThread, self).create(vals_list)
# subscribe uid unless asked not to
if not self._context.get('mail_create_nosubscribe'):
for thread in threads:
self.env['mail.followers']._insert_followers(
thread._name, thread.ids,
self.env.user.partner_id.ids, subtypes=None,
customer_ids=[],
check_existing=False
)
# auto_subscribe: take values and defaults into account
create_values_list = {}
for thread, values in zip(threads, vals_list):
create_values = dict(values)
for key, val in self._context.items():
if key.startswith('default_') and key[8:] not in create_values:
create_values[key[8:]] = val
thread._message_auto_subscribe(create_values, followers_existing_policy='update')
create_values_list[thread.id] = create_values
# automatic logging unless asked not to (mainly for various testing purpose)
if not self._context.get('mail_create_nolog'):
threads_no_subtype = self.env[self._name]
for thread in threads:
subtype = thread._creation_subtype()
if subtype: # if we have a subtype, post message to notify users from _message_auto_subscribe
thread.sudo().message_post(subtype_id=subtype.id, author_id=self.env.user.partner_id.id)
else:
threads_no_subtype += thread
if threads_no_subtype:
bodies = dict(
(thread.id, thread._creation_message())
for thread in threads_no_subtype)
threads_no_subtype._message_log_batch(bodies=bodies)
# post track template if a tracked field changed
threads._discard_tracking()
if not self._context.get('mail_notrack'):
fnames = self._get_tracked_fields()
for thread in threads:
create_values = create_values_list[thread.id]
changes = [fname for fname in fnames if create_values.get(fname)]
# based on tracked field to stay consistent with write
# we don't consider that a falsy field is a change, to stay consistent with previous implementation,
# but we may want to change that behaviour later.
thread._message_track_post_template(changes)
return threads
def write(self, values):
if self._context.get('tracking_disable'):
return super(MailThread, self).write(values)
if not self._context.get('mail_notrack'):
self._prepare_tracking(self._fields)
# Perform write
result = super(MailThread, self).write(values)
# update followers
self._message_auto_subscribe(values)
return result
def unlink(self):
""" Override unlink to delete messages and followers. This cannot be
cascaded, because link is done through (res_model, res_id). """
if not self:
return True
# discard pending tracking
self._discard_tracking()
self.env['mail.message'].sudo().search([('model', '=', self._name), ('res_id', 'in', self.ids)]).unlink()
res = super(MailThread, self).unlink()
self.env['mail.followers'].sudo().search(
[('res_model', '=', self._name), ('res_id', 'in', self.ids)]
).unlink()
return res
def copy_data(self, default=None):
# avoid tracking multiple temporary changes during copy
return super(MailThread, self.with_context(mail_notrack=True)).copy_data(default=default)
@api.model
def get_empty_list_help(self, help):
""" Override of BaseModel.get_empty_list_help() to generate an help message
that adds alias information. """
model = self._context.get('empty_list_help_model')
res_id = self._context.get('empty_list_help_id')
catchall_domain = self.env['ir.config_parameter'].sudo().get_param("mail.catchall.domain")
document_name = self._context.get('empty_list_help_document_name', _('document'))
nothing_here = not help
alias = None
if catchall_domain and model and res_id: # specific res_id -> find its alias (i.e. section_id specified)
record = self.env[model].sudo().browse(res_id)
# check that the alias effectively creates new records
if record.alias_id and record.alias_id.alias_name and \
record.alias_id.alias_model_id and \
record.alias_id.alias_model_id.model == self._name and \
record.alias_id.alias_force_thread_id == 0:
alias = record.alias_id
if not alias and catchall_domain and model: # no res_id or res_id not linked to an alias -> generic help message, take a generic alias of the model
Alias = self.env['mail.alias']
aliases = Alias.search([
("alias_parent_model_id.model", "=", model),
("alias_name", "!=", False),
('alias_force_thread_id', '=', False),
('alias_parent_thread_id', '=', False)], order='id ASC')
if aliases and len(aliases) == 1:
alias = aliases[0]
if alias:
email_link = "<a href='mailto:%(email)s'>%(email)s</a>" % {'email': alias.display_name}
if nothing_here:
return "<p class='o_view_nocontent_smiling_face'>%(dyn_help)s</p>" % {
'dyn_help': _("Add a new %(document)s or send an email to %(email_link)s",
document=document_name,
email_link=email_link,
)
}
# do not add alias two times if it was added previously
if "oe_view_nocontent_alias" not in help:
return "%(static_help)s<p class='oe_view_nocontent_alias'>%(dyn_help)s</p>" % {
'static_help': help,
'dyn_help': _("Create new %(document)s by sending an email to %(email_link)s",
document=document_name,
email_link=email_link,
)
}
if nothing_here:
return "<p class='o_view_nocontent_smiling_face'>%(dyn_help)s</p>" % {
'dyn_help': _("Create new %(document)s", document=document_name),
}
return help
# ------------------------------------------------------
# MODELS / CRUD HELPERS
# ------------------------------------------------------
def _compute_field_value(self, field):
if not self._context.get('tracking_disable') and not self._context.get('mail_notrack'):
self._prepare_tracking(f.name for f in self.pool.field_computed[field] if f.store)
return super()._compute_field_value(field)
def _creation_subtype(self):
""" Give the subtypes triggered by the creation of a record
:returns: a subtype browse record (empty if no subtype is triggered)
"""
return self.env['mail.message.subtype']
def _creation_message(self):
""" Get the creation message to log into the chatter at the record's creation.
:returns: The message's body to log.
"""
self.ensure_one()
doc_name = self.env['ir.model']._get(self._name).name
return _('%s created', doc_name)
@api.model
def _get_mail_message_access(self, res_ids, operation, model_name=None):
""" mail.message check permission rules for related document. This method is
meant to be inherited in order to implement addons-specific behavior.
A common behavior would be to allow creating messages when having read
access rule on the document, for portal document such as issues. """
DocModel = self.env[model_name] if model_name else self
create_allow = getattr(DocModel, '_mail_post_access', 'write')
if operation in ['write', 'unlink']:
check_operation = 'write'
elif operation == 'create' and create_allow in ['create', 'read', 'write', 'unlink']:
check_operation = create_allow
elif operation == 'create':
check_operation = 'write'
else:
check_operation = operation
return check_operation
def _valid_field_parameter(self, field, name):
# allow tracking on models inheriting from 'mail.thread'
return name == 'tracking' or super()._valid_field_parameter(field, name)
def _fallback_lang(self):
if not self._context.get("lang"):
return self.with_context(lang=self.env.user.lang)
return self
# ------------------------------------------------------
# WRAPPERS AND TOOLS
# ------------------------------------------------------
def message_change_thread(self, new_thread, new_parent_message=False):
"""
        Transfer the messages of the current mail thread to another record.
        :param new_thread: the record (of any model inheriting mail.thread) that
            will receive the messages
        :param new_parent_message: optional message to set as the new parent of
            the transferred comments
        Example: my_lead.message_change_thread(my_project_task)
            will transfer the thread of my_lead to my_project_task
"""
self.ensure_one()
# get the subtype of the comment Message
subtype_comment = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment')
# get the ids of the comment and not-comment of the thread
# TDE check: sudo on mail.message, to be sure all messages are moved ?
MailMessage = self.env['mail.message']
msg_comment = MailMessage.search([
('model', '=', self._name),
('res_id', '=', self.id),
('message_type', '!=', 'user_notification'),
('subtype_id', '=', subtype_comment)])
msg_not_comment = MailMessage.search([
('model', '=', self._name),
('res_id', '=', self.id),
('message_type', '!=', 'user_notification'),
('subtype_id', '!=', subtype_comment)])
# update the messages
msg_vals = {"res_id": new_thread.id, "model": new_thread._name}
if new_parent_message:
msg_vals["parent_id"] = new_parent_message.id
msg_comment.write(msg_vals)
# other than comment: reset subtype
msg_vals["subtype_id"] = None
msg_not_comment.write(msg_vals)
return True
# ------------------------------------------------------
# TRACKING / LOG
# ------------------------------------------------------
def _prepare_tracking(self, fields):
""" Prepare the tracking of ``fields`` for ``self``.
        :param fields: iterable of field names to potentially track
"""
fnames = self._get_tracked_fields().intersection(fields)
if not fnames:
return
self.env.cr.precommit.add(self._finalize_tracking)
initial_values = self.env.cr.precommit.data.setdefault(f'mail.tracking.{self._name}', {})
for record in self:
if not record.id:
continue
values = initial_values.setdefault(record.id, {})
if values is not None:
for fname in fnames:
values.setdefault(fname, record[fname])
def _discard_tracking(self):
""" Prevent any tracking of fields on ``self``. """
if not self._get_tracked_fields():
return
self.env.cr.precommit.add(self._finalize_tracking)
initial_values = self.env.cr.precommit.data.setdefault(f'mail.tracking.{self._name}', {})
# disable tracking by setting initial values to None
for id_ in self.ids:
initial_values[id_] = None
def _finalize_tracking(self):
""" Generate the tracking messages for the records that have been
prepared with ``_prepare_tracking``.
"""
initial_values = self.env.cr.precommit.data.pop(f'mail.tracking.{self._name}', {})
ids = [id_ for id_, vals in initial_values.items() if vals]
if not ids:
return
records = self.browse(ids).sudo()
fnames = self._get_tracked_fields()
context = clean_context(self._context)
tracking = records.with_context(context).message_track(fnames, initial_values)
for record in records:
changes, tracking_value_ids = tracking.get(record.id, (None, None))
record._message_track_post_template(changes)
# this method is called after the main flush() and just before commit();
# we have to flush() again in case we triggered some recomputations
self.flush()
@tools.ormcache('self.env.uid', 'self.env.su')
def _get_tracked_fields(self):
""" Return the set of tracked fields names for the current model. """
fields = {
name
for name, field in self._fields.items()
if getattr(field, 'tracking', None) or getattr(field, 'track_visibility', None)
}
return fields and set(self.fields_get(fields))
def _message_track_post_template(self, changes):
if not changes:
return True
# Clean the context to get rid of residual default_* keys
# that could cause issues afterward during the mail.message
# generation. Example: 'default_parent_id' would refer to
# the parent_id of the current record that was used during
# its creation, but could refer to wrong parent message id,
# leading to a traceback in case the related message_id
# doesn't exist
self = self.with_context(clean_context(self._context))
templates = self._track_template(changes)
for field_name, (template, post_kwargs) in templates.items():
if not template:
continue
if isinstance(template, str):
self._fallback_lang().message_post_with_view(template, **post_kwargs)
else:
self._fallback_lang().message_post_with_template(template.id, **post_kwargs)
return True
def _track_template(self, changes):
return dict()
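    # Illustrative sketch of a ``_track_template`` override (the field, template
    # and kwargs are assumptions): it returns {field_name: (template, message_post
    # kwargs)}, which is consumed by _message_track_post_template above.
    #     def _track_template(self, changes):
    #         res = super()._track_template(changes)
    #         if 'stage_id' in changes and self.stage_id.mail_template_id:
    #             res['stage_id'] = (self.stage_id.mail_template_id,
    #                                {'composition_mode': 'comment'})
    #         return res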
def message_track(self, tracked_fields, initial_values):
""" Track updated values. Comparing the initial and current values of
the fields given in tracked_fields, it generates a message containing
the updated values. This message can be linked to a mail.message.subtype
given by the ``_track_subtype`` method.
:param tracked_fields: iterable of field names to track
:param initial_values: mapping {record_id: {field_name: value}}
:return: mapping {record_id: (changed_field_names, tracking_value_ids)}
containing existing records only
"""
if not tracked_fields:
return True
tracked_fields = self.fields_get(tracked_fields)
tracking = dict()
for record in self:
try:
tracking[record.id] = record._mail_track(tracked_fields, initial_values[record.id])
except MissingError:
continue
for record in self:
changes, tracking_value_ids = tracking.get(record.id, (None, None))
if not changes:
continue
# find subtypes and post messages or log if no subtype found
subtype = False
            # Passing this key lets the subtype stay empty, so no email is sent because partners_to_notify from mail_message._notify will be empty
if not self._context.get('mail_track_log_only'):
subtype = record._track_subtype(dict((col_name, initial_values[record.id][col_name]) for col_name in changes))
if subtype:
if not subtype.exists():
_logger.debug('subtype "%s" not found' % subtype.name)
continue
record.message_post(subtype_id=subtype.id, tracking_value_ids=tracking_value_ids)
elif tracking_value_ids:
record._message_log(tracking_value_ids=tracking_value_ids)
return tracking
def _track_subtype(self, init_values):
""" Give the subtypes triggered by the changes on the record according
to values that have been updated.
:param init_values: the original values of the record; only modified fields
are present in the dict
:type init_values: dict
        :returns: a subtype browse record or False if no subtype is triggered
"""
return False
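    # Illustrative sketch of a ``_track_subtype`` override (the field and XML id
    # are assumptions):
    #     def _track_subtype(self, init_values):
    #         if 'state' in init_values and self.state == 'done':
    #             return self.env.ref('my_module.mt_state_done')
    #         return super()._track_subtype(init_values)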
# ------------------------------------------------------
# MAIL GATEWAY
# ------------------------------------------------------
def _routing_warn(self, error_message, message_id, route, raise_exception=True):
""" Tools method used in _routing_check_route: whether to log a warning or raise an error """
short_message = _("Mailbox unavailable - %s", error_message)
full_message = ('Routing mail with Message-Id %s: route %s: %s' %
(message_id, route, error_message))
_logger.info(full_message)
if raise_exception:
# sender should not see private diagnostics info, just the error
raise ValueError(short_message)
def _routing_create_bounce_email(self, email_from, body_html, message, **mail_values):
bounce_to = tools.decode_message_header(message, 'Return-Path') or email_from
bounce_mail_values = {
'author_id': False,
'body_html': body_html,
'subject': 'Re: %s' % message.get('subject'),
'email_to': bounce_to,
'auto_delete': True,
}
bounce_from = self.env['ir.mail_server']._get_default_bounce_address()
if bounce_from:
bounce_mail_values['email_from'] = tools.formataddr(('MAILER-DAEMON', bounce_from))
elif self.env['ir.config_parameter'].sudo().get_param("mail.catchall.alias") not in message['To']:
bounce_mail_values['email_from'] = tools.decode_message_header(message, 'To')
else:
bounce_mail_values['email_from'] = tools.formataddr(('MAILER-DAEMON', self.env.user.email_normalized))
bounce_mail_values.update(mail_values)
self.env['mail.mail'].sudo().create(bounce_mail_values).send()
@api.model
def _routing_handle_bounce(self, email_message, message_dict):
""" Handle bounce of incoming email. Based on values of the bounce (email
        and related partner, sent message and its message ID)
* find blacklist-enabled records with email_normalized = bounced email
and call ``_message_receive_bounce`` on each of them to propagate
bounce information through various records linked to same email;
* if not already done (i.e. if original record is not blacklist enabled
like a bounce on an applicant), find record linked to bounced message
and call ``_message_receive_bounce``;
:param email_message: incoming email;
:type email_message: email.message;
:param message_dict: dictionary holding already-parsed values and in
which bounce-related values will be added;
:type message_dict: dictionary;
"""
bounced_record, bounced_record_done = False, False
bounced_email, bounced_partner = message_dict['bounced_email'], message_dict['bounced_partner']
bounced_msg_id, bounced_message = message_dict['bounced_msg_id'], message_dict['bounced_message']
if bounced_email:
bounced_model, bounced_res_id = bounced_message.model, bounced_message.res_id
if bounced_model and bounced_model in self.env and bounced_res_id:
bounced_record = self.env[bounced_model].sudo().browse(bounced_res_id).exists()
bl_models = self.env['ir.model'].sudo().search(['&', ('is_mail_blacklist', '=', True), ('model', '!=', 'mail.thread.blacklist')])
for model in [bl_model for bl_model in bl_models if bl_model.model in self.env]: # transient test mode
rec_bounce_w_email = self.env[model.model].sudo().search([('email_normalized', '=', bounced_email)])
rec_bounce_w_email._message_receive_bounce(bounced_email, bounced_partner)
bounced_record_done = bounced_record_done or (bounced_record and model.model == bounced_model and bounced_record in rec_bounce_w_email)
# set record as bounced unless already done due to blacklist mixin
if bounced_record and not bounced_record_done and issubclass(type(bounced_record), self.pool['mail.thread']):
bounced_record._message_receive_bounce(bounced_email, bounced_partner)
if bounced_partner and bounced_message:
self.env['mail.notification'].sudo().search([
('mail_message_id', '=', bounced_message.id),
('res_partner_id', 'in', bounced_partner.ids)]
).write({'notification_status': 'bounce'})
if bounced_record:
_logger.info('Routing mail from %s to %s with Message-Id %s: not routing bounce email from %s replying to %s (model %s ID %s)',
message_dict['email_from'], message_dict['to'], message_dict['message_id'], bounced_email, bounced_msg_id, bounced_model, bounced_res_id)
elif bounced_email:
_logger.info('Routing mail from %s to %s with Message-Id %s: not routing bounce email from %s replying to %s (no document found)',
message_dict['email_from'], message_dict['to'], message_dict['message_id'], bounced_email, bounced_msg_id)
else:
_logger.info('Routing mail from %s to %s with Message-Id %s: not routing bounce email.',
message_dict['email_from'], message_dict['to'], message_dict['message_id'])
@api.model
def _routing_check_route(self, message, message_dict, route, raise_exception=True):
""" Verify route validity. Check and rules:
1 - if thread_id -> check that document effectively exists; otherwise
fallback on a message_new by resetting thread_id
2 - check that message_update exists if thread_id is set; or at least
that message_new exist
3 - if there is an alias, check alias_contact:
'followers' and thread_id:
check on target document that the author is in the followers
'followers' and alias_parent_thread_id:
check on alias parent document that the author is in the
followers
                'partners': check that author_id is set
:param message: an email.message instance
:param message_dict: dictionary of values that will be given to
mail_message.create()
:param route: route to check which is a tuple (model, thread_id,
custom_values, uid, alias)
:param raise_exception: if an error occurs, tell whether to raise an error
or just log a warning and try other processing or
invalidate route
"""
assert isinstance(route, (list, tuple)), 'A route should be a list or a tuple'
assert len(route) == 5, 'A route should contain 5 elements: model, thread_id, custom_values, uid, alias record'
message_id = message_dict['message_id']
email_from = message_dict['email_from']
author_id = message_dict.get('author_id')
model, thread_id, alias = route[0], route[1], route[4]
record_set = None
# Wrong model
if not model:
self._routing_warn(_('target model unspecified'), message_id, route, raise_exception)
return ()
elif model not in self.env:
self._routing_warn(_('unknown target model %s', model), message_id, route, raise_exception)
return ()
record_set = self.env[model].browse(thread_id) if thread_id else self.env[model]
# Existing Document: check if exists and model accepts the mailgateway; if not, fallback on create if allowed
if thread_id:
if not record_set.exists():
self._routing_warn(
_('reply to missing document (%(model)s,%(thread)s), fall back on document creation', model=model, thread=thread_id),
message_id,
route,
False
)
thread_id = None
elif not hasattr(record_set, 'message_update'):
self._routing_warn(_('reply to model %s that does not accept document update, fall back on document creation', model), message_id, route, False)
thread_id = None
# New Document: check model accepts the mailgateway
if not thread_id and model and not hasattr(record_set, 'message_new'):
self._routing_warn(_('model %s does not accept document creation', model), message_id, route, raise_exception)
return ()
# Update message author. We do it now because we need it for aliases (contact settings)
if not author_id:
if record_set:
authors = self._mail_find_partner_from_emails([email_from], records=record_set)
elif alias and alias.alias_parent_model_id and alias.alias_parent_thread_id:
records = self.env[alias.alias_parent_model_id.model].browse(alias.alias_parent_thread_id)
authors = self._mail_find_partner_from_emails([email_from], records=records)
else:
authors = self._mail_find_partner_from_emails([email_from], records=None)
if authors:
message_dict['author_id'] = authors[0].id
# Alias: check alias_contact settings
if alias:
if thread_id:
obj = record_set[0]
elif alias.alias_parent_model_id and alias.alias_parent_thread_id:
obj = self.env[alias.alias_parent_model_id.model].browse(alias.alias_parent_thread_id)
else:
obj = self.env[model]
error_message = obj._alias_get_error_message(message, message_dict, alias)
if error_message:
self._routing_warn(
_('alias %(name)s: %(error)s', name=alias.alias_name, error=error_message or _('unknown error')),
message_id,
route,
False
)
body = alias._get_alias_bounced_body(message_dict)
self._routing_create_bounce_email(email_from, body, message, references=message_id)
return False
return (model, thread_id, route[2], route[3], route[4])
@api.model
def _routing_reset_bounce(self, email_message, message_dict):
"""Called by ``message_process`` when a new mail is received from an email address.
If the email is related to a partner, we consider that the number of message_bounce
is not relevant anymore as the email is valid - as we received an email from this
        address. We cannot know which model the incoming mail matches, so when a mail
        arrives we clear the bounce counter on every model having one.
        :param email_message: incoming email;
        :param message_dict: dictionary holding already-parsed values, notably
            ``email_from``, the address that sent the incoming email."""
valid_email = message_dict['email_from']
if valid_email:
bl_models = self.env['ir.model'].sudo().search(['&', ('is_mail_blacklist', '=', True), ('model', '!=', 'mail.thread.blacklist')])
for model in [bl_model for bl_model in bl_models if bl_model.model in self.env]: # transient test mode
self.env[model.model].sudo().search([('message_bounce', '>', 0), ('email_normalized', '=', valid_email)])._message_reset_bounce(valid_email)
@api.model
def message_route(self, message, message_dict, model=None, thread_id=None, custom_values=None):
""" Attempt to figure out the correct target model, thread_id,
custom_values and user_id to use for an incoming message.
Multiple values may be returned, if a message had multiple
recipients matching existing mail.aliases, for example.
The following heuristics are used, in this order:
* if the message replies to an existing thread by having a Message-Id
that matches an existing mail_message.message_id, we take the original
message model/thread_id pair and ignore custom_value as no creation will
take place;
* look for a mail.alias entry matching the message recipients and use the
corresponding model, thread_id, custom_values and user_id. This could
lead to a thread update or creation depending on the alias;
* fallback on provided ``model``, ``thread_id`` and ``custom_values``;
* raise an exception as no route has been found
:param string message: an email.message instance
:param dict message_dict: dictionary holding parsed message variables
:param string model: the fallback model to use if the message does not match
any of the currently configured mail aliases (may be None if a matching
alias is supposed to be present)
:type dict custom_values: optional dictionary of default field values
to pass to ``message_new`` if a new record needs to be created.
Ignored if the thread record already exists, and also if a matching
mail.alias was found (aliases define their own defaults)
:param int thread_id: optional ID of the record/thread from ``model`` to
which this mail should be attached. Only used if the message does not
reply to an existing thread and does not match any mail alias.
:return: list of routes [(model, thread_id, custom_values, user_id, alias)]
:raises: ValueError, TypeError
"""
if not isinstance(message, EmailMessage):
raise TypeError('message must be an email.message.EmailMessage at this point')
catchall_alias = self.env['ir.config_parameter'].sudo().get_param("mail.catchall.alias")
bounce_alias = self.env['ir.config_parameter'].sudo().get_param("mail.bounce.alias")
fallback_model = model
# get email.message.Message variables for future processing
message_id = message_dict['message_id']
# compute references to find if message is a reply to an existing thread
thread_references = message_dict['references'] or message_dict['in_reply_to']
msg_references = [
re.sub(r'[\r\n\t ]+', r'', ref) # "Unfold" buggy references
for ref in tools.mail_header_msgid_re.findall(thread_references)
if 'reply_to' not in ref
]
mail_messages = self.env['mail.message'].sudo().search([('message_id', 'in', msg_references)], limit=1, order='id desc, message_id')
is_a_reply = bool(mail_messages)
reply_model, reply_thread_id = mail_messages.model, mail_messages.res_id
# author and recipients
email_from = message_dict['email_from']
email_from_localpart = (tools.email_split(email_from) or [''])[0].split('@', 1)[0].lower()
email_to = message_dict['to']
email_to_localparts = [
e.split('@', 1)[0].lower()
for e in (tools.email_split(email_to) or [''])
]
        # Delivered-To is a safe bet in most modern MTAs, but we have to fall back on To + Cc values
# for all the odd MTAs out there, as there is no standard header for the envelope's `rcpt_to` value.
rcpt_tos_localparts = [
e.split('@')[0].lower()
for e in tools.email_split(message_dict['recipients'])
]
rcpt_tos_valid_localparts = [to for to in rcpt_tos_localparts]
# 0. Handle bounce: verify whether this is a bounced email and use it to collect bounce data and update notifications for customers
# Bounce alias: if any To contains bounce_alias@domain
# Bounce message (not alias)
# See http://datatracker.ietf.org/doc/rfc3462/?include_text=1
        # As not all MTAs respect this RFC (googlemail is one of them),
        # we also need to verify if the message comes from "mailer-daemon"
# If not a bounce: reset bounce information
if bounce_alias and any(email == bounce_alias for email in email_to_localparts):
self._routing_handle_bounce(message, message_dict)
return []
if message.get_content_type() == 'multipart/report' or email_from_localpart == 'mailer-daemon':
self._routing_handle_bounce(message, message_dict)
return []
self._routing_reset_bounce(message, message_dict)
# 1. Handle reply
# if destination = alias with different model -> consider it is a forward and not a reply
# if destination = alias with same model -> check contact settings as they still apply
if reply_model and reply_thread_id:
reply_model_id = self.env['ir.model']._get_id(reply_model)
other_model_aliases = self.env['mail.alias'].search([
'&', '&',
('alias_name', '!=', False),
('alias_name', 'in', email_to_localparts),
('alias_model_id', '!=', reply_model_id),
])
if other_model_aliases:
is_a_reply = False
rcpt_tos_valid_localparts = [to for to in rcpt_tos_valid_localparts if to in other_model_aliases.mapped('alias_name')]
if is_a_reply and reply_model:
reply_model_id = self.env['ir.model']._get_id(reply_model)
dest_aliases = self.env['mail.alias'].search([
('alias_name', 'in', rcpt_tos_localparts),
('alias_model_id', '=', reply_model_id)
], limit=1)
user_id = self._mail_find_user_for_gateway(email_from, alias=dest_aliases).id or self._uid
route = self._routing_check_route(
message, message_dict,
(reply_model, reply_thread_id, custom_values, user_id, dest_aliases),
raise_exception=False)
if route:
_logger.info(
'Routing mail from %s to %s with Message-Id %s: direct reply to msg: model: %s, thread_id: %s, custom_values: %s, uid: %s',
email_from, email_to, message_id, reply_model, reply_thread_id, custom_values, self._uid)
return [route]
elif route is False:
return []
# 2. Handle new incoming email by checking aliases and applying their settings
if rcpt_tos_localparts:
# no route found for a matching reference (or reply), so parent is invalid
message_dict.pop('parent_id', None)
# check it does not directly contact catchall
if catchall_alias and email_to_localparts and all(email_localpart == catchall_alias for email_localpart in email_to_localparts):
_logger.info('Routing mail from %s to %s with Message-Id %s: direct write to catchall, bounce', email_from, email_to, message_id)
body = self.env.ref('mail.mail_bounce_catchall')._render({
'message': message,
}, engine='ir.qweb')
self._routing_create_bounce_email(email_from, body, message, references=message_id, reply_to=self.env.company.email)
return []
dest_aliases = self.env['mail.alias'].search([('alias_name', 'in', rcpt_tos_valid_localparts)])
if dest_aliases:
routes = []
for alias in dest_aliases:
user_id = self._mail_find_user_for_gateway(email_from, alias=alias).id or self._uid
route = (alias.sudo().alias_model_id.model, alias.alias_force_thread_id, ast.literal_eval(alias.alias_defaults), user_id, alias)
route = self._routing_check_route(message, message_dict, route, raise_exception=True)
if route:
_logger.info(
'Routing mail from %s to %s with Message-Id %s: direct alias match: %r',
email_from, email_to, message_id, route)
routes.append(route)
return routes
# 3. Fallback to the provided parameters, if they work
if fallback_model:
# no route found for a matching reference (or reply), so parent is invalid
message_dict.pop('parent_id', None)
user_id = self._mail_find_user_for_gateway(email_from).id or self._uid
route = self._routing_check_route(
message, message_dict,
(fallback_model, thread_id, custom_values, user_id, None),
raise_exception=True)
if route:
_logger.info(
'Routing mail from %s to %s with Message-Id %s: fallback to model:%s, thread_id:%s, custom_values:%s, uid:%s',
email_from, email_to, message_id, fallback_model, thread_id, custom_values, user_id)
return [route]
# ValueError if no routes found and if no bounce occurred
raise ValueError(
'No possible route found for incoming message from %s to %s (Message-Id %s:). '
'Create an appropriate mail.alias or force the destination model.' %
(email_from, email_to, message_id)
)
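    # Shape of a returned route, for reference (values are illustrative only):
    #     ('crm.lead',        # target model
    #      0,                 # thread_id: falsy -> create a new record via message_new
    #      {'type': 'lead'},  # custom_values, e.g. taken from the alias defaults
    #      7,                 # user id used to run message_new / message_update
    #      alias)             # the matched mail.alias record, or None for fallback routes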
@api.model
def _message_route_process(self, message, message_dict, routes):
self = self.with_context(attachments_mime_plainxml=True) # import XML attachments as text
# postpone setting message_dict.partner_ids after message_post, to avoid double notifications
original_partner_ids = message_dict.pop('partner_ids', [])
thread_id = False
for model, thread_id, custom_values, user_id, alias in routes or ():
subtype_id = False
related_user = self.env['res.users'].browse(user_id)
Model = self.env[model].with_context(mail_create_nosubscribe=True, mail_create_nolog=True)
if not (thread_id and hasattr(Model, 'message_update') or hasattr(Model, 'message_new')):
raise ValueError(
"Undeliverable mail with Message-Id %s, model %s does not accept incoming emails" %
(message_dict['message_id'], model)
)
            # disable subscriptions during message_new/update to avoid having the system user running the
# email gateway become a follower of all inbound messages
ModelCtx = Model.with_user(related_user).sudo()
if thread_id and hasattr(ModelCtx, 'message_update'):
thread = ModelCtx.browse(thread_id)
thread.message_update(message_dict)
else:
# if a new thread is created, parent is irrelevant
message_dict.pop('parent_id', None)
thread = ModelCtx.message_new(message_dict, custom_values)
thread_id = thread.id
subtype_id = thread._creation_subtype().id
            # replies to internal messages are considered as notes, but the parent message
            # author is added to the recipients to ensure they are notified of a private answer
parent_message = False
if message_dict.get('parent_id'):
parent_message = self.env['mail.message'].sudo().browse(message_dict['parent_id'])
partner_ids = []
if not subtype_id:
if message_dict.get('is_internal'):
subtype_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note')
if parent_message and parent_message.author_id:
partner_ids = [parent_message.author_id.id]
else:
subtype_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment')
post_params = dict(subtype_id=subtype_id, partner_ids=partner_ids, **message_dict)
# remove computational values not stored on mail.message and avoid warnings when creating it
for x in ('from', 'to', 'cc', 'recipients', 'references', 'in_reply_to', 'bounced_email', 'bounced_message', 'bounced_msg_id', 'bounced_partner'):
post_params.pop(x, None)
new_msg = False
if thread._name == 'mail.thread': # message with parent_id not linked to record
new_msg = thread.message_notify(**post_params)
else:
# parsing should find an author independently of user running mail gateway, and ensure it is not odoobot
partner_from_found = message_dict.get('author_id') and message_dict['author_id'] != self.env['ir.model.data']._xmlid_to_res_id('base.partner_root')
thread = thread.with_context(mail_create_nosubscribe=not partner_from_found)
new_msg = thread.message_post(**post_params)
if new_msg and original_partner_ids:
# postponed after message_post, because this is an external message and we don't want to create
# duplicate emails due to notifications
new_msg.write({'partner_ids': original_partner_ids})
return thread_id
@api.model
def message_process(self, model, message, custom_values=None,
save_original=False, strip_attachments=False,
thread_id=None):
""" Process an incoming RFC2822 email message, relying on
``mail.message.parse()`` for the parsing operation,
and ``message_route()`` to figure out the target model.
Once the target model is known, its ``message_new`` method
is called with the new message (if the thread record did not exist)
or its ``message_update`` method (if it did).
:param string model: the fallback model to use if the message
does not match any of the currently configured mail aliases
(may be None if a matching alias is supposed to be present)
:param message: source of the RFC2822 message
:type message: string or xmlrpclib.Binary
:type dict custom_values: optional dictionary of field values
to pass to ``message_new`` if a new record needs to be created.
Ignored if the thread record already exists, and also if a
matching mail.alias was found (aliases define their own defaults)
:param bool save_original: whether to keep a copy of the original
email source attached to the message after it is imported.
:param bool strip_attachments: whether to strip all attachments
before processing the message, in order to save some space.
:param int thread_id: optional ID of the record/thread from ``model``
to which this mail should be attached. When provided, this
overrides the automatic detection based on the message
headers.
"""
# extract message bytes - we are forced to pass the message as binary because
# we don't know its encoding until we parse its headers and hence can't
# convert it to utf-8 for transport between the mailgate script and here.
if isinstance(message, xmlrpclib.Binary):
message = bytes(message.data)
if isinstance(message, str):
message = message.encode('utf-8')
message = email.message_from_bytes(message, policy=email.policy.SMTP)
# parse the message, verify we are not in a loop by checking message_id is not duplicated
msg_dict = self.message_parse(message, save_original=save_original)
if strip_attachments:
msg_dict.pop('attachments', None)
existing_msg_ids = self.env['mail.message'].search([('message_id', '=', msg_dict['message_id'])], limit=1)
if existing_msg_ids:
_logger.info('Ignored mail from %s to %s with Message-Id %s: found duplicated Message-Id during processing',
msg_dict.get('email_from'), msg_dict.get('to'), msg_dict.get('message_id'))
return False
# find possible routes for the message
routes = self.message_route(message, msg_dict, model, thread_id, custom_values)
thread_id = self._message_route_process(message, msg_dict, routes)
return thread_id
@api.model
def message_new(self, msg_dict, custom_values=None):
"""Called by ``message_process`` when a new message is received
for a given thread model, if the message did not belong to
an existing thread.
The default behavior is to create a new record of the corresponding
model (based on some very basic info extracted from the message).
Additional behavior may be implemented by overriding this method.
:param dict msg_dict: a map containing the email details and
attachments. See ``message_process`` and
``mail.message.parse`` for details.
:param dict custom_values: optional dictionary of additional
field values to pass to create()
when creating the new thread record.
Be careful, these values may override
any other values coming from the message.
:rtype: int
:return: the id of the newly created thread object
"""
data = {}
if isinstance(custom_values, dict):
data = custom_values.copy()
fields = self.fields_get()
name_field = self._rec_name or 'name'
if name_field in fields and not data.get('name'):
data[name_field] = msg_dict.get('subject', '')
return self.create(data)
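    # Illustrative sketch of a ``message_new`` override (the field is hypothetical):
    #     def message_new(self, msg_dict, custom_values=None):
    #         defaults = {'partner_email': msg_dict.get('email_from')}
    #         defaults.update(custom_values or {})
    #         return super().message_new(msg_dict, custom_values=defaults)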
def message_update(self, msg_dict, update_vals=None):
"""Called by ``message_process`` when a new message is received
for an existing thread. The default behavior is to update the record
with update_vals taken from the incoming email.
Additional behavior may be implemented by overriding this
method.
:param dict msg_dict: a map containing the email details and
attachments. See ``message_process`` and
``mail.message.parse()`` for details.
:param dict update_vals: a dict containing values to update records
given their ids; if the dict is None or is
void, no write operation is performed.
"""
if update_vals:
self.write(update_vals)
return True
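    # Illustrative sketch of a ``message_update`` override (the field is hypothetical):
    #     def message_update(self, msg_dict, update_vals=None):
    #         update_vals = dict(update_vals or {},
    #                            date_last_customer_mail=fields.Datetime.now())
    #         return super().message_update(msg_dict, update_vals=update_vals)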
def _message_receive_bounce(self, email, partner):
"""Called by ``message_process`` when a bounce email (such as Undelivered
Mail Returned to Sender) is received for an existing thread. The default
behavior is to do nothing. This method is meant to be overridden in various
modules to add some specific behavior like blacklist management or mass
        mailing statistics update. Overrides typically check whether an integer
        ``message_bounce`` column exists and, if it is the case, increment it.
:param string email: email that caused the bounce;
:param record partner: partner matching the bounced email address, if any;
"""
pass
def _message_reset_bounce(self, email):
"""Called by ``message_process`` when an email is considered as not being
a bounce. The default behavior is to do nothing. This method is meant to
be overridden in various modules to add some specific behavior like
blacklist management.
:param string email: email for which to reset bounce information
"""
pass
def _message_parse_extract_payload_postprocess(self, message, payload_dict):
""" Perform some cleaning / postprocess in the body and attachments
extracted from the email. Note that this processing is specific to the
mail module, and should not contain security or generic html cleaning.
Indeed those aspects should be covered by the html_sanitize method
located in tools. """
body, attachments = payload_dict['body'], payload_dict['attachments']
if not body.strip():
return {'body': body, 'attachments': attachments}
try:
root = lxml.html.fromstring(body)
except ValueError:
# In case the email client sent XHTML, fromstring will fail because 'Unicode strings
# with encoding declaration are not supported'.
root = lxml.html.fromstring(body.encode('utf-8'))
postprocessed = False
to_remove = []
for node in root.iter():
if 'o_mail_notification' in (node.get('class') or '') or 'o_mail_notification' in (node.get('summary') or ''):
postprocessed = True
if node.getparent() is not None:
to_remove.append(node)
if node.tag == 'img' and node.get('src', '').startswith('cid:'):
cid = node.get('src').split(':', 1)[1]
related_attachment = [attach for attach in attachments if attach[2] and attach[2].get('cid') == cid]
if related_attachment:
node.set('data-filename', related_attachment[0][0])
postprocessed = True
for node in to_remove:
node.getparent().remove(node)
if postprocessed:
body = etree.tostring(root, pretty_print=False, encoding='unicode')
return {'body': body, 'attachments': attachments}
def _message_parse_extract_payload(self, message, save_original=False):
"""Extract body as HTML and attachments from the mail message"""
attachments = []
body = u''
if save_original:
attachments.append(self._Attachment('original_email.eml', message.as_string(), {}))
# Be careful, content-type may contain tricky content like in the
# following example so test the MIME type with startswith()
#
# Content-Type: multipart/related;
# boundary="_004_3f1e4da175f349248b8d43cdeb9866f1AMSPR06MB343eurprd06pro_";
# type="text/html"
if message.get_content_maintype() == 'text':
encoding = message.get_content_charset()
body = message.get_content()
body = tools.ustr(body, encoding, errors='replace')
if message.get_content_type() == 'text/plain':
# text/plain -> <pre/>
body = tools.append_content_to_html(u'', body, preserve=True)
else:
alternative = False
mixed = False
html = u''
for part in message.walk():
if part.get_content_type() == 'binary/octet-stream':
_logger.warning("Message containing an unexpected Content-Type 'binary/octet-stream', assuming 'application/octet-stream'")
part.replace_header('Content-Type', 'application/octet-stream')
if part.get_content_type() == 'multipart/alternative':
alternative = True
if part.get_content_type() == 'multipart/mixed':
mixed = True
if part.get_content_maintype() == 'multipart':
continue # skip container
filename = part.get_filename()  # note: may not properly handle all charsets
if part.get_content_type() == 'text/xml' and not part.get_param('charset'):
# for text/xml with omitted charset, the charset is assumed to be ASCII by the `email` module
# although the payload might be in UTF8
part.set_charset('utf-8')
encoding = part.get_content_charset() # None if attachment
content = part.get_content()
info = {'encoding': encoding}
# 0) Inline Attachments -> attachments, with a third part in the tuple to match cid / attachment
if filename and part.get('content-id'):
info['cid'] = part.get('content-id').strip('><')
attachments.append(self._Attachment(filename, content, info))
continue
# 1) Explicit Attachments -> attachments
if filename or part.get('content-disposition', '').strip().startswith('attachment'):
attachments.append(self._Attachment(filename or 'attachment', content, info))
continue
# 2) text/plain -> <pre/>
if part.get_content_type() == 'text/plain' and (not alternative or not body):
body = tools.append_content_to_html(body, tools.ustr(content,
encoding, errors='replace'), preserve=True)
# 3) text/html -> raw
elif part.get_content_type() == 'text/html':
# multipart/alternative has one text part and one html part, keep only the html one
# mixed allows several html parts, append html content
append_content = not alternative or (html and mixed)
html = tools.ustr(content, encoding, errors='replace')
if not append_content:
body = html
else:
body = tools.append_content_to_html(body, html, plaintext=False)
# we only strip_classes here; everything else will be done by the html field of mail.message
body = tools.html_sanitize(body, sanitize_tags=False, strip_classes=True)
# 4) Anything else -> attachment
else:
attachments.append(self._Attachment(filename or 'attachment', content, info))
return self._message_parse_extract_payload_postprocess(message, {'body': body, 'attachments': attachments})
def _message_parse_extract_bounce(self, email_message, message_dict):
""" Parse email and extract bounce information to be used in future
processing.
:param email_message: an email.message instance;
:param message_dict: dictionary holding already-parsed values;
:return dict: bounce-related values will be added, containing
* bounced_email: email that bounced (normalized);
* bounce_partner: res.partner recordset whose email_normalized =
bounced_email;
* bounced_msg_id: list of message_ID references (<...@myserver>) linked
to the email that bounced;
* bounced_message: if found, mail.message recordset matching bounced_msg_id;
"""
if not isinstance(email_message, EmailMessage):
raise TypeError('message must be an email.message.EmailMessage at this point')
email_part = next((part for part in email_message.walk() if part.get_content_type() in {'message/rfc822', 'text/rfc822-headers'}), None)
dsn_part = next((part for part in email_message.walk() if part.get_content_type() == 'message/delivery-status'), None)
bounced_email = False
bounced_partner = self.env['res.partner'].sudo()
if dsn_part and len(dsn_part.get_payload()) > 1:
dsn = dsn_part.get_payload()[1]
final_recipient_data = tools.decode_message_header(dsn, 'Final-Recipient')
# old servers may hold void or invalid Final-Recipient header
if final_recipient_data and ";" in final_recipient_data:
bounced_email = tools.email_normalize(final_recipient_data.split(';', 1)[1].strip())
if bounced_email:
bounced_partner = self.env['res.partner'].sudo().search([('email_normalized', '=', bounced_email)])
bounced_msg_id = False
bounced_message = self.env['mail.message'].sudo()
if email_part:
if email_part.get_content_type() == 'text/rfc822-headers':
# Convert the message body into a message itself
email_payload = message_from_string(email_part.get_content(), policy=policy.SMTP)
else:
email_payload = email_part.get_payload()[0]
bounced_msg_id = tools.mail_header_msgid_re.findall(tools.decode_message_header(email_payload, 'Message-Id'))
if bounced_msg_id:
bounced_message = self.env['mail.message'].sudo().search([('message_id', 'in', bounced_msg_id)])
return {
'bounced_email': bounced_email,
'bounced_partner': bounced_partner,
'bounced_msg_id': bounced_msg_id,
'bounced_message': bounced_message,
}
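# Illustrative example (made-up address): a typical delivery-status part carries a
# header such as ``Final-Recipient: rfc822; Bounced.Address@example.com``; the code
# above keeps the part after the first ';' and normalizes it, e.g.
#
#     tools.email_normalize('rfc822; Bounced.Address@example.com'.split(';', 1)[1].strip())
#     # -> 'bounced.address@example.com'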
@api.model
def message_parse(self, message, save_original=False):
""" Parses an email.message.Message representing an RFC-2822 email
and returns a generic dict holding the message details.
:param message: email to parse
:type message: email.message.Message
:param bool save_original: whether the returned dict should include
an ``original`` attachment containing the source of the message
:rtype: dict
:return: A dict with the following structure, where each field may not
be present if missing in original message::
{ 'message_id': msg_id,
'subject': subject,
'email_from': from,
'to': to + delivered-to,
'cc': cc,
'recipients': delivered-to + to + cc + resent-to + resent-cc,
'partner_ids': partners found based on recipients emails,
'body': unified_body,
'references': references,
'in_reply_to': in-reply-to,
'parent_id': parent mail.message based on in_reply_to or references,
'is_internal': answer to an internal message (note),
'date': date,
'attachments': [('file1', 'bytes'),
('file2', 'bytes')]
}
"""
if not isinstance(message, EmailMessage):
raise ValueError(_('Message should be a valid EmailMessage instance'))
msg_dict = {'message_type': 'email'}
message_id = message.get('Message-Id')
if not message_id:
# Very unusual situation, but we should be fault-tolerant here
message_id = "<%s@localhost>" % time.time()
_logger.debug('Parsing Message without message-id, generating a random one: %s', message_id)
msg_dict['message_id'] = message_id.strip()
if message.get('Subject'):
msg_dict['subject'] = tools.decode_message_header(message, 'Subject')
email_from = tools.decode_message_header(message, 'From', separator=',')
email_cc = tools.decode_message_header(message, 'cc', separator=',')
email_from_list = tools.email_split_and_format(email_from)
email_cc_list = tools.email_split_and_format(email_cc)
msg_dict['email_from'] = email_from_list[0] if email_from_list else email_from
msg_dict['from'] = msg_dict['email_from'] # compatibility for message_new
msg_dict['cc'] = ','.join(email_cc_list) if email_cc_list else email_cc
# Delivered-To is a safe bet in most modern MTAs, but we have to fallback on To + Cc values
# for all the odd MTAs out there, as there is no standard header for the envelope's `rcpt_to` value.
msg_dict['recipients'] = ','.join(set(formatted_email
for address in [
tools.decode_message_header(message, 'Delivered-To', separator=','),
tools.decode_message_header(message, 'To', separator=','),
tools.decode_message_header(message, 'Cc', separator=','),
tools.decode_message_header(message, 'Resent-To', separator=','),
tools.decode_message_header(message, 'Resent-Cc', separator=',')
] if address
for formatted_email in tools.email_split_and_format(address))
)
msg_dict['to'] = ','.join(set(formatted_email
for address in [
tools.decode_message_header(message, 'Delivered-To', separator=','),
tools.decode_message_header(message, 'To', separator=',')
] if address
for formatted_email in tools.email_split_and_format(address))
)
partner_ids = [x.id for x in self._mail_find_partner_from_emails(tools.email_split(msg_dict['recipients']), records=self) if x]
msg_dict['partner_ids'] = partner_ids
# compute references to find if email_message is a reply to an existing thread
msg_dict['references'] = tools.decode_message_header(message, 'References')
msg_dict['in_reply_to'] = tools.decode_message_header(message, 'In-Reply-To').strip()
if message.get('Date'):
try:
date_hdr = tools.decode_message_header(message, 'Date')
parsed_date = dateutil.parser.parse(date_hdr, fuzzy=True)
if parsed_date.utcoffset() is None:
# naive datetime, so we arbitrarily decide to make it
# UTC, there's no better choice. Should not happen,
# as RFC2822 requires timezone offset in Date headers.
stored_date = parsed_date.replace(tzinfo=pytz.utc)
else:
stored_date = parsed_date.astimezone(tz=pytz.utc)
except Exception:
_logger.info('Failed to parse Date header %r in incoming mail '
'with message-id %r, assuming current date/time.',
message.get('Date'), message_id)
stored_date = datetime.datetime.now()
msg_dict['date'] = stored_date.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)
parent_ids = False
if msg_dict['in_reply_to']:
parent_ids = self.env['mail.message'].search(
[('message_id', '=', msg_dict['in_reply_to'])],
order='create_date DESC, id DESC',
limit=1)
if msg_dict['references'] and not parent_ids:
references_msg_id_list = tools.mail_header_msgid_re.findall(msg_dict['references'])
parent_ids = self.env['mail.message'].search(
[('message_id', 'in', [x.strip() for x in references_msg_id_list])],
order='create_date DESC, id DESC',
limit=1)
if parent_ids:
msg_dict['parent_id'] = parent_ids.id
msg_dict['is_internal'] = parent_ids.subtype_id and parent_ids.subtype_id.internal or False
msg_dict.update(self._message_parse_extract_payload(message, save_original=save_original))
msg_dict.update(self._message_parse_extract_bounce(message, msg_dict))
return msg_dict
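# Illustrative usage sketch (assumed caller-side code): the mail gateway feeds this
# method with an already-parsed EmailMessage, for example
#
#     msg = message_from_string(raw_email, policy=policy.SMTP)
#     msg_dict = self.env['mail.thread'].message_parse(msg, save_original=False)
#     # msg_dict then holds 'message_id', 'email_from', 'body', 'attachments', ...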
# ------------------------------------------------------
# RECIPIENTS MANAGEMENT TOOLS
# ------------------------------------------------------
def _message_add_suggested_recipient(self, result, partner=None, email=None, reason=''):
""" Called by _message_get_suggested_recipients, to add a suggested
recipient in the result dictionary. The form is :
partner_id, partner_name<partner_email> or partner_name, reason """
self.ensure_one()
if email and not partner:
# get partner info from email
partner_info = self._message_partner_info_from_emails([email])[0]
if partner_info.get('partner_id'):
partner = self.env['res.partner'].sudo().browse([partner_info['partner_id']])[0]
if email and email in [val[1] for val in result[self.ids[0]]]: # already existing email -> skip
return result
if partner and partner in self.message_partner_ids: # recipient already in the followers -> skip
return result
if partner and partner.id in [val[0] for val in result[self.ids[0]]]: # already existing partner ID -> skip
return result
if partner and partner.email: # complete profile: id, name <email>
result[self.ids[0]].append((partner.id, partner.email_formatted, reason))
elif partner: # incomplete profile: id, name
result[self.ids[0]].append((partner.id, '%s' % (partner.name), reason))
else: # unknown partner, we are probably managing an email address
result[self.ids[0]].append((False, email, reason))
return result
def _message_get_suggested_recipients(self):
""" Returns suggested recipients for ids. Those are a list of
tuple (partner_id, partner_name, reason), to be managed by Chatter. """
result = dict((res_id, []) for res_id in self.ids)
if 'user_id' in self._fields:
for obj in self.sudo(): # SUPERUSER because of a read on res.users that would crash otherwise
if not obj.user_id or not obj.user_id.partner_id:
continue
obj._message_add_suggested_recipient(result, partner=obj.user_id.partner_id, reason=self._fields['user_id'].string)
return result
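# Illustrative override sketch: a model carrying a customer field (the ``partner_id``
# many2one below is a hypothetical example) could suggest it in addition to the
# responsible user handled above, reusing ``_message_add_suggested_recipient``.
#
#     def _message_get_suggested_recipients(self):
#         recipients = super()._message_get_suggested_recipients()
#         for record in self.filtered('partner_id'):
#             record._message_add_suggested_recipient(
#                 recipients, partner=record.partner_id, reason=_('Customer'))
#         return recipients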
def _mail_search_on_user(self, normalized_emails, extra_domain=False):
""" Find partners linked to users, given an email address that will
be normalized. Search is done as sudo on res.users model to avoid domain
on partner like ('user_ids', '!=', False) that would not be efficient. """
domain = [('email_normalized', 'in', normalized_emails)]
if extra_domain:
domain = expression.AND([domain, extra_domain])
partners = self.env['res.users'].sudo().search(domain, order='name ASC').mapped('partner_id')
# return a search on partner to filter results current user should not see (multi company for example)
return self.env['res.partner'].search([('id', 'in', partners.ids)])
def _mail_search_on_partner(self, normalized_emails, extra_domain=False):
domain = [('email_normalized', 'in', normalized_emails)]
if extra_domain:
domain = expression.AND([domain, extra_domain])
return self.env['res.partner'].search(domain)
def _mail_find_user_for_gateway(self, email, alias=None):
""" Utility method to find user from email address that can create documents
in the target model. Purpose is to link document creation to users whenever
possible, for example when creating document through mailgateway.
Heuristic
* alias owner record: fetch in its followers for user with matching email;
* find any user with matching emails;
* try alias owner as fallback;
Note that standard search order is applied.
:param str email: will be sanitized and parsed to find email;
:param mail.alias alias: optional alias. Used to fetch owner followers
or fallback user (alias owner);
:return res.user user: user matching email or void recordset if none found
"""
# find normalized emails and exclude aliases (to avoid subscribing alias emails to records)
normalized_email = tools.email_normalize(email)
if not normalized_email:
return self.env['res.users']
catchall_domain = self.env['ir.config_parameter'].sudo().get_param("mail.catchall.domain")
if catchall_domain:
left_part = normalized_email.split('@')[0] if normalized_email.split('@')[1] == catchall_domain.lower() else False
if left_part:
if self.env['mail.alias'].sudo().search_count([('alias_name', '=', left_part)]):
return self.env['res.users']
if alias and alias.alias_parent_model_id and alias.alias_parent_thread_id:
followers = self.env['mail.followers'].search([
('res_model', '=', alias.alias_parent_model_id.sudo().model),
('res_id', '=', alias.alias_parent_thread_id)]
).mapped('partner_id')
else:
followers = self.env['res.partner']
follower_users = self.env['res.users'].search([
('partner_id', 'in', followers.ids), ('email_normalized', '=', normalized_email)
], limit=1) if followers else self.env['res.users']
matching_user = follower_users[0] if follower_users else self.env['res.users']
if matching_user:
return matching_user
if not matching_user:
std_users = self.env['res.users'].sudo().search([('email_normalized', '=', normalized_email)], limit=1, order='name ASC')
matching_user = std_users[0] if std_users else self.env['res.users']
if matching_user:
return matching_user
if not matching_user and alias and alias.alias_user_id:
matching_user = alias and alias.alias_user_id
if matching_user:
return matching_user
return matching_user
@api.model
def _mail_find_partner_from_emails(self, emails, records=None, force_create=False, extra_domain=False):
""" Utility method to find partners from email addresses. If no partner is
found, create new partners if force_create is enabled. Search heuristics
* 1: check in records (record set) followers, if records is mail.thread
enabled;
* 2: search for partners with user;
* 3: search for partners;
:param records: record set on which to check followers;
:param list emails: list of email addresses for finding partner;
:param boolean force_create: create a new partner if not found
:return list partners: a list of partner records ordered as given emails.
If no partner has been found and/or created for a given email, its
matching partner is an empty record.
"""
if records and issubclass(type(records), self.pool['mail.thread']):
followers = records.mapped('message_partner_ids')
else:
followers = self.env['res.partner']
catchall_domain = self.env['ir.config_parameter'].sudo().get_param("mail.catchall.domain")
# first, build a normalized email list and remove those linked to aliases to avoid adding aliases as partners
normalized_emails = [tools.email_normalize(contact) for contact in emails if tools.email_normalize(contact)]
if catchall_domain:
domain_left_parts = [email.split('@')[0] for email in normalized_emails if email and email.split('@')[1] == catchall_domain.lower()]
if domain_left_parts:
found_alias_names = self.env['mail.alias'].sudo().search([('alias_name', 'in', domain_left_parts)]).mapped('alias_name')
normalized_emails = [email for email in normalized_emails if email.split('@')[0] not in found_alias_names]
done_partners = [follower for follower in followers if follower.email_normalized in normalized_emails]
remaining = [email for email in normalized_emails if email not in [partner.email_normalized for partner in done_partners]]
user_partners = self._mail_search_on_user(remaining, extra_domain=extra_domain)
done_partners += [user_partner for user_partner in user_partners]
remaining = [email for email in normalized_emails if email not in [partner.email_normalized for partner in done_partners]]
partners = self._mail_search_on_partner(remaining, extra_domain=extra_domain)
done_partners += [partner for partner in partners]
remaining = [email for email in normalized_emails if email not in [partner.email_normalized for partner in done_partners]]
# iterate and keep ordering
partners = []
for contact in emails:
normalized_email = tools.email_normalize(contact)
partner = next((partner for partner in done_partners if partner.email_normalized == normalized_email), self.env['res.partner'])
if not partner and force_create and normalized_email in normalized_emails:
partner = self.env['res.partner'].browse(self.env['res.partner'].name_create(contact)[0])
partners.append(partner)
return partners
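# Illustrative usage sketch (hypothetical addresses): the result keeps the ordering
# of the ``emails`` argument, with an empty recordset for unmatched addresses.
#
#     partners = record._mail_find_partner_from_emails(
#         ['"Alice" <alice@example.com>', 'unknown@example.com'], records=record)
#     # partners[0] -> res.partner matching alice@example.com, if any
#     # partners[1] -> empty res.partner recordset, unless force_create=True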
def _message_partner_info_from_emails(self, emails, link_mail=False):
""" Convert a list of emails into a list partner_ids and a list
new_partner_ids. The return value is non conventional because
it is meant to be used by the mail widget.
:return dict: partner_ids and new_partner_ids """
self.ensure_one()
MailMessage = self.env['mail.message'].sudo()
partners = self._mail_find_partner_from_emails(emails, records=self)
result = list()
for idx, contact in enumerate(emails):
partner = partners[idx]
partner_info = {'full_name': partner.email_formatted if partner else contact, 'partner_id': partner.id}
result.append(partner_info)
# link existing messages sent from this email address (without author) to the matched partner
if link_mail and partner:
MailMessage.search([
('email_from', '=ilike', partner.email_normalized),
('author_id', '=', False)
]).write({'author_id': partner.id})
return result
# ------------------------------------------------------
# MESSAGE POST API
# ------------------------------------------------------
def _message_post_process_attachments(self, attachments, attachment_ids, message_values):
""" Preprocess attachments for mail_thread.message_post() or mail_mail.create().
:param list attachments: list of attachment tuples in the form ``(name,content)``, #todo xdo update that
where content is NOT base64 encoded
:param list attachment_ids: a list of attachment ids, not in tomany command form
:param dict message_values: model: the model of the attachments parent record,
res_id: the id of the attachments parent record
"""
return_values = {}
body = message_values.get('body')
model = message_values['model']
res_id = message_values['res_id']
m2m_attachment_ids = []
if attachment_ids:
# browsing then filtering takes advantage of the cache, which looks better in this case (to check)
filtered_attachment_ids = self.env['ir.attachment'].sudo().browse(attachment_ids).filtered(
lambda a: a.res_model == 'mail.compose.message' and a.create_uid.id == self._uid)
# update filtered (pending) attachments to link them to the proper record
if filtered_attachment_ids:
filtered_attachment_ids.write({'res_model': model, 'res_id': res_id})
# prevent public and portal users from using attachments that are not theirs
if not self.env.user.has_group('base.group_user'):
attachment_ids = filtered_attachment_ids.ids
m2m_attachment_ids += [Command.link(id) for id in attachment_ids]
# Handle attachments parameter, that is a list of attachment tuples
if attachments: # generate
cids_in_body = set()
names_in_body = set()
cid_list = []
name_list = []
if body:
root = lxml.html.fromstring(tools.ustr(body))
# first list all attachments that will be needed in body
for node in root.iter('img'):
if node.get('src', '').startswith('cid:'):
cids_in_body.add(node.get('src').split('cid:')[1])
elif node.get('data-filename'):
names_in_body.add(node.get('data-filename'))
attachement_values_list = []
# generate values
for attachment in attachments:
cid = False
if len(attachment) == 2:
name, content = attachment
info = {}
elif len(attachment) == 3:
name, content, info = attachment
cid = info and info.get('cid')
else:
continue
if isinstance(content, str):
encoding = info and info.get('encoding')
try:
content = content.encode(encoding or "utf-8")
except UnicodeEncodeError:
content = content.encode("utf-8")
elif isinstance(content, EmailMessage):
content = content.as_bytes()
elif content is None:
continue
attachement_values = {
'name': name,
'datas': base64.b64encode(content),
'type': 'binary',
'description': name,
'res_model': model,
'res_id': res_id,
}
if body and (cid and cid in cids_in_body or name in names_in_body):
attachement_values['access_token'] = self.env['ir.attachment']._generate_access_token()
attachement_values_list.append(attachement_values)
# keep cid and name lists synced with attachement_values_list length to match ids later
cid_list.append(cid)
name_list.append(name)
new_attachments = self.env['ir.attachment'].create(attachement_values_list)
cid_mapping = {}
name_mapping = {}
for counter, new_attachment in enumerate(new_attachments):
cid = cid_list[counter]
if 'access_token' in attachement_values_list[counter]:
if cid:
cid_mapping[cid] = (new_attachment.id, attachement_values_list[counter]['access_token'])
name = name_list[counter]
name_mapping[name] = (new_attachment.id, attachement_values_list[counter]['access_token'])
m2m_attachment_ids.append((4, new_attachment.id))
# note: right now we are only taking attachments into account and ignoring attachment_ids.
if (cid_mapping or name_mapping) and body:
postprocessed = False
for node in root.iter('img'):
attachment_data = False
if node.get('src', '').startswith('cid:'):
cid = node.get('src').split('cid:')[1]
attachment_data = cid_mapping.get(cid)
if not attachment_data and node.get('data-filename'):
attachment_data = name_mapping.get(node.get('data-filename'), False)
if attachment_data:
node.set('src', '/web/image/%s?access_token=%s' % attachment_data)
postprocessed = True
if postprocessed:
return_values['body'] = lxml.html.tostring(root, pretty_print=False, encoding='UTF-8')
return_values['attachment_ids'] = m2m_attachment_ids
return return_values
@api.returns('mail.message', lambda value: value.id)
def message_post(self, *,
body='', subject=None, message_type='notification',
email_from=None, author_id=None, parent_id=False,
subtype_xmlid=None, subtype_id=False, partner_ids=None,
attachments=None, attachment_ids=None,
add_sign=True, record_name=False,
**kwargs):
""" Post a new message in an existing thread, returning the new
mail.message ID.
:param str body: body of the message, usually raw HTML that will
be sanitized
:param str subject: subject of the message
:param str message_type: see mail_message.message_type field. Can be anything but
user_notification, reserved for message_notify
:param int parent_id: handle thread formation
:param int subtype_id: subtype_id of the message, used mainly use for
followers notification mechanism;
:param list(int) partner_ids: partner_ids to notify in addition to partners
computed based on subtype / followers matching;
:param list(tuple(str,str), tuple(str,str, dict) or int) attachments : list of attachment tuples in the form
``(name,content)`` or ``(name,content, info)``, where content is NOT base64 encoded
:param list(int) attachment_ids: list of existing attachments to link to this message
- Should only be set by chatter
- Attachment objects attached to mail.compose.message(0) will be attached
to the related document.
Extra keyword arguments will be used as default column values for the
new mail.message record.
:return int: ID of newly created mail.message
"""
self.ensure_one() # should always be posted on a record, use message_notify if no record
# split message additional values from notify additional values
msg_kwargs = dict((key, val) for key, val in kwargs.items() if key in self.env['mail.message']._fields)
notif_kwargs = dict((key, val) for key, val in kwargs.items() if key not in msg_kwargs)
# preliminary value safety check
partner_ids = set(partner_ids or [])
if self._name == 'mail.thread' or not self.id or message_type == 'user_notification':
raise ValueError(_('Posting a message should be done on a business document. Use message_notify to send a notification to a user.'))
if 'channel_ids' in kwargs:
raise ValueError(_("Posting a message with channels as listeners is not supported since Odoo 14.3+. Please update code accordingly."))
if 'model' in msg_kwargs or 'res_id' in msg_kwargs:
raise ValueError(_("message_post does not support model and res_id parameters anymore. Please call message_post on record."))
if 'subtype' in kwargs:
raise ValueError(_("message_post does not support subtype parameter anymore. Please give a valid subtype_id or subtype_xmlid value instead."))
if any(not isinstance(pc_id, int) for pc_id in partner_ids):
raise ValueError(_('message_post partner_ids must be a list of integers, not commands.'))
self = self._fallback_lang()  # add lang to context immediately since it will be useful in various flows later.
# Explicit access rights check, because display_name is computed as sudo.
self.check_access_rights('read')
self.check_access_rule('read')
record_name = record_name or self.display_name
# Find the message's author
guest = self.env['mail.guest']._get_guest_from_context()
if self.env.user._is_public() and guest:
author_guest_id = guest.id
author_id, email_from = False, False
else:
author_guest_id = False
author_id, email_from = self._message_compute_author(author_id, email_from, raise_exception=True)
if subtype_xmlid:
subtype_id = self.env['ir.model.data']._xmlid_to_res_id(subtype_xmlid)
if not subtype_id:
subtype_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note')
# automatically subscribe recipients if asked to
if self._context.get('mail_post_autofollow') and partner_ids:
self.message_subscribe(partner_ids=list(partner_ids))
values = dict(msg_kwargs)
values.update({
'author_id': author_id,
'author_guest_id': author_guest_id,
'email_from': email_from,
'model': self._name,
'res_id': self.id,
'body': body,
'subject': subject or False,
'message_type': message_type,
'parent_id': self._message_compute_parent_id(parent_id),
'subtype_id': subtype_id,
'partner_ids': partner_ids,
'add_sign': add_sign,
'record_name': record_name,
})
attachments = attachments or []
attachment_ids = attachment_ids or []
attachement_values = self._message_post_process_attachments(attachments, attachment_ids, values)
values.update(attachement_values) # attachement_ids, [body]
new_message = self._message_create(values)
# Set main attachment field if necessary
self._message_set_main_attachment_id(values['attachment_ids'])
if values['author_id'] and values['message_type'] != 'notification' and not self._context.get('mail_create_nosubscribe'):
if self.env['res.partner'].browse(values['author_id']).active:  # we don't want to add odoobot/inactive as a follower
self._message_subscribe(partner_ids=[values['author_id']])
self._message_post_after_hook(new_message, values)
self._notify_thread(new_message, values, **notif_kwargs)
return new_message
def _message_set_main_attachment_id(self, attachment_ids): # todo move this out of mail.thread
if not self._abstract and attachment_ids and not self.message_main_attachment_id:
all_attachments = self.env['ir.attachment'].browse([attachment_tuple[1] for attachment_tuple in attachment_ids])
prioritary_attachments = all_attachments.filtered(lambda x: x.mimetype.endswith('pdf')) \
or all_attachments.filtered(lambda x: x.mimetype.startswith('image')) \
or all_attachments
self.sudo().with_context(tracking_disable=True).write({'message_main_attachment_id': prioritary_attachments[0].id})
def _message_post_after_hook(self, message, msg_vals):
""" Hook to add custom behavior after having posted the message. Both
message and computed value are given, to try to lessen query count by
using already-computed values instead of having to rebrowse things. """
def _message_update_content_after_hook(self, message):
""" Hook to add custom behavior after having updated the message content. """
def _message_add_reaction_after_hook(self, message, content):
""" Hook to add custom behavior after having added a reaction to a message. """
def _message_remove_reaction_after_hook(self, message, content):
""" Hook to add custom behavior after having removed a reaction from a message. """
def _check_can_update_message_content(self, message):
"""" Checks that the current user can update the content of the message. """
note_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note')
if not message.subtype_id.id == note_id:
raise exceptions.UserError(_("Only logged notes can have their content updated on model '%s'", self._name))
if message.tracking_value_ids:
raise exceptions.UserError(_("Messages with tracking values cannot be modified"))
if not message.message_type == 'comment':
raise exceptions.UserError(_("Only messages type comment can have their content updated"))
# ------------------------------------------------------
# MESSAGE POST TOOLS
# ------------------------------------------------------
def _message_compose_with_view(self, views_or_xmlid, message_log=False, **kwargs):
""" Helper method to send a mail / post a message / log a note using
a view_id to render using the ir.qweb engine. This method is stand
alone, because there is nothing in template and composer that allows
to handle views in batch. This method should probably disappear when
templates handle ir ui views. """
values = kwargs.pop('values', None) or dict()
try:
from odoo.addons.http_routing.models.ir_http import slug
values['slug'] = slug
except ImportError:
values['slug'] = lambda self: self.id
if isinstance(views_or_xmlid, str):
views = self.env.ref(views_or_xmlid, raise_if_not_found=False)
else:
views = views_or_xmlid
if not views:
return
messages_as_sudo = self.env['mail.message']
for record in self:
values['object'] = record
rendered_template = views._render(values, engine='ir.qweb', minimal_qcontext=True)
if message_log:
messages_as_sudo += record._message_log(body=rendered_template, **kwargs)
else:
kwargs['body'] = rendered_template
# ``Composer._action_send_mail`` returns None in 15.0, no message to return here
record.message_post_with_template(False, **kwargs)
return messages_as_sudo
def message_post_with_view(self, views_or_xmlid, **kwargs):
""" Helper method to send a mail / post a message using a view_id """
self._message_compose_with_view(views_or_xmlid, **kwargs)
def message_post_with_template(self, template_id, email_layout_xmlid=None, auto_commit=False, **kwargs):
""" Helper method to send a mail with a template
:param template_id: the id of the template to render to create the body of the message
:param **kwargs: parameters to create a mail.compose.message wizard (which inherits from mail.message)
"""
# Get composition mode, or force it according to the number of records in self
if not kwargs.get('composition_mode'):
kwargs['composition_mode'] = 'comment' if len(self.ids) == 1 else 'mass_mail'
if not kwargs.get('message_type'):
kwargs['message_type'] = 'notification'
res_id = kwargs.get('res_id', self.ids and self.ids[0] or 0)
res_ids = kwargs.get('res_id') and [kwargs['res_id']] or self.ids
# Create the composer
composer = self.env['mail.compose.message'].with_context(
active_id=res_id,
active_ids=res_ids,
active_model=kwargs.get('model', self._name),
default_composition_mode=kwargs['composition_mode'],
default_model=kwargs.get('model', self._name),
default_res_id=res_id,
default_template_id=template_id,
custom_layout=email_layout_xmlid,
).create(kwargs)
# Simulate the onchange (as if triggered from the form view) only
# when having a template in single-email mode
if template_id:
update_values = composer._onchange_template_id(template_id, kwargs['composition_mode'], self._name, res_id)['value']
composer.write(update_values)
return composer._action_send_mail(auto_commit=auto_commit)
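# Illustrative usage sketch: ``template_id`` is the database id of a mail.template,
# typically fetched through an xmlid (the xmlid below is hypothetical).
#
#     template = self.env.ref('my_module.mail_template_order_confirmation')
#     record.message_post_with_template(template.id)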
def message_notify(self, *,
partner_ids=False, parent_id=False, model=False, res_id=False,
author_id=None, email_from=None, body='', subject=False, **kwargs):
""" Shortcut allowing to notify partners of messages that shouldn't be
displayed on a document. It pushes notifications on inbox or by email depending
on the user configuration, like other notifications. """
if self:
self.ensure_one()
# split message additional values from notify additional values
msg_kwargs = dict((key, val) for key, val in kwargs.items() if key in self.env['mail.message']._fields)
notif_kwargs = dict((key, val) for key, val in kwargs.items() if key not in msg_kwargs)
author_id, email_from = self._message_compute_author(author_id, email_from, raise_exception=True)
if not partner_ids:
_logger.warning('Message notify called without partner_ids, skipping')
return self.env['mail.message']
if not (model and res_id):  # both values should be set, or none of them (record)
model = False
res_id = False
MailThread = self.env['mail.thread']
values = {
'parent_id': parent_id,
'model': self._name if self else model,
'res_id': self.id if self else res_id,
'message_type': 'user_notification',
'subject': subject,
'body': body,
'author_id': author_id,
'email_from': email_from,
'partner_ids': partner_ids,
'subtype_id': self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note'),
'is_internal': True,
'record_name': False,
'reply_to': MailThread._notify_get_reply_to(default=email_from, records=None)[False],
'message_id': tools.generate_tracking_message_id('message-notify'),
}
values.update(msg_kwargs)
new_message = MailThread._message_create(values)
MailThread._notify_thread(new_message, values, **notif_kwargs)
return new_message
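# Illustrative usage sketch: notify given partners without posting the message on
# the record's chatter (the recipient below is a placeholder recordset).
#
#     record.message_notify(
#         partner_ids=manager.partner_id.ids,
#         subject=_("Reminder"),
#         body=_("Please review this document."),
#     )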
def _message_log_with_view(self, views_or_xmlid, **kwargs):
""" Helper method to log a note using a view_id without notifying followers. """
return self._message_compose_with_view(views_or_xmlid, message_log=True, **kwargs)
def _message_log(self, *, body='', author_id=None, email_from=None, subject=False, message_type='notification', **kwargs):
""" Shortcut allowing to post note on a document. It does not perform
any notification and pre-computes some values to have a short code
as optimized as possible. This method is private as it does not check
access rights and perform the message creation as sudo to speedup
the log process. This method should be called within methods where
access rights are already granted to avoid privilege escalation. """
self.ensure_one()
author_id, email_from = self._message_compute_author(author_id, email_from, raise_exception=False)
message_values = {
'subject': subject,
'body': body,
'author_id': author_id,
'email_from': email_from,
'message_type': message_type,
'model': kwargs.get('model', self._name),
'res_id': self.ids[0] if self.ids else False,
'subtype_id': self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note'),
'is_internal': True,
'record_name': False,
'reply_to': self.env['mail.thread']._notify_get_reply_to(default=email_from, records=None)[False],
'message_id': tools.generate_tracking_message_id('message-notify'), # why? this is all but a notify
}
message_values.update(kwargs)
return self.sudo()._message_create(message_values)
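# Illustrative usage sketch: log an internal note without notifying anyone,
# typically from business code where access rights were already checked.
#
#     record._message_log(body=_("Stage automatically updated by the scheduler."))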
def _message_log_batch(self, bodies, author_id=None, email_from=None, subject=False, message_type='notification'):
""" Shortcut allowing to post notes on a batch of documents. It achieve the
same purpose as _message_log, done in batch to speedup quick note log.
:param bodies: dict {record_id: body}
"""
author_id, email_from = self._message_compute_author(author_id, email_from, raise_exception=False)
base_message_values = {
'subject': subject,
'author_id': author_id,
'email_from': email_from,
'message_type': message_type,
'model': self._name,
'subtype_id': self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note'),
'is_internal': True,
'record_name': False,
'reply_to': self.env['mail.thread']._notify_get_reply_to(default=email_from, records=None)[False],
'message_id': tools.generate_tracking_message_id('message-notify'), # why? this is all but a notify
}
values_list = [dict(base_message_values,
res_id=record.id,
body=bodies.get(record.id, ''))
for record in self]
return self.sudo()._message_create(values_list)
def _message_compute_author(self, author_id=None, email_from=None, raise_exception=True):
""" Tool method computing author information for messages. Purpose is
to ensure maximum coherence between author / current user / email_from
when sending emails. """
if author_id is None:
if email_from:
author = self._mail_find_partner_from_emails([email_from])[0]
else:
author = self.env.user.partner_id
email_from = author.email_formatted
author_id = author.id
if email_from is None:
if author_id:
author = self.env['res.partner'].browse(author_id)
email_from = author.email_formatted
# superuser mode without author email -> probably public user; anyway we don't want to crash
if not email_from and not self.env.su and raise_exception:
raise exceptions.UserError(_("Unable to log message, please configure the sender's email address."))
return author_id, email_from
def _message_compute_parent_id(self, parent_id):
# parent management, depending on ``_mail_flat_thread``
# ``_mail_flat_thread`` True: no free message. If no parent, find the first
# posted message and attach new message to it. If parent, get back to the first
# ancestor and attach it. We don't keep hierarchy (one level of threading).
# ``_mail_flat_thread`` False: free message = new thread (think of mailing lists).
# If parent get up one level to try to flatten threads without completely
# removing hierarchy.
MailMessage_sudo = self.env['mail.message'].sudo()
if self._mail_flat_thread and not parent_id:
parent_message = MailMessage_sudo.search([('res_id', '=', self.id), ('model', '=', self._name), ('message_type', '!=', 'user_notification')], order="id ASC", limit=1)
# parent_message searched in sudo for performance, only used for id.
# Note that with sudo we will match message with internal subtypes.
parent_id = parent_message.id if parent_message else False
elif parent_id:
current_ancestor = MailMessage_sudo.search([('id', '=', parent_id), ('parent_id', '!=', False)])
if self._mail_flat_thread:
if current_ancestor:
# avoid loops when finding ancestors
processed_list = []
while (current_ancestor.parent_id and current_ancestor.parent_id not in processed_list):
processed_list.append(current_ancestor)
current_ancestor = current_ancestor.parent_id
parent_id = current_ancestor.id
else:
parent_id = current_ancestor.parent_id.id if current_ancestor.parent_id else parent_id
return parent_id
def _message_create(self, values_list):
if not isinstance(values_list, list):
values_list = [values_list]
create_values_list = []
for values in values_list:
create_values = dict(values)
# Avoid warnings about non-existing fields
for x in ('from', 'to', 'cc', 'canned_response_ids'):
create_values.pop(x, None)
create_values['partner_ids'] = [Command.link(pid) for pid in create_values.get('partner_ids', [])]
create_values_list.append(create_values)
# remove context, notably for default keys, as this thread method is not
# meant to propagate default values for messages, only for master records
return self.env['mail.message'].with_context(
clean_context(self.env.context)
).create(create_values_list)
# ------------------------------------------------------
# NOTIFICATION API
# ------------------------------------------------------
def _notify_thread(self, message, msg_vals=False, notify_by_email=True, **kwargs):
""" Main notification method. This method basically does two things
* call ``_notify_compute_recipients`` that computes recipients to
notify based on message record or message creation values if given
(to optimize performance if we already have data computed);
* performs the notification process by calling the various notification
methods implemented;
:param message: mail.message record to notify;
:param msg_vals: dictionary of values used to create the message. If given
it is used instead of accessing ``self`` to lessen query count in some
simple cases where no notification is actually required;
Kwargs allow to pass various parameters that are given to sub notification
methods. See those methods for more details about the additional parameters.
Parameters used for email-style notifications
"""
msg_vals = msg_vals if msg_vals else {}
rdata = self._notify_compute_recipients(message, msg_vals)
if not rdata:
return rdata
self._notify_record_by_inbox(message, rdata, msg_vals=msg_vals, **kwargs)
if notify_by_email:
self._notify_record_by_email(message, rdata, msg_vals=msg_vals, **kwargs)
return rdata
def _notify_record_by_inbox(self, message, recipients_data, msg_vals=False, **kwargs):
""" Notification method: inbox. Do two main things
* create an inbox notification for users;
* send bus notifications;
TDE/XDO TODO: flag rdata directly, with for example r['notif'] = 'ocn_client' and r['needaction']=False
and correctly override notify_recipients
"""
bus_notifications = []
inbox_pids = [r['id'] for r in recipients_data if r['notif'] == 'inbox']
if inbox_pids:
notif_create_values = [{
'mail_message_id': message.id,
'res_partner_id': pid,
'notification_type': 'inbox',
'notification_status': 'sent',
} for pid in inbox_pids]
self.env['mail.notification'].sudo().create(notif_create_values)
message_format_values = message.message_format()[0]
for partner_id in inbox_pids:
bus_notifications.append((self.env['res.partner'].browse(partner_id), 'mail.message/inbox', dict(message_format_values)))
self.env['bus.bus'].sudo()._sendmany(bus_notifications)
def _notify_record_by_email(self, message, recipients_data, msg_vals=False,
model_description=False, mail_auto_delete=True, check_existing=False,
force_send=True, send_after_commit=True,
**kwargs):
""" Method to send email linked to notified messages.
:param message: mail.message record to notify;
:param recipients_data: see ``_notify_thread``;
:param msg_vals: see ``_notify_thread``;
:param model_description: model description used in email notification process
(computed if not given);
:param mail_auto_delete: delete notification emails once sent;
:param check_existing: check for existing notifications to update based on
mailed recipient, otherwise create new notifications;
:param force_send: send emails directly instead of using queue;
:param send_after_commit: if force_send, tells whether to send emails after
the transaction has been committed using a post-commit hook;
"""
partners_data = [r for r in recipients_data if r['notif'] == 'email']
if not partners_data:
return True
model = msg_vals.get('model') if msg_vals else message.model
model_name = model_description or (self._fallback_lang().env['ir.model']._get(model).display_name if model else False) # one query for display name
recipients_groups_data = self._notify_classify_recipients(partners_data, model_name, msg_vals=msg_vals)
if not recipients_groups_data:
return True
force_send = self.env.context.get('mail_notify_force_send', force_send)
template_values = self._notify_prepare_template_context(message, msg_vals, model_description=model_description) # 10 queries
email_layout_xmlid = msg_vals.get('email_layout_xmlid') if msg_vals else message.email_layout_xmlid
template_xmlid = email_layout_xmlid if email_layout_xmlid else 'mail.message_notification_email'
try:
base_template = self.env.ref(template_xmlid, raise_if_not_found=True).with_context(lang=template_values['lang']) # 1 query
except ValueError:
_logger.warning('QWeb template %s not found when sending notification emails. Sending without layout.', template_xmlid)
base_template = False
mail_subject = message.subject or (message.record_name and 'Re: %s' % message.record_name) # in cache, no queries
# Replace new lines by spaces to conform to email headers requirements
mail_subject = ' '.join((mail_subject or '').splitlines())
# compute references: set references to the parent and add current message just to
# have a fallback in case replies mess with Message-Id in the In-Reply-To (e.g. amazon
# SES SMTP may replace Message-Id and In-Reply-To refers an internal ID not stored in Odoo)
message_sudo = message.sudo()
if message_sudo.parent_id:
references = f'{message_sudo.parent_id.message_id} {message_sudo.message_id}'
else:
references = message_sudo.message_id
# prepare notification mail values
base_mail_values = {
'mail_message_id': message.id,
'mail_server_id': message.mail_server_id.id,  # 2 queries (access check + read); may be useless / falsy, when will it be used?
'auto_delete': mail_auto_delete,
# due to ir.rule, users have no right to access the parent message if the message is not published
'references': references,
'subject': mail_subject,
}
base_mail_values = self._notify_by_email_add_values(base_mail_values)
# Clean the context to get rid of residual default_* keys that could cause issues during
# the mail.mail creation.
# Example: 'default_state' would refer to the default state of a previously created record
# from another model that in turns triggers an assignation notification that ends up here.
# This will lead to a traceback when trying to create a mail.mail with this state value that
# doesn't exist.
SafeMail = self.env['mail.mail'].sudo().with_context(clean_context(self._context))
SafeNotification = self.env['mail.notification'].sudo().with_context(clean_context(self._context))
emails = self.env['mail.mail'].sudo()
# loop on groups (customer, portal, user, ... + model specific like group_sale_salesman)
notif_create_values = []
recipients_max = 50
for recipients_group_data in recipients_groups_data:
# generate notification email content
recipients_ids = recipients_group_data.pop('recipients')
render_values = {**template_values, **recipients_group_data}
# {company, is_discussion, lang, message, model_description, record, record_name, signature, subtype, tracking_values, website_url}
# {actions, button_access, has_button_access, recipients}
if base_template:
mail_body = base_template._render(render_values, engine='ir.qweb', minimal_qcontext=True)
else:
mail_body = message.body
mail_body = self.env['mail.render.mixin']._replace_local_links(mail_body)
# create email
for recipients_ids_chunk in split_every(recipients_max, recipients_ids):
recipient_values = self._notify_email_recipient_values(recipients_ids_chunk)
email_to = recipient_values['email_to']
recipient_ids = recipient_values['recipient_ids']
create_values = {
'body_html': mail_body,
'subject': mail_subject,
'recipient_ids': [Command.link(pid) for pid in recipient_ids],
}
if email_to:
create_values['email_to'] = email_to
create_values.update(base_mail_values) # mail_message_id, mail_server_id, auto_delete, references, headers
email = SafeMail.create(create_values)
if email and recipient_ids:
tocreate_recipient_ids = list(recipient_ids)
if check_existing:
existing_notifications = self.env['mail.notification'].sudo().search([
('mail_message_id', '=', message.id),
('notification_type', '=', 'email'),
('res_partner_id', 'in', tocreate_recipient_ids)
])
if existing_notifications:
tocreate_recipient_ids = [rid for rid in recipient_ids if rid not in existing_notifications.mapped('res_partner_id.id')]
existing_notifications.write({
'notification_status': 'ready',
'mail_mail_id': email.id,
})
notif_create_values += [{
'mail_message_id': message.id,
'res_partner_id': recipient_id,
'notification_type': 'email',
'mail_mail_id': email.id,
'is_read': True, # discard Inbox notification
'notification_status': 'ready',
} for recipient_id in tocreate_recipient_ids]
emails |= email
if notif_create_values:
SafeNotification.create(notif_create_values)
# NOTE:
# 1. for more than 50 followers, use the queue system
# 2. do not send emails immediately if the registry is not loaded,
# to prevent sending email during a simple update of the database
# using the command-line.
test_mode = getattr(threading.current_thread(), 'testing', False)
if force_send and len(emails) < recipients_max and (not self.pool._init or test_mode):
# unless asked specifically, send emails after the transaction to
# avoid side effects due to emails being sent while the transaction fails
if not test_mode and send_after_commit:
email_ids = emails.ids
dbname = self.env.cr.dbname
_context = self._context
@self.env.cr.postcommit.add
def send_notifications():
db_registry = registry(dbname)
with db_registry.cursor() as cr:
env = api.Environment(cr, SUPERUSER_ID, _context)
env['mail.mail'].browse(email_ids).send()
else:
emails.send()
return True
@api.model
def _notify_prepare_template_context(self, message, msg_vals, model_description=False, mail_auto_delete=True):
# compute send user and its related signature
signature = ''
user = self.env.user
author = message.env['res.partner'].browse(msg_vals.get('author_id')) if msg_vals else message.author_id
model = msg_vals.get('model') if msg_vals else message.model
add_sign = msg_vals.get('add_sign') if msg_vals else message.add_sign
subtype_id = msg_vals.get('subtype_id') if msg_vals else message.subtype_id.id
message_id = message.id
record_name = msg_vals.get('record_name') if msg_vals else message.record_name
author_user = user if user.partner_id == author else author.user_ids[0] if author and author.user_ids else False
# trying to use user (self.env.user) instead of browsing user_ids, if they are the author, will give a sudo user,
# improving access performance and cache usage.
if author_user:
user = author_user
if add_sign:
signature = user.signature
elif add_sign and author.name:
signature = Markup("<p>-- <br/>%s</p>") % author.name
# company value should fall back on env.company if:
# - no company_id field on record
# - company_id field available but not set
company = self.company_id.sudo() if self and 'company_id' in self and self.company_id else self.env.company
if company.website:
website_url = 'http://%s' % company.website if not company.website.lower().startswith(('http:', 'https:')) else company.website
else:
website_url = False
# Retrieve the language in which the template was rendered, in order to render the custom
# layout in the same language.
# TDE FIXME: this whole mess should be cleaned!
lang = self.env.context.get('lang')
if {'default_template_id', 'default_model', 'default_res_id'} <= self.env.context.keys():
template = self.env['mail.template'].browse(self.env.context['default_template_id'])
if template and template.lang:
lang = template._render_lang([self.env.context['default_res_id']])[self.env.context['default_res_id']]
if not model_description and model:
model_description = self.env['ir.model'].with_context(lang=lang)._get(model).display_name
tracking = []
if msg_vals.get('tracking_value_ids', True) if msg_vals else bool(self): # could be tracking
for tracking_value in self.env['mail.tracking.value'].sudo().search([('mail_message_id', '=', message.id)]):
groups = tracking_value.field_groups
if not groups or self.env.is_superuser() or self.user_has_groups(groups):
tracking.append((tracking_value.field_desc,
tracking_value.get_old_display_value()[0],
tracking_value.get_new_display_value()[0]))
is_discussion = subtype_id == self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment')
return {
'message': message,
'signature': signature,
'website_url': website_url,
'company': company,
'model_description': model_description,
'record': self,
'record_name': record_name,
'tracking_values': tracking,
'is_discussion': is_discussion,
'subtype': message.subtype_id,
'lang': lang,
}
def _notify_by_email_add_values(self, base_mail_values):
""" Add model-specific values to the dictionary used to create the
notification email. Its base behavior is to compute model-specific
headers.
:param dict base_mail_values: base mail.mail values, holding message
to notify (mail_message_id and its fields), server, references, subject.
"""
headers = self._notify_email_headers()
if headers:
base_mail_values['headers'] = headers
return base_mail_values
def _notify_compute_recipients(self, message, msg_vals):
""" Compute recipients to notify based on subtype and followers. This
method returns data structured as expected for ``_notify_recipients``. """
msg_sudo = message.sudo()
# get values from msg_vals or from message if msg_vals doesn't exist
pids = msg_vals.get('partner_ids', []) if msg_vals else msg_sudo.partner_ids.ids
message_type = msg_vals.get('message_type') if msg_vals else msg_sudo.message_type
subtype_id = msg_vals.get('subtype_id') if msg_vals else msg_sudo.subtype_id.id
# is it possible to have record but no subtype_id ?
recipients_data = []
res = self.env['mail.followers']._get_recipient_data(self, message_type, subtype_id, pids)
if not res:
return recipients_data
author_id = msg_vals.get('author_id') or message.author_id.id
for pid, active, pshare, notif, groups in res:
if pid and pid == author_id and not self.env.context.get('mail_notify_author'): # do not notify the author of its own messages
continue
if pid:
if active is False:
continue
pdata = {'id': pid, 'active': active, 'share': pshare, 'groups': groups or []}
if notif == 'inbox':
recipients_data.append(dict(pdata, notif=notif, type='user'))
elif not pshare and notif:  # has a user and is not shared, is therefore a user
recipients_data.append(dict(pdata, notif=notif, type='user'))
elif pshare and notif:  # has a user but is shared, is therefore portal
recipients_data.append(dict(pdata, notif=notif, type='portal'))
else: # has no user, is therefore customer
recipients_data.append(dict(pdata, notif=notif if notif else 'email', type='customer'))
return recipients_data
@api.model
def _notify_encode_link(self, base_link, params):
secret = self.env['ir.config_parameter'].sudo().get_param('database.secret')
token = '%s?%s' % (base_link, ' '.join('%s=%s' % (key, params[key]) for key in sorted(params)))
hm = hmac.new(secret.encode('utf-8'), token.encode('utf-8'), hashlib.sha1).hexdigest()
return hm
def _notify_get_action_link(self, link_type, **kwargs):
""" Prepare link to an action: view document, follow document, ... """
params = {
'model': kwargs.get('model', self._name),
'res_id': kwargs.get('res_id', self.ids and self.ids[0] or False),
}
# whitelist accepted parameters: action (deprecated), token (assign), access_token
# (view), auth_signup_token and auth_login (for auth_signup support)
params.update(dict(
(key, value)
for key, value in kwargs.items()
if key in ('action', 'token', 'access_token', 'auth_signup_token', 'auth_login')
))
if link_type in ['view', 'assign', 'follow', 'unfollow']:
base_link = '/mail/%s' % link_type
elif link_type == 'controller':
controller = kwargs.get('controller')
params.pop('model')
base_link = '%s' % controller
else:
return ''
if link_type not in ['view']:
token = self._notify_encode_link(base_link, params)
params['token'] = token
link = '%s?%s' % (base_link, urls.url_encode(params))
if self:
link = self[0].get_base_url() + link
return link
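# Illustrative usage sketch: for a record of some model with id 42 (placeholder
# values), a 'view' link looks like
#
#     record._notify_get_action_link('view')
#     # -> '<base_url>/mail/view?model=<model>&res_id=42'
#
# non-'view' link types additionally carry a signed ``token`` parameter computed
# by ``_notify_encode_link`` above.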
def _notify_get_groups(self, msg_vals=None):
""" Return groups used to classify recipients of a notification email.
Groups is a list of tuples of the form (group_name, group_func,
group_data) where
* group_name is an identifier used only to be able to override and manipulate
groups. Default groups are user (recipients linked to an employee user),
portal (recipients linked to a portal user) and customer (recipients not
linked to any user). An example of override use would be to add a group
linked to a res.groups like Hr Officers to set specific action buttons to
them.
* group_func is a function taking recipient data (a dict holding notably id,
share, type and groups) as parameter. It is applied on recipients to know
whether they belong to a given group or not. Only the first matching group
is kept. Evaluation order is the
list order.
* group_data is a dict containing parameters for the notification email
* has_button_access: whether to display Access <Document> in email. True
by default for new groups, False for portal / customer.
* button_access: dict with url and title of the button
* actions: list of action buttons to display in the notification email.
Each action is a dict containing url and title of the button.
Groups has a default value that you can find in mail_thread
``_notify_classify_recipients`` method.
"""
return [
(
'user',
lambda pdata: pdata['type'] == 'user',
{}
), (
'portal',
lambda pdata: pdata['type'] == 'portal',
{'has_button_access': False}
), (
'customer',
lambda pdata: True,
{'has_button_access': False}
)
]
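# Illustrative override sketch, following the docstring above: insert a group for a
# specific res.groups before the default ones ('hr.group_hr_user' is only an example
# group xmlid).
#
#     def _notify_get_groups(self, msg_vals=None):
#         groups = super()._notify_get_groups(msg_vals=msg_vals)
#         officer_group_id = self.env.ref('hr.group_hr_user').id
#         groups.insert(0, (
#             'hr_officer',
#             lambda pdata: pdata['type'] == 'user' and officer_group_id in pdata['groups'],
#             {'has_button_access': True},
#         ))
#         return groups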
def _notify_classify_recipients(self, recipient_data, model_name, msg_vals=None):
""" Classify recipients to be notified of a message in groups to have
specific rendering depending on their group. For example users could
have access to buttons customers should not have in their emails.
Module-specific grouping should be done by overriding ``_notify_get_groups``
method defined here-under.
:param recipient_data: list of recipient data dicts, each holding at least
the ``id``, ``notif`` and ``type`` keys used for classification
return example:
[{
'actions': [],
'button_access': {'title': 'View Simple Chatter Model',
'url': '/mail/view?model=mail.test.simple&res_id=1497'},
'has_button_access': False,
'recipients': [11]
},
{
'actions': [],
'button_access': {'title': 'View Simple Chatter Model',
'url': '/mail/view?model=mail.test.simple&res_id=1497'},
'has_button_access': False,
'recipients': [4, 5, 6]
},
{
'actions': [],
'button_access': {'title': 'View Simple Chatter Model',
'url': '/mail/view?model=mail.test.simple&res_id=1497'},
'has_button_access': True,
'recipients': [10, 11, 12]
}]
only return groups with recipients
"""
# keep a local copy of msg_vals as it may be modified to include more information about groups or links
local_msg_vals = dict(msg_vals) if msg_vals else {}
groups = self._notify_get_groups(msg_vals=local_msg_vals)
access_link = self._notify_get_action_link('view', **local_msg_vals)
if model_name:
view_title = _('View %s', model_name)
else:
view_title = _('View')
# fill group_data with default_values if they are not complete
for group_name, group_func, group_data in groups:
group_data.setdefault('notification_group_name', group_name)
group_data.setdefault('notification_is_customer', False)
is_thread_notification = self._notify_get_recipients_thread_info(msg_vals=msg_vals)['is_thread_notification']
group_data.setdefault('has_button_access', is_thread_notification)
group_button_access = group_data.setdefault('button_access', {})
group_button_access.setdefault('url', access_link)
group_button_access.setdefault('title', view_title)
group_data.setdefault('actions', list())
group_data.setdefault('recipients', list())
# classify recipients in each group
for recipient in recipient_data:
for group_name, group_func, group_data in groups:
if group_func(recipient):
group_data['recipients'].append(recipient['id'])
break
result = []
for group_name, group_method, group_data in groups:
if group_data['recipients']:
result.append(group_data)
return result
def _notify_get_recipients_thread_info(self, msg_vals=None):
""" Tool method to compute thread info used in ``_notify_classify_recipients``
and its sub-methods. """
res_model = msg_vals['model'] if msg_vals and 'model' in msg_vals else self._name
res_id = msg_vals['res_id'] if msg_vals and 'res_id' in msg_vals else self.ids[0] if self.ids else False
return {
'is_thread_notification': res_model and (res_model != 'mail.thread') and res_id
}
def _notify_email_recipient_values(self, recipient_ids):
""" Format email notification recipient values to store on the notification
mail.mail. The basic method just sets the recipient partners as mail_mail
recipients. Override to generate other mail values like email_to or
email_cc.
:param recipient_ids: res.partner recordset to notify
"""
return {
'email_to': False,
'recipient_ids': recipient_ids,
}
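# Override sketch (assumption: a model that prefers a single outgoing email with
# explicit addresses over per-partner mail.mail recipients):
#
#     def _notify_email_recipient_values(self, recipient_ids):
#         partners = self.env['res.partner'].sudo().browse(recipient_ids)
#         return {
#             'email_to': ','.join(p.email_formatted for p in partners if p.email_formatted),
#             'recipient_ids': [],
#         }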
# ------------------------------------------------------
# FOLLOWERS API
# ------------------------------------------------------
def message_subscribe(self, partner_ids=None, subtype_ids=None):
""" Main public API to add followers to a record set. Its main purpose is
to perform access rights checks before calling ``_message_subscribe``. """
if not self or not partner_ids:
return True
partner_ids = partner_ids or []
adding_current = set(partner_ids) == set([self.env.user.partner_id.id])
customer_ids = [] if adding_current else None
if partner_ids and adding_current:
try:
self.check_access_rights('read')
self.check_access_rule('read')
except exceptions.AccessError:
return False
else:
self.check_access_rights('write')
self.check_access_rule('write')
# filter inactive and private addresses
if partner_ids and not adding_current:
partner_ids = self.env['res.partner'].sudo().search([('id', 'in', partner_ids), ('active', '=', True), ('type', '!=', 'private')]).ids
return self._message_subscribe(partner_ids, subtype_ids, customer_ids=customer_ids)
def _message_subscribe(self, partner_ids=None, subtype_ids=None, customer_ids=None):
""" Main private API to add followers to a record set. This method adds
partners and channels, given their IDs, as followers of all records
contained in the record set.
If subtypes are given, the subtypes of existing followers are replaced with
the new ones. If default subtypes have to be computed, only missing followers
will be added, with default subtypes matching the record set model.
This private method does not specifically check for access right. Use
``message_subscribe`` public API when not sure about access rights.
:param customer_ids: see ``_insert_followers`` """
if not self:
return True
if not subtype_ids:
self.env['mail.followers']._insert_followers(
self._name, self.ids,
partner_ids, subtypes=None,
customer_ids=customer_ids, check_existing=True, existing_policy='skip')
else:
self.env['mail.followers']._insert_followers(
self._name, self.ids,
partner_ids, subtypes=dict((pid, subtype_ids) for pid in partner_ids),
customer_ids=customer_ids, check_existing=True, existing_policy='replace')
return True
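# Usage sketch (record and ids are illustrative):
#
#     record.message_subscribe(partner_ids=[7, 8])         # access checked, default subtypes
#     record._message_subscribe([7], subtype_ids=[1, 3])   # no access check, replaces subtypes of partner 7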
def message_unsubscribe(self, partner_ids=None):
""" Remove partners from the records followers. """
# not necessary for computation, but saves an access right check
if not partner_ids:
return True
if set(partner_ids) == set([self.env.user.partner_id.id]):
self.check_access_rights('read')
self.check_access_rule('read')
else:
self.check_access_rights('write')
self.check_access_rule('write')
self.env['mail.followers'].sudo().search([
('res_model', '=', self._name),
('res_id', 'in', self.ids),
('partner_id', 'in', partner_ids or []),
]).unlink()
def _message_auto_subscribe_followers(self, updated_values, default_subtype_ids):
""" Optional method to override in addons inheriting from mail.thread.
Return a list of tuples containing (
partner ID,
subtype IDs (or False if model-based default subtypes),
QWeb template XML ID for notification (or False if no specific
notification is required),
), aka partners and their subtype and possible notification to send
using the auto subscription mechanism linked to updated values.
Default value of this method is to return the new responsible of
documents. This is done using relational fields linking to res.users
with tracking (or the legacy track_visibility) enabled. Since OpenERP v7
such a user is considered as being responsible for the document, and the
standard behavior is therefore to subscribe the user and send them a notification.
Override this method to change that behavior and/or to add people to
notify, using possible custom notification.
:param updated_values: see ``_message_auto_subscribe``
:param default_subtype_ids: coming from ``_get_auto_subscription_subtypes``
"""
fnames = []
field = self._fields.get('user_id')
user_id = updated_values.get('user_id')
if field and user_id and field.comodel_name == 'res.users' and (getattr(field, 'track_visibility', False) or getattr(field, 'tracking', False)):
user = self.env['res.users'].sudo().browse(user_id)
try:  # avoid making an exists() call; let's be optimistic and try to read it.
if user.active:
return [(user.partner_id.id, default_subtype_ids, 'mail.message_user_assigned' if user != self.env.user else False)]
except Exception:
pass
return []
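# Override sketch (the 'reviewer_id' field is an assumption; False as third value
# means "subscribe without sending a specific notification"):
#
#     def _message_auto_subscribe_followers(self, updated_values, default_subtype_ids):
#         res = super()._message_auto_subscribe_followers(updated_values, default_subtype_ids)
#         if updated_values.get('reviewer_id'):
#             reviewer = self.env['res.users'].sudo().browse(updated_values['reviewer_id'])
#             if reviewer.active:
#                 res.append((reviewer.partner_id.id, default_subtype_ids, False))
#         return res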
def _message_auto_subscribe_notify(self, partner_ids, template):
""" Notify new followers, using a template to render the content of the
notification message. Notifications pushed are done using the standard
notification mechanism in mail.thread. It is either inbox or email,
depending on the partner state: no user (email, customer), share user
(email, customer) or classic user (notification_type).
:param partner_ids: IDs of partner to notify;
:param template: XML ID of template used for the notification;
"""
if not self or self.env.context.get('mail_auto_subscribe_no_notify'):
return
if not self.env.registry.ready: # Don't send notification during install
return
view = self.env['ir.ui.view'].browse(self.env['ir.model.data']._xmlid_to_res_id(template))
for record in self:
model_description = self.env['ir.model']._get(record._name).display_name
values = {
'object': record,
'model_description': model_description,
'access_link': record._notify_get_action_link('view'),
}
assignation_msg = view._render(values, engine='ir.qweb', minimal_qcontext=True)
assignation_msg = self.env['mail.render.mixin']._replace_local_links(assignation_msg)
record.message_notify(
subject=_('You have been assigned to %s', record.display_name),
body=assignation_msg,
partner_ids=partner_ids,
record_name=record.display_name,
email_layout_xmlid='mail.mail_notification_light',
model_description=model_description,
)
def _message_auto_subscribe(self, updated_values, followers_existing_policy='skip'):
""" Handle auto subscription. Auto subscription is done based on two
main mechanisms
* using subtypes parent relationship. For example following a parent record
(i.e. project) with subtypes linked to child records (i.e. task). See
mail.message.subtype ``_get_auto_subscription_subtypes``;
* calling ``_message_auto_subscribe_followers`` that returns a list of partners
to subscribe, as well as data about the subtypes and the notification
to send. Base behavior is to subscribe the responsible and notify them;
Adding application-specific auto subscription should be done by overriding
``_message_auto_subscribe_followers``. It should return structured data
for new partner to subscribe, with subtypes and eventual notification
to perform. See that method for more details.
:param updated_values: values modifying the record, triggering auto subscription
"""
if not self:
return True
new_partner_subtypes = dict()
# return data related to auto subscription based on subtype matching (aka:
# default task subtypes or subtypes from project triggering task subtypes)
updated_relation = dict()
child_ids, def_ids, all_int_ids, parent, relation = self.env['mail.message.subtype']._get_auto_subscription_subtypes(self._name)
# check effectively modified relation field
for res_model, fnames in relation.items():
for field in (fname for fname in fnames if updated_values.get(fname)):
updated_relation.setdefault(res_model, set()).add(field)
updated_fields = [fname for fnames in updated_relation.values() for fname in fnames if updated_values.get(fname)]
if updated_fields:
# fetch "parent" subscription data (aka: subtypes on project to propagate on task)
doc_data = [(model, [updated_values[fname] for fname in fnames]) for model, fnames in updated_relation.items()]
res = self.env['mail.followers']._get_subscription_data(doc_data, None, include_pshare=True, include_active=True)
for _fol_id, _res_id, partner_id, subtype_ids, pshare, active in res:
# use project.task_new -> task.new link
sids = [parent[sid] for sid in subtype_ids if parent.get(sid)]
# add checked subtypes matching model_name
sids += [sid for sid in subtype_ids if sid not in parent and sid in child_ids]
if partner_id and active: # auto subscribe only active partners
if pshare: # remove internal subtypes for customers
new_partner_subtypes[partner_id] = set(sids) - set(all_int_ids)
else:
new_partner_subtypes[partner_id] = set(sids)
notify_data = dict()
res = self._message_auto_subscribe_followers(updated_values, def_ids)
for partner_id, sids, template in res:
new_partner_subtypes.setdefault(partner_id, sids)
if template:
partner = self.env['res.partner'].browse(partner_id)
lang = partner.lang if partner else None
notify_data.setdefault((template, lang), list()).append(partner_id)
self.env['mail.followers']._insert_followers(
self._name, self.ids,
list(new_partner_subtypes), subtypes=new_partner_subtypes,
check_existing=True, existing_policy=followers_existing_policy)
# notify people from auto subscription, for example like assignation
for (template, lang), pids in notify_data.items():
self.with_context(lang=lang)._message_auto_subscribe_notify(pids, template)
return True
# ------------------------------------------------------
# CONTROLLERS
# ------------------------------------------------------
def _get_mail_redirect_suggested_company(self):
""" Return the suggested company to be set on the context
in case of a mail redirection to the record. To avoid multi
company issues when clicking on a link sent by email, this
could be called to try setting the most suited company on
the allowed_company_ids in the context. This method can be
overridden, for example on the hr.leave model, where the
most suited company is the company of the leave type, as
specified by the ir.rule.
"""
if 'company_id' in self:
return self.company_id
return False
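# Override sketch, mirroring the hr.leave example from the docstring above
# (the 'holiday_status_id.company_id' field path is an assumption):
#
#     def _get_mail_redirect_suggested_company(self):
#         self.ensure_one()
#         return self.holiday_status_id.company_id or super()._get_mail_redirect_suggested_company()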
| 52.309532 | 150,913 |
183 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
from odoo import fields, models
class View(models.Model):
_inherit = 'ir.ui.view'
type = fields.Selection(selection_add=[('activity', 'Activity')])
| 22.875 | 183 |
1,778 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, fields, tools
class Company(models.Model):
_name = 'res.company'
_inherit = 'res.company'
catchall_email = fields.Char(string="Catchall Email", compute="_compute_catchall")
catchall_formatted = fields.Char(string="Catchall", compute="_compute_catchall")
# the compute method is sudo'ed because it needs to access res.partner records
# portal users cannot access those (but they should be able to read the company email address)
email_formatted = fields.Char(string="Formatted Email",
compute="_compute_email_formatted", compute_sudo=True)
@api.depends('name')
def _compute_catchall(self):
ConfigParameter = self.env['ir.config_parameter'].sudo()
alias = ConfigParameter.get_param('mail.catchall.alias')
domain = ConfigParameter.get_param('mail.catchall.domain')
if alias and domain:
for company in self:
company.catchall_email = '%s@%s' % (alias, domain)
company.catchall_formatted = tools.formataddr((company.name, company.catchall_email))
else:
for company in self:
company.catchall_email = ''
company.catchall_formatted = ''
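# Example (parameter values assumed): with mail.catchall.alias = 'catchall' and
# mail.catchall.domain = 'example.com', a company named "My Company" gets
# catchall_email = '[email protected]' and
# catchall_formatted = 'My Company <[email protected]>'.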
@api.depends('partner_id.email_formatted', 'catchall_formatted')
def _compute_email_formatted(self):
for company in self:
if company.partner_id.email_formatted:
company.email_formatted = company.partner_id.email_formatted
elif company.catchall_formatted:
company.email_formatted = company.catchall_formatted
else:
company.email_formatted = ''
| 44.45 | 1,778 |
1,363 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields
class MailMessageReaction(models.Model):
_name = 'mail.message.reaction'
_description = 'Message Reaction'
_order = 'id desc'
_log_access = False
message_id = fields.Many2one(string="Message", comodel_name='mail.message', ondelete='cascade', required=True, readonly=True)
content = fields.Char(string="Content", required=True, readonly=True)
partner_id = fields.Many2one(string="Reacting Partner", comodel_name='res.partner', ondelete='cascade', readonly=True)
guest_id = fields.Many2one(string="Reacting Guest", comodel_name='mail.guest', ondelete='cascade', readonly=True)
def init(self):
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS mail_message_reaction_partner_unique ON %s (message_id, content, partner_id) WHERE partner_id IS NOT NULL" % self._table)
self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS mail_message_reaction_guest_unique ON %s (message_id, content, guest_id) WHERE guest_id IS NOT NULL" % self._table)
_sql_constraints = [
("partner_or_guest_exists", "CHECK((partner_id IS NOT NULL AND guest_id IS NULL) OR (partner_id IS NULL AND guest_id IS NOT NULL))", "A message reaction must be from a partner or from a guest."),
]
| 56.791667 | 1,363 |
1,193 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class IrModelField(models.Model):
_inherit = 'ir.model.fields'
tracking = fields.Integer(
string="Enable Ordered Tracking",
help="If set every modification done to this field is tracked in the chatter. Value is used to order tracking values.",
)
def _reflect_field_params(self, field, model_id):
""" Tracking value can be either a boolean enabling tracking mechanism
on field, either an integer giving the sequence. Default sequence is
set to 100. """
vals = super(IrModelField, self)._reflect_field_params(field, model_id)
tracking = getattr(field, 'tracking', None)
if tracking is True:
tracking = 100
elif tracking is False:
tracking = None
vals['tracking'] = tracking
return vals
def _instanciate_attrs(self, field_data):
attrs = super(IrModelField, self)._instanciate_attrs(field_data)
if attrs and field_data.get('tracking'):
attrs['tracking'] = field_data['tracking']
return attrs
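# Declaration sketch (the model and fields are hypothetical): `tracking` may be a
# boolean (reflected as the default sequence 100) or an integer used to order
# tracked values in the chatter.
#
#     class Ticket(models.Model):
#         _name = 'x.ticket'
#         _inherit = ['mail.thread']
#
#         name = fields.Char(tracking=True)   # stored with tracking = 100
#         stage = fields.Char(tracking=10)    # displayed before 'name' in tracking values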
| 37.28125 | 1,193 |
5,704 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from odoo import api, fields, models
class MailTracking(models.Model):
_name = 'mail.tracking.value'
_description = 'Mail Tracking Value'
_rec_name = 'field'
_order = 'tracking_sequence asc'
field = fields.Many2one('ir.model.fields', required=True, readonly=1, ondelete='cascade')
field_desc = fields.Char('Field Description', required=True, readonly=1)
field_type = fields.Char('Field Type')
field_groups = fields.Char(compute='_compute_field_groups')
old_value_integer = fields.Integer('Old Value Integer', readonly=1)
old_value_float = fields.Float('Old Value Float', readonly=1)
old_value_monetary = fields.Float('Old Value Monetary', readonly=1)
old_value_char = fields.Char('Old Value Char', readonly=1)
old_value_text = fields.Text('Old Value Text', readonly=1)
old_value_datetime = fields.Datetime('Old Value DateTime', readonly=1)
new_value_integer = fields.Integer('New Value Integer', readonly=1)
new_value_float = fields.Float('New Value Float', readonly=1)
new_value_monetary = fields.Float('New Value Monetary', readonly=1)
new_value_char = fields.Char('New Value Char', readonly=1)
new_value_text = fields.Text('New Value Text', readonly=1)
new_value_datetime = fields.Datetime('New Value Datetime', readonly=1)
currency_id = fields.Many2one('res.currency', 'Currency', readonly=True, ondelete='set null',
help="Used to display the currency when tracking monetary values")
mail_message_id = fields.Many2one('mail.message', 'Message ID', required=True, index=True, ondelete='cascade')
tracking_sequence = fields.Integer('Tracking field sequence', readonly=1, default=100)
def _compute_field_groups(self):
for tracking in self:
model = self.env[tracking.mail_message_id.model]
field = model._fields.get(tracking.field.name)
tracking.field_groups = field.groups if field else 'base.group_system'
@api.model
def create_tracking_values(self, initial_value, new_value, col_name, col_info, tracking_sequence, model_name):
tracked = True
field = self.env['ir.model.fields']._get(model_name, col_name)
if not field:
return
values = {'field': field.id, 'field_desc': col_info['string'], 'field_type': col_info['type'], 'tracking_sequence': tracking_sequence}
if col_info['type'] in ['integer', 'float', 'char', 'text', 'datetime', 'monetary']:
values.update({
'old_value_%s' % col_info['type']: initial_value,
'new_value_%s' % col_info['type']: new_value
})
elif col_info['type'] == 'date':
values.update({
'old_value_datetime': initial_value and fields.Datetime.to_string(datetime.combine(fields.Date.from_string(initial_value), datetime.min.time())) or False,
'new_value_datetime': new_value and fields.Datetime.to_string(datetime.combine(fields.Date.from_string(new_value), datetime.min.time())) or False,
})
elif col_info['type'] == 'boolean':
values.update({
'old_value_integer': initial_value,
'new_value_integer': new_value
})
elif col_info['type'] == 'selection':
values.update({
'old_value_char': initial_value and dict(col_info['selection']).get(initial_value, initial_value) or '',
'new_value_char': new_value and dict(col_info['selection'])[new_value] or ''
})
elif col_info['type'] == 'many2one':
values.update({
'old_value_integer': initial_value and initial_value.id or 0,
'new_value_integer': new_value and new_value.id or 0,
'old_value_char': initial_value and initial_value.sudo().name_get()[0][1] or '',
'new_value_char': new_value and new_value.sudo().name_get()[0][1] or ''
})
else:
tracked = False
if tracked:
return values
return {}
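# Usage sketch (model name and values are assumptions): for a 'char' field the
# old/new values land in old_value_char / new_value_char; 'date' values are
# normalized to a datetime at midnight; unsupported types return {}.
#
#     vals = env['mail.tracking.value'].create_tracking_values(
#         'draft', 'posted', 'state', {'string': 'Status', 'type': 'char'}, 100, 'x.ticket')
#     # -> {'field': <ir.model.fields id>, 'field_desc': 'Status', 'field_type': 'char',
#     #     'tracking_sequence': 100, 'old_value_char': 'draft', 'new_value_char': 'posted'}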
def get_display_value(self, type):
assert type in ('new', 'old')
result = []
for record in self:
if record.field_type in ['integer', 'float', 'char', 'text', 'monetary']:
result.append(getattr(record, '%s_value_%s' % (type, record.field_type)))
elif record.field_type == 'datetime':
if record['%s_value_datetime' % type]:
new_datetime = getattr(record, '%s_value_datetime' % type)
result.append('%sZ' % new_datetime)
else:
result.append(record['%s_value_datetime' % type])
elif record.field_type == 'date':
if record['%s_value_datetime' % type]:
new_date = record['%s_value_datetime' % type]
result.append(fields.Date.to_string(new_date))
else:
result.append(record['%s_value_datetime' % type])
elif record.field_type == 'boolean':
result.append(bool(record['%s_value_integer' % type]))
else:
result.append(record['%s_value_char' % type])
return result
def get_old_display_value(self):
# grep : # old_value_integer | old_value_datetime | old_value_char
return self.get_display_value('old')
def get_new_display_value(self):
# grep : # new_value_integer | new_value_datetime | new_value_char
return self.get_display_value('new')
| 47.140496 | 5,704 |
32,351 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import pytz
from collections import defaultdict
from datetime import date, datetime
from dateutil.relativedelta import relativedelta
from odoo import api, exceptions, fields, models, _, Command
from odoo.osv import expression
from odoo.tools.misc import clean_context
class MailActivity(models.Model):
""" An actual activity to perform. Activities are linked to
documents using res_id and res_model_id fields. Activities have a deadline
that can be used in kanban view to display a status. Once done activities
are unlinked and a message is posted. This message has a new activity_type_id
field that indicates the activity linked to the message. """
_name = 'mail.activity'
_description = 'Activity'
_order = 'date_deadline ASC'
_rec_name = 'summary'
@api.model
def default_get(self, fields):
res = super(MailActivity, self).default_get(fields)
if not fields or 'res_model_id' in fields and res.get('res_model'):
res['res_model_id'] = self.env['ir.model']._get(res['res_model']).id
return res
@api.model
def _default_activity_type_id(self):
ActivityType = self.env["mail.activity.type"]
activity_type_todo = self.env.ref('mail.mail_activity_data_todo', raise_if_not_found=False)
default_vals = self.default_get(['res_model_id', 'res_model'])
if not default_vals.get('res_model_id'):
return ActivityType
current_model_id = default_vals['res_model_id']
current_model = self.env["ir.model"].sudo().browse(current_model_id)
if activity_type_todo and activity_type_todo.active and \
(activity_type_todo.res_model == current_model.model or not activity_type_todo.res_model):
return activity_type_todo
activity_type_model = ActivityType.search([('res_model', '=', current_model.model)], limit=1)
if activity_type_model:
return activity_type_model
activity_type_generic = ActivityType.search([('res_model', '=', False)], limit=1)
return activity_type_generic
# owner
res_model_id = fields.Many2one(
'ir.model', 'Document Model',
index=True, ondelete='cascade', required=True)
res_model = fields.Char(
'Related Document Model',
index=True, related='res_model_id.model', compute_sudo=True, store=True, readonly=True)
res_id = fields.Many2oneReference(string='Related Document ID', index=True, required=True, model_field='res_model')
res_name = fields.Char(
'Document Name', compute='_compute_res_name', compute_sudo=True, store=True,
help="Display name of the related document.", readonly=True)
# activity
activity_type_id = fields.Many2one(
'mail.activity.type', string='Activity Type',
domain="['|', ('res_model', '=', False), ('res_model', '=', res_model)]", ondelete='restrict',
default=_default_activity_type_id)
activity_category = fields.Selection(related='activity_type_id.category', readonly=True)
activity_decoration = fields.Selection(related='activity_type_id.decoration_type', readonly=True)
icon = fields.Char('Icon', related='activity_type_id.icon', readonly=True)
summary = fields.Char('Summary')
note = fields.Html('Note', sanitize_style=True)
date_deadline = fields.Date('Due Date', index=True, required=True, default=fields.Date.context_today)
automated = fields.Boolean(
'Automated activity', readonly=True,
help='Indicates this activity has been created automatically and not by any user.')
# description
user_id = fields.Many2one(
'res.users', 'Assigned to',
default=lambda self: self.env.user,
index=True, required=True)
request_partner_id = fields.Many2one('res.partner', string='Requesting Partner')
state = fields.Selection([
('overdue', 'Overdue'),
('today', 'Today'),
('planned', 'Planned')], 'State',
compute='_compute_state')
recommended_activity_type_id = fields.Many2one('mail.activity.type', string="Recommended Activity Type")
previous_activity_type_id = fields.Many2one('mail.activity.type', string='Previous Activity Type', readonly=True)
has_recommended_activities = fields.Boolean(
'Next activities available',
compute='_compute_has_recommended_activities',
help='Technical field for UX purpose')
mail_template_ids = fields.Many2many(related='activity_type_id.mail_template_ids', readonly=True)
chaining_type = fields.Selection(related='activity_type_id.chaining_type', readonly=True)
# access
can_write = fields.Boolean(compute='_compute_can_write', help='Technical field to hide buttons if the current user has no access.')
@api.onchange('previous_activity_type_id')
def _compute_has_recommended_activities(self):
for record in self:
record.has_recommended_activities = bool(record.previous_activity_type_id.suggested_next_type_ids)
@api.onchange('previous_activity_type_id')
def _onchange_previous_activity_type_id(self):
for record in self:
if record.previous_activity_type_id.triggered_next_type_id:
record.activity_type_id = record.previous_activity_type_id.triggered_next_type_id
@api.depends('res_model', 'res_id')
def _compute_res_name(self):
for activity in self:
activity.res_name = activity.res_model and \
self.env[activity.res_model].browse(activity.res_id).display_name
@api.depends('date_deadline')
def _compute_state(self):
for record in self.filtered(lambda activity: activity.date_deadline):
tz = record.user_id.sudo().tz
date_deadline = record.date_deadline
record.state = self._compute_state_from_date(date_deadline, tz)
@api.model
def _compute_state_from_date(self, date_deadline, tz=False):
date_deadline = fields.Date.from_string(date_deadline)
today_default = date.today()
today = today_default
if tz:
today_utc = pytz.utc.localize(datetime.utcnow())
today_tz = today_utc.astimezone(pytz.timezone(tz))
today = date(year=today_tz.year, month=today_tz.month, day=today_tz.day)
diff = (date_deadline - today)
if diff.days == 0:
return 'today'
elif diff.days < 0:
return 'overdue'
else:
return 'planned'
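# Example (dates assumed): with the user's timezone making "today" 2021-06-15,
# a deadline of 2021-06-14 yields 'overdue', 2021-06-15 yields 'today' and
# 2021-06-16 yields 'planned'. The tz argument only shifts what "today" means.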
@api.depends('res_model', 'res_id', 'user_id')
def _compute_can_write(self):
valid_records = self._filter_access_rules('write')
for record in self:
record.can_write = record in valid_records
@api.onchange('activity_type_id')
def _onchange_activity_type_id(self):
if self.activity_type_id:
if self.activity_type_id.summary:
self.summary = self.activity_type_id.summary
self.date_deadline = self._calculate_date_deadline(self.activity_type_id)
self.user_id = self.activity_type_id.default_user_id or self.env.user
if self.activity_type_id.default_note:
self.note = self.activity_type_id.default_note
def _calculate_date_deadline(self, activity_type):
# Date.context_today is correct because date_deadline is a Date and is meant to be
# expressed in user TZ
base = fields.Date.context_today(self)
if activity_type.delay_from == 'previous_activity' and 'activity_previous_deadline' in self.env.context:
base = fields.Date.from_string(self.env.context.get('activity_previous_deadline'))
return base + relativedelta(**{activity_type.delay_unit: activity_type.delay_count})
@api.onchange('recommended_activity_type_id')
def _onchange_recommended_activity_type_id(self):
if self.recommended_activity_type_id:
self.activity_type_id = self.recommended_activity_type_id
def _filter_access_rules(self, operation):
# write / unlink: valid for creator / assigned
if operation in ('write', 'unlink'):
valid = super(MailActivity, self)._filter_access_rules(operation)
if valid and valid == self:
return self
else:
valid = self.env[self._name]
return self._filter_access_rules_remaining(valid, operation, '_filter_access_rules')
def _filter_access_rules_python(self, operation):
# write / unlink: valid for creator / assigned
if operation in ('write', 'unlink'):
valid = super(MailActivity, self)._filter_access_rules_python(operation)
if valid and valid == self:
return self
else:
valid = self.env[self._name]
return self._filter_access_rules_remaining(valid, operation, '_filter_access_rules_python')
def _filter_access_rules_remaining(self, valid, operation, filter_access_rules_method):
""" Return the subset of ``self`` for which ``operation`` is allowed.
A custom implementation is done on activities as this document has some
access rules and is based on related document for activities that are
not covered by those rules.
Access on activities are the following :
* create: (``mail_post_access`` or write) right on related documents;
* read: read rights on related documents;
* write: access rule OR
(``mail_post_access`` or write) rights on related documents);
* unlink: access rule OR
(``mail_post_access`` or write) rights on related documents);
"""
# compute remaining for hand-tailored rules
remaining = self - valid
remaining_sudo = remaining.sudo()
# fall back on related document access right checks. Use the same as defined for mail.thread
# if available; otherwise fall back on read for read, write for other operations.
activity_to_documents = dict()
for activity in remaining_sudo:
# write / unlink: if not updating self or assigned, limit to automated activities to avoid
# updating other people's activities. As unlinking a document bypasses access rights checks
# on related activities this will not prevent people from deleting documents with activities
# create / read: just check rights on related document
activity_to_documents.setdefault(activity.res_model, list()).append(activity.res_id)
for doc_model, doc_ids in activity_to_documents.items():
if hasattr(self.env[doc_model], '_mail_post_access'):
doc_operation = self.env[doc_model]._mail_post_access
elif operation == 'read':
doc_operation = 'read'
else:
doc_operation = 'write'
right = self.env[doc_model].check_access_rights(doc_operation, raise_exception=False)
if right:
valid_doc_ids = getattr(self.env[doc_model].browse(doc_ids), filter_access_rules_method)(doc_operation)
valid += remaining.filtered(lambda activity: activity.res_model == doc_model and activity.res_id in valid_doc_ids.ids)
return valid
def _check_access_assignation(self):
""" Check assigned user (user_id field) has access to the document. Purpose
is to allow assigned user to handle their activities. For that purpose
assigned user should be able to at least read the document. We therefore
raise a UserError if the assigned user has no access to the document. """
for activity in self:
model = self.env[activity.res_model].with_user(activity.user_id).with_context(allowed_company_ids=activity.user_id.company_ids.ids)
try:
model.check_access_rights('read')
except exceptions.AccessError:
raise exceptions.UserError(
_('Assigned user %s has no access to the document and is not able to handle this activity.',
activity.user_id.display_name))
else:
try:
target_user = activity.user_id
target_record = self.env[activity.res_model].browse(activity.res_id)
if hasattr(target_record, 'company_id') and (
target_record.company_id != target_user.company_id and (
len(target_user.sudo().company_ids) > 1)):
return # in that case we skip the check, assuming it would fail because of the company
model.browse(activity.res_id).check_access_rule('read')
except exceptions.AccessError:
raise exceptions.UserError(
_('Assigned user %s has no access to the document and is not able to handle this activity.',
activity.user_id.display_name))
# ------------------------------------------------------
# ORM overrides
# ------------------------------------------------------
@api.model_create_multi
def create(self, vals_list):
activities = super(MailActivity, self).create(vals_list)
for activity in activities:
need_sudo = False
try: # in multicompany, reading the partner might break
partner_id = activity.user_id.partner_id.id
except exceptions.AccessError:
need_sudo = True
partner_id = activity.user_id.sudo().partner_id.id
# send a notification to assigned user; in case of manually done activity also check
# target has rights on the document, otherwise we prevent its creation. Automated activities
# are not checked since they are integrated into business flows that should not crash.
if activity.user_id != self.env.user:
if not activity.automated:
activity._check_access_assignation()
if not self.env.context.get('mail_activity_quick_update', False):
if need_sudo:
activity.sudo().action_notify()
else:
activity.action_notify()
self.env[activity.res_model].browse(activity.res_id).message_subscribe(partner_ids=[partner_id])
if activity.date_deadline <= fields.Date.today():
self.env['bus.bus']._sendone(activity.user_id.partner_id, 'mail.activity/updated', {'activity_created': True})
return activities
def read(self, fields=None, load='_classic_read'):
""" When reading specific fields, read calls _read that manually applies ir rules
(_apply_ir_rules), instead of calling check_access_rule.
Meaning that our custom rules enforcing from '_filter_access_rules' and
'_filter_access_rules_python' are bypassed in that case.
To make sure we apply our custom security rules, we force a call to 'check_access_rule'. """
self.check_access_rule('read')
return super(MailActivity, self).read(fields=fields, load=load)
def write(self, values):
if values.get('user_id'):
user_changes = self.filtered(lambda activity: activity.user_id.id != values.get('user_id'))
pre_responsibles = user_changes.mapped('user_id.partner_id')
res = super(MailActivity, self).write(values)
if values.get('user_id'):
if values['user_id'] != self.env.uid:
to_check = user_changes.filtered(lambda act: not act.automated)
to_check._check_access_assignation()
if not self.env.context.get('mail_activity_quick_update', False):
user_changes.action_notify()
for activity in user_changes:
self.env[activity.res_model].browse(activity.res_id).message_subscribe(partner_ids=[activity.user_id.partner_id.id])
if activity.date_deadline <= fields.Date.today():
self.env['bus.bus']._sendone(activity.user_id.partner_id, 'mail.activity/updated', {'activity_created': True})
for activity in user_changes:
if activity.date_deadline <= fields.Date.today():
for partner in pre_responsibles:
self.env['bus.bus']._sendone(partner, 'mail.activity/updated', {'activity_deleted': True})
return res
def unlink(self):
for activity in self:
if activity.date_deadline <= fields.Date.today():
self.env['bus.bus']._sendone(activity.user_id.partner_id, 'mail.activity/updated', {'activity_deleted': True})
return super(MailActivity, self).unlink()
@api.model
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
""" Override that adds specific access rights of mail.activity, to remove
ids uid could not see according to our custom rules. Please refer to
_filter_access_rules_remaining for more details about those rules.
The method is inspired by what has been done on mail.message. """
# Rules do not apply to administrator
if self.env.is_superuser():
return super(MailActivity, self)._search(
args, offset=offset, limit=limit, order=order,
count=count, access_rights_uid=access_rights_uid)
# Perform a super with count as False, to have the ids, not a counter
ids = super(MailActivity, self)._search(
args, offset=offset, limit=limit, order=order,
count=False, access_rights_uid=access_rights_uid)
if not ids and count:
return 0
elif not ids:
return ids
# check read access rights before checking the actual rules on the given ids
super(MailActivity, self.with_user(access_rights_uid or self._uid)).check_access_rights('read')
self.flush(['res_model', 'res_id'])
activities_to_check = []
for sub_ids in self._cr.split_for_in_conditions(ids):
self._cr.execute("""
SELECT DISTINCT activity.id, activity.res_model, activity.res_id
FROM "%s" activity
WHERE activity.id = ANY (%%(ids)s) AND activity.res_id != 0""" % self._table, dict(ids=list(sub_ids)))
activities_to_check += self._cr.dictfetchall()
activity_to_documents = {}
for activity in activities_to_check:
activity_to_documents.setdefault(activity['res_model'], set()).add(activity['res_id'])
allowed_ids = set()
for doc_model, doc_ids in activity_to_documents.items():
# fall back on related document access right checks. Use the same as defined for mail.thread
# if available; otherwise fall back on read
if hasattr(self.env[doc_model], '_mail_post_access'):
doc_operation = self.env[doc_model]._mail_post_access
else:
doc_operation = 'read'
DocumentModel = self.env[doc_model].with_user(access_rights_uid or self._uid)
right = DocumentModel.check_access_rights(doc_operation, raise_exception=False)
if right:
valid_docs = DocumentModel.browse(doc_ids)._filter_access_rules(doc_operation)
valid_doc_ids = set(valid_docs.ids)
allowed_ids.update(
activity['id'] for activity in activities_to_check
if activity['res_model'] == doc_model and activity['res_id'] in valid_doc_ids)
if count:
return len(allowed_ids)
else:
# re-construct a list based on ids, because 'allowed_ids' does not keep the original order
id_list = [id for id in ids if id in allowed_ids]
return id_list
@api.model
def _read_group_raw(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True):
""" The base _read_group_raw method implementation computes a where based on a given domain
(_where_calc) and manually applies ir rules (_apply_ir_rules).
Meaning that our custom rules enforcing from '_filter_access_rules' and
'_filter_access_rules_python' are bypassed in that case.
This override re-uses the _search implementation to force the read group domain to allowed
ids only, that are computed based on our custom rules (see _filter_access_rules_remaining
for more details). """
# Rules do not apply to administrator
if not self.env.is_superuser():
allowed_ids = self._search(domain, count=False)
if allowed_ids:
domain = expression.AND([domain, [('id', 'in', allowed_ids)]])
else:
# force void result if no allowed ids found
domain = expression.AND([domain, [(0, '=', 1)]])
return super(MailActivity, self)._read_group_raw(
domain=domain, fields=fields, groupby=groupby, offset=offset,
limit=limit, orderby=orderby, lazy=lazy,
)
def name_get(self):
res = []
for record in self:
name = record.summary or record.activity_type_id.display_name
res.append((record.id, name))
return res
# ------------------------------------------------------
# Business Methods
# ------------------------------------------------------
def action_notify(self):
if not self:
return
original_context = self.env.context
body_template = self.env.ref('mail.message_activity_assigned')
for activity in self:
if activity.user_id.lang:
# Send the notification in the assigned user's language
self = self.with_context(lang=activity.user_id.lang)
body_template = body_template.with_context(lang=activity.user_id.lang)
activity = activity.with_context(lang=activity.user_id.lang)
model_description = self.env['ir.model']._get(activity.res_model).display_name
body = body_template._render(
dict(
activity=activity,
model_description=model_description,
access_link=self.env['mail.thread']._notify_get_action_link('view', model=activity.res_model, res_id=activity.res_id),
),
engine='ir.qweb',
minimal_qcontext=True
)
record = self.env[activity.res_model].browse(activity.res_id)
if activity.user_id:
record.message_notify(
partner_ids=activity.user_id.partner_id.ids,
body=body,
subject=_('%(activity_name)s: %(summary)s assigned to you',
activity_name=activity.res_name,
summary=activity.summary or activity.activity_type_id.name),
record_name=activity.res_name,
model_description=model_description,
email_layout_xmlid='mail.mail_notification_light',
)
body_template = body_template.with_context(original_context)
self = self.with_context(original_context)
def action_done(self):
""" Wrapper without feedback because web button add context as
parameter, therefore setting context to feedback """
messages, next_activities = self._action_done()
return messages.ids and messages.ids[0] or False
def action_feedback(self, feedback=False, attachment_ids=None):
self = self.with_context(clean_context(self.env.context))
messages, next_activities = self._action_done(feedback=feedback, attachment_ids=attachment_ids)
return messages.ids and messages.ids[0] or False
def action_done_schedule_next(self):
""" Wrapper without feedback because web button add context as
parameter, therefore setting context to feedback """
return self.action_feedback_schedule_next()
def action_feedback_schedule_next(self, feedback=False):
ctx = dict(
clean_context(self.env.context),
default_previous_activity_type_id=self.activity_type_id.id,
activity_previous_deadline=self.date_deadline,
default_res_id=self.res_id,
default_res_model=self.res_model,
)
messages, next_activities = self._action_done(feedback=feedback) # will unlink activity, dont access self after that
if next_activities:
return False
return {
'name': _('Schedule an Activity'),
'context': ctx,
'view_mode': 'form',
'res_model': 'mail.activity',
'views': [(False, 'form')],
'type': 'ir.actions.act_window',
'target': 'new',
}
def _action_done(self, feedback=False, attachment_ids=None):
""" Private implementation of marking activity as done: posting a message, deleting activity
(since done), and eventually create the automatical next activity (depending on config).
:param feedback: optional feedback from user when marking activity as done
:param attachment_ids: list of ir.attachment ids to attach to the posted mail.message
:returns (messages, activities) where
- messages is a recordset of posted mail.message
- activities is a recordset of the automatically created next activities (mail.activity)
"""
# marking as 'done'
messages = self.env['mail.message']
next_activities_values = []
# Search for all attachments linked to the activities we are about to unlink. This way, we
# can link them to the message posted and prevent their deletion.
attachments = self.env['ir.attachment'].search_read([
('res_model', '=', self._name),
('res_id', 'in', self.ids),
], ['id', 'res_id'])
activity_attachments = defaultdict(list)
for attachment in attachments:
activity_id = attachment['res_id']
activity_attachments[activity_id].append(attachment['id'])
for activity in self:
# extract value to generate next activities
if activity.chaining_type == 'trigger':
vals = activity.with_context(activity_previous_deadline=activity.date_deadline)._prepare_next_activity_values()
next_activities_values.append(vals)
# post message on activity, before deleting it
record = self.env[activity.res_model].browse(activity.res_id)
record.message_post_with_view(
'mail.message_activity_done',
values={
'activity': activity,
'feedback': feedback,
'display_assignee': activity.user_id != self.env.user
},
subtype_id=self.env['ir.model.data']._xmlid_to_res_id('mail.mt_activities'),
mail_activity_type_id=activity.activity_type_id.id,
attachment_ids=[Command.link(attachment_id) for attachment_id in attachment_ids] if attachment_ids else [],
)
# Moving the attachments in the message
# TODO: Fix void res_id on attachment when you create an activity with an image
# directly, see route /web_editor/attachment/add
activity_message = record.message_ids[0]
message_attachments = self.env['ir.attachment'].browse(activity_attachments[activity.id])
if message_attachments:
message_attachments.write({
'res_id': activity_message.id,
'res_model': activity_message._name,
})
activity_message.attachment_ids = message_attachments
messages |= activity_message
next_activities = self.env['mail.activity'].create(next_activities_values)
self.unlink() # will unlink activity, dont access `self` after that
return messages, next_activities
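# Usage sketch (activity and feedback text are illustrative): marking as done posts
# a message on the related record, re-attaches the activity's attachments to that
# message, unlinks the activity and, with 'trigger' chaining, creates the next one.
#
#     message_id = activity.action_feedback(feedback="Called the customer, all good")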
def action_close_dialog(self):
return {'type': 'ir.actions.act_window_close'}
def activity_format(self):
activities = self.read()
mail_template_ids = set([template_id for activity in activities for template_id in activity["mail_template_ids"]])
mail_template_info = self.env["mail.template"].browse(mail_template_ids).read(['id', 'name'])
mail_template_dict = dict([(mail_template['id'], mail_template) for mail_template in mail_template_info])
for activity in activities:
activity['mail_template_ids'] = [mail_template_dict[mail_template_id] for mail_template_id in activity['mail_template_ids']]
return activities
@api.model
def get_activity_data(self, res_model, domain):
activity_domain = [('res_model', '=', res_model)]
if domain:
res = self.env[res_model].search(domain)
activity_domain.append(('res_id', 'in', res.ids))
grouped_activities = self.env['mail.activity'].read_group(
activity_domain,
['res_id', 'activity_type_id', 'ids:array_agg(id)', 'date_deadline:min(date_deadline)'],
['res_id', 'activity_type_id'],
lazy=False)
# filter out unreadable records
if not domain:
res_ids = tuple(a['res_id'] for a in grouped_activities)
res = self.env[res_model].search([('id', 'in', res_ids)])
grouped_activities = [a for a in grouped_activities if a['res_id'] in res.ids]
res_id_to_deadline = {}
activity_data = defaultdict(dict)
for group in grouped_activities:
res_id = group['res_id']
activity_type_id = (group.get('activity_type_id') or (False, False))[0]
res_id_to_deadline[res_id] = group['date_deadline'] if (res_id not in res_id_to_deadline or group['date_deadline'] < res_id_to_deadline[res_id]) else res_id_to_deadline[res_id]
state = self._compute_state_from_date(group['date_deadline'], self.user_id.sudo().tz)
activity_data[res_id][activity_type_id] = {
'count': group['__count'],
'ids': group['ids'],
'state': state,
'o_closest_deadline': group['date_deadline'],
}
activity_type_infos = []
activity_type_ids = self.env['mail.activity.type'].search(
['|', ('res_model', '=', res_model), ('res_model', '=', False)])
for elem in sorted(activity_type_ids, key=lambda item: item.sequence):
mail_template_info = []
for mail_template_id in elem.mail_template_ids:
mail_template_info.append({"id": mail_template_id.id, "name": mail_template_id.name})
activity_type_infos.append([elem.id, elem.name, mail_template_info])
return {
'activity_types': activity_type_infos,
'activity_res_ids': sorted(res_id_to_deadline, key=lambda item: res_id_to_deadline[item]),
'grouped_activities': activity_data,
}
# ----------------------------------------------------------------------
# TOOLS
# ----------------------------------------------------------------------
def _prepare_next_activity_values(self):
""" Prepare the next activity values based on the current activity record and applies _onchange methods
:returns a dict of values for the new activity
"""
self.ensure_one()
vals = self.default_get(self.fields_get())
vals.update({
'previous_activity_type_id': self.activity_type_id.id,
'res_id': self.res_id,
'res_model': self.res_model,
'res_model_id': self.env['ir.model']._get(self.res_model).id,
})
virtual_activity = self.new(vals)
virtual_activity._onchange_previous_activity_type_id()
virtual_activity._onchange_activity_type_id()
return virtual_activity._convert_to_write(virtual_activity._cache)
| 50.627543 | 32,351 |
1,799 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class IrConfigParameter(models.Model):
_inherit = 'ir.config_parameter'
@api.model_create_multi
def create(self, vals_list):
for vals in vals_list:
if vals.get('key') in ['mail.bounce.alias', 'mail.catchall.alias']:
vals['value'] = self.env['mail.alias']._clean_and_check_unique([vals.get('value')])[0]
return super().create(vals_list)
def write(self, vals):
for parameter in self:
if 'value' in vals and parameter.key in ['mail.bounce.alias', 'mail.catchall.alias'] and vals['value'] != parameter.value:
vals['value'] = self.env['mail.alias']._clean_and_check_unique([vals.get('value')])[0]
return super().write(vals)
@api.model
def set_param(self, key, value):
if key == 'mail.restrict.template.rendering':
group_user = self.env.ref('base.group_user')
group_mail_template_editor = self.env.ref('mail.group_mail_template_editor')
if not value and group_mail_template_editor not in group_user.implied_ids:
group_user.implied_ids |= group_mail_template_editor
elif value and group_mail_template_editor in group_user.implied_ids:
group_user.implied_ids -= group_mail_template_editor
# remove existing users, including inactive template user
# admin will regain the right via implied_ids on group_system
group_mail_template_editor.with_context(active_test=False).users.write(
{'groups_id': [(3, group_mail_template_editor.id)]})
return super(IrConfigParameter, self).set_param(key, value)
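# Usage sketch: toggling the parameter grants or removes the template editor group
# on internal users (the sudo() call is an assumption about typical usage).
#
#     env['ir.config_parameter'].sudo().set_param('mail.restrict.template.rendering', True)
#     # -> base.group_user no longer implies mail.group_mail_template_editor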
| 46.128205 | 1,799 |
13,984 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
import re
from markupsafe import Markup
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError, UserError
from odoo.tools import is_html_empty, remove_accents
# see rfc5322 section 3.2.3
atext = r"[a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]"
dot_atom_text = re.compile(r"^%s+(\.%s+)*$" % (atext, atext))
class Alias(models.Model):
"""A Mail Alias is a mapping of an email address with a given Odoo Document
model. It is used by Odoo's mail gateway when processing incoming emails
sent to the system. If the recipient address (To) of the message matches
a Mail Alias, the message will be processed following the rules
of that alias. If the message is a reply, it will be attached to the
existing discussion on the corresponding record; otherwise a new
record of the corresponding model will be created.
This is meant to be used in combination with a catch-all email configuration
on the company's mail server, so that as soon as a new mail.alias is
created, it becomes immediately usable and Odoo will accept email for it.
"""
_name = 'mail.alias'
_description = "Email Aliases"
_rec_name = 'alias_name'
_order = 'alias_model_id, alias_name'
alias_name = fields.Char('Alias Name', copy=False, help="The name of the email alias, e.g. 'jobs' if you want to catch emails for <[email protected]>")
alias_model_id = fields.Many2one('ir.model', 'Aliased Model', required=True, ondelete="cascade",
help="The model (Odoo Document Kind) to which this alias "
"corresponds. Any incoming email that does not reply to an "
"existing record will cause the creation of a new record "
"of this model (e.g. a Project Task)",
# hack to only allow selecting mail_thread models (we might
# have a few false positives, though)
domain="[('field_id.name', '=', 'message_ids')]")
alias_user_id = fields.Many2one('res.users', 'Owner', default=lambda self: self.env.user,
help="The owner of records created upon receiving emails on this alias. "
"If this field is not set the system will attempt to find the right owner "
"based on the sender (From) address, or will use the Administrator account "
"if no system user is found for that address.")
alias_defaults = fields.Text('Default Values', required=True, default='{}',
help="A Python dictionary that will be evaluated to provide "
"default values when creating new records for this alias.")
alias_force_thread_id = fields.Integer(
'Record Thread ID',
help="Optional ID of a thread (record) to which all incoming messages will be attached, even "
"if they did not reply to it. If set, this will disable the creation of new records completely.")
alias_domain = fields.Char('Alias domain', compute='_compute_alias_domain')
alias_parent_model_id = fields.Many2one(
'ir.model', 'Parent Model',
help="Parent model holding the alias. The model holding the alias reference "
"is not necessarily the model given by alias_model_id "
"(example: project (parent_model) and task (model))")
alias_parent_thread_id = fields.Integer('Parent Record Thread ID', help="ID of the parent record holding the alias (example: project holding the task creation alias)")
alias_contact = fields.Selection([
('everyone', 'Everyone'),
('partners', 'Authenticated Partners'),
('followers', 'Followers only')], default='everyone',
string='Alias Contact Security', required=True,
help="Policy to post a message on the document using the mailgateway.\n"
"- everyone: everyone can post\n"
"- partners: only authenticated partners\n"
"- followers: only followers of the related document or members of following channels\n")
alias_bounced_content = fields.Html(
"Custom Bounced Message", translate=True,
help="If set, this content will automatically be sent out to unauthorized users instead of the default message.")
_sql_constraints = [
('alias_unique', 'UNIQUE(alias_name)', 'Unfortunately this email alias is already used, please choose a unique one')
]
@api.constrains('alias_name')
def _alias_is_ascii(self):
""" The local-part ("display-name" <local-part@domain>) of an
address only contains limited range of ascii characters.
We DO NOT allow anything else than ASCII dot-atom formed
local-part. Quoted-string and international characters are
to be rejected. See rfc5322 sections 3.4.1 and 3.2.3
"""
for alias in self:
if alias.alias_name and not dot_atom_text.match(alias.alias_name):
raise ValidationError(_(
"You cannot use anything else than unaccented latin characters in the alias address (%s).",
alias.alias_name,
))
@api.depends('alias_name')
def _compute_alias_domain(self):
self.alias_domain = self.env["ir.config_parameter"].sudo().get_param("mail.catchall.domain")
@api.constrains('alias_defaults')
def _check_alias_defaults(self):
for alias in self:
try:
dict(ast.literal_eval(alias.alias_defaults))
except Exception:
raise ValidationError(_('Invalid expression, it must be a literal python dictionary definition e.g. "{\'field\': \'value\'}"'))
@api.model_create_multi
def create(self, vals_list):
""" Creates email.alias records according to the values provided in
``vals`` with 1 alteration:
* ``alias_name`` value may be cleaned by replacing certain unsafe
characters;
:raise UserError: if given alias_name is already assigned or there are
duplicates in given vals_list;
"""
alias_names = [vals['alias_name'] for vals in vals_list if vals.get('alias_name')]
if alias_names:
sanitized_names = self._clean_and_check_unique(alias_names)
for vals in vals_list:
if vals.get('alias_name'):
vals['alias_name'] = sanitized_names[alias_names.index(vals['alias_name'])]
return super(Alias, self).create(vals_list)
def write(self, vals):
""""Raises UserError if given alias name is already assigned"""
if vals.get('alias_name') and self.ids:
if len(self) > 1:
raise UserError(_(
'Email alias %(alias_name)s cannot be used on %(count)d records at the same time. Please update records one by one.',
alias_name=vals['alias_name'], count=len(self)
))
vals['alias_name'] = self._clean_and_check_unique([vals.get('alias_name')])[0]
return super(Alias, self).write(vals)
def name_get(self):
"""Return the mail alias display alias_name, including the implicit
mail catchall domain if exists from config otherwise "New Alias".
e.g. `[email protected]` or `jobs` or 'New Alias'
"""
res = []
for record in self:
if record.alias_name and record.alias_domain:
res.append((record['id'], "%s@%s" % (record.alias_name, record.alias_domain)))
elif record.alias_name:
res.append((record['id'], "%s" % (record.alias_name)))
else:
res.append((record['id'], _("Inactive Alias")))
return res
def _clean_and_check_unique(self, names):
"""When an alias name appears to already be an email, we keep the local
part only. A sanitizing / cleaning is also performed on the name. If
name already exists, a UserError is raised. """
def _sanitize_alias_name(name):
""" Cleans and sanitizes the alias name """
sanitized_name = remove_accents(name).lower().split('@')[0]
sanitized_name = re.sub(r'[^\w+.]+', '-', sanitized_name)
sanitized_name = re.sub(r'^\.+|\.+$|\.+(?=\.)', '', sanitized_name)
sanitized_name = sanitized_name.encode('ascii', errors='replace').decode()
return sanitized_name
sanitized_names = [_sanitize_alias_name(name) for name in names]
catchall_alias = self.env['ir.config_parameter'].sudo().get_param('mail.catchall.alias')
bounce_alias = self.env['ir.config_parameter'].sudo().get_param('mail.bounce.alias')
alias_domain = self.env["ir.config_parameter"].sudo().get_param("mail.catchall.domain")
# matches catchall or bounce alias
for sanitized_name in sanitized_names:
if sanitized_name in [catchall_alias, bounce_alias]:
matching_alias_name = '%s@%s' % (sanitized_name, alias_domain) if alias_domain else sanitized_name
raise UserError(
_('The e-mail alias %(matching_alias_name)s is already used as %(alias_duplicate)s alias. Please choose another alias.',
matching_alias_name=matching_alias_name,
alias_duplicate=_('catchall') if sanitized_name == catchall_alias else _('bounce'))
)
# matches existing alias
domain = [('alias_name', 'in', sanitized_names)]
if self:
domain += [('id', 'not in', self.ids)]
matching_alias = self.search(domain, limit=1)
if not matching_alias:
return sanitized_names
sanitized_alias_name = _sanitize_alias_name(matching_alias.alias_name)
matching_alias_name = '%s@%s' % (sanitized_alias_name, alias_domain) if alias_domain else sanitized_alias_name
if matching_alias.alias_parent_model_id and matching_alias.alias_parent_thread_id:
# If parent model and parent thread ID both are set, display document name also in the warning
document_name = self.env[matching_alias.alias_parent_model_id.model].sudo().browse(matching_alias.alias_parent_thread_id).display_name
raise UserError(
_('The e-mail alias %(matching_alias_name)s is already used by the %(document_name)s %(model_name)s. Choose another alias or change it on the other document.',
matching_alias_name=matching_alias_name,
document_name=document_name,
model_name=matching_alias.alias_parent_model_id.name)
)
raise UserError(
_('The e-mail alias %(matching_alias_name)s is already linked with %(alias_model_name)s. Choose another alias or change it on the linked model.',
matching_alias_name=matching_alias_name,
alias_model_name=matching_alias.alias_model_id.name)
)
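    # Illustrative sketch (not part of the original module): the sanitization in
    # _clean_and_check_unique() keeps only the local part, strips accents,
    # replaces unsafe characters with dashes and trims dots, e.g. (made-up inputs,
    # assuming no conflict with the catchall/bounce aliases or existing aliases):
    #
    #     self.env['mail.alias']._clean_and_check_unique(['Contact Héllo@example.com'])
    #     # -> ['contact-hello']
    #     self.env['mail.alias']._clean_and_check_unique(['.jobs.'])
    #     # -> ['jobs']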
def open_document(self):
if not self.alias_model_id or not self.alias_force_thread_id:
return False
return {
'view_mode': 'form',
'res_model': self.alias_model_id.model,
'res_id': self.alias_force_thread_id,
'type': 'ir.actions.act_window',
}
def open_parent_document(self):
if not self.alias_parent_model_id or not self.alias_parent_thread_id:
return False
return {
'view_mode': 'form',
'res_model': self.alias_parent_model_id.model,
'res_id': self.alias_parent_thread_id,
'type': 'ir.actions.act_window',
}
def _get_alias_bounced_body_fallback(self, message_dict):
contact_description = self._get_alias_contact_description()
default_email = self.env.company.partner_id.email_formatted if self.env.company.partner_id.email else self.env.company.name
return Markup(
_("""<p>Dear Sender,<br /><br />
The message below could not be accepted by the address %(alias_display_name)s.
Only %(contact_description)s are allowed to contact it.<br /><br />
Please make sure you are using the correct address or contact us at %(default_email)s instead.<br /><br />
Kind Regards,</p>"""
)) % {
'alias_display_name': self.display_name,
'contact_description': contact_description,
'default_email': default_email,
}
def _get_alias_contact_description(self):
if self.alias_contact == 'partners':
return _('addresses linked to registered partners')
return _('some specific addresses')
def _get_alias_bounced_body(self, message_dict):
"""Get the body of the email return in case of bounced email.
:param message_dict: dictionary of mail values
"""
lang_author = False
if message_dict.get('author_id'):
try:
lang_author = self.env['res.partner'].browse(message_dict['author_id']).lang
            except Exception:
pass
if lang_author:
self = self.with_context(lang=lang_author)
if not is_html_empty(self.alias_bounced_content):
body = self.alias_bounced_content
else:
body = self._get_alias_bounced_body_fallback(message_dict)
template = self.env.ref('mail.mail_bounce_alias_security', raise_if_not_found=True)
return template._render({
'body': body,
'message': message_dict
}, engine='ir.qweb', minimal_qcontext=True)
| avg_line_length: 51.792593 | max_line_length: 13,984 |
| size: 2,116 | ext: py | lang: PYTHON | branch: 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import _, api, fields, models, tools
class MailCCMixin(models.AbstractModel):
_name = 'mail.thread.cc'
_inherit = 'mail.thread'
_description = 'Email CC management'
email_cc = fields.Char('Email cc', help='List of cc from incoming emails.')
def _mail_cc_sanitized_raw_dict(self, cc_string):
        '''Return a {normalized_email: formatted_email} dict built from a string of cc addresses.'''
if not cc_string:
return {}
return {tools.email_normalize(email): tools.formataddr((name, tools.email_normalize(email)))
for (name, email) in tools.email_split_tuples(cc_string)}
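    # Illustrative sketch (not part of the original module), relying on the
    # standard behaviour of tools.email_split_tuples / email_normalize /
    # formataddr with made-up addresses:
    #
    #     record._mail_cc_sanitized_raw_dict('"Raoul" <Raoul@EXAMPLE.com>, bob@test.lan')
    #     # -> {'raoul@example.com': 'Raoul <raoul@example.com>',
    #     #     'bob@test.lan': 'bob@test.lan'}
    #
    # The normalized address is the key, which is what makes the deduplication
    # in message_update() below straightforward.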
@api.model
def message_new(self, msg_dict, custom_values=None):
if custom_values is None:
custom_values = {}
cc_values = {
'email_cc': ", ".join(self._mail_cc_sanitized_raw_dict(msg_dict.get('cc')).values()),
}
cc_values.update(custom_values)
return super(MailCCMixin, self).message_new(msg_dict, cc_values)
def message_update(self, msg_dict, update_vals=None):
        '''Add the new cc emails to self.email_cc, keeping each email as raw as possible while avoiding duplicates'''
if update_vals is None:
update_vals = {}
cc_values = {}
new_cc = self._mail_cc_sanitized_raw_dict(msg_dict.get('cc'))
if new_cc:
old_cc = self._mail_cc_sanitized_raw_dict(self.email_cc)
new_cc.update(old_cc)
cc_values['email_cc'] = ", ".join(new_cc.values())
cc_values.update(update_vals)
return super(MailCCMixin, self).message_update(msg_dict, cc_values)
def _message_get_suggested_recipients(self):
recipients = super(MailCCMixin, self)._message_get_suggested_recipients()
for record in self:
if record.email_cc:
for email in tools.email_split_and_format(record.email_cc):
record._message_add_suggested_recipient(recipients, email=email, reason=_('CC Email'))
return recipients
| avg_line_length: 42.32 | max_line_length: 2,116 |
| size: 9,420 | ext: py | lang: PYTHON | branch: 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from collections import defaultdict
from odoo import _, api, exceptions, fields, models, modules
from odoo.addons.base.models.res_users import is_selection_groups
class Users(models.Model):
""" Update of res.users class
- add a preference about sending emails about notifications
- make a new user follow itself
- add a welcome message
- add suggestion preference
- if adding groups to a user, check mail.channels linked to this user
group, and the user. This is done by overriding the write method.
"""
_name = 'res.users'
_inherit = ['res.users']
_description = 'Users'
notification_type = fields.Selection([
('email', 'Handle by Emails'),
('inbox', 'Handle in Odoo')],
'Notification', required=True, default='email',
help="Policy on how to handle Chatter notifications:\n"
"- Handle by Emails: notifications are sent to your email address\n"
"- Handle in Odoo: notifications appear in your Odoo Inbox")
res_users_settings_ids = fields.One2many('res.users.settings', 'user_id')
# ------------------------------------------------------------
# CRUD
# ------------------------------------------------------------
@property
def SELF_READABLE_FIELDS(self):
return super().SELF_READABLE_FIELDS + ['notification_type']
@property
def SELF_WRITEABLE_FIELDS(self):
return super().SELF_WRITEABLE_FIELDS + ['notification_type']
@api.model_create_multi
def create(self, vals_list):
for values in vals_list:
if not values.get('login', False):
action = self.env.ref('base.action_res_users')
msg = _("You cannot create a new user from here.\n To create new user please go to configuration panel.")
raise exceptions.RedirectWarning(msg, action.id, _('Go to the configuration panel'))
users = super(Users, self).create(vals_list)
# log a portal status change (manual tracking)
log_portal_access = not self._context.get('mail_create_nolog') and not self._context.get('mail_notrack')
if log_portal_access:
for user in users:
if user.has_group('base.group_portal'):
body = user._get_portal_access_update_body(True)
user.partner_id.message_post(
body=body,
message_type='notification',
subtype_xmlid='mail.mt_note'
)
# Auto-subscribe to channels unless skip explicitly requested
if not self.env.context.get('mail_channel_nosubscribe'):
self.env['mail.channel'].search([('group_ids', 'in', users.groups_id.ids)])._subscribe_users_automatically()
return users
def write(self, vals):
log_portal_access = 'groups_id' in vals and not self._context.get('mail_create_nolog') and not self._context.get('mail_notrack')
user_portal_access_dict = {
user.id: user.has_group('base.group_portal')
for user in self
} if log_portal_access else {}
write_res = super(Users, self).write(vals)
# log a portal status change (manual tracking)
if log_portal_access:
for user in self:
user_has_group = user.has_group('base.group_portal')
portal_access_changed = user_has_group != user_portal_access_dict[user.id]
if portal_access_changed:
body = user._get_portal_access_update_body(user_has_group)
user.partner_id.message_post(
body=body,
message_type='notification',
subtype_xmlid='mail.mt_note'
)
if 'active' in vals and not vals['active']:
self._unsubscribe_from_non_public_channels()
sel_groups = [vals[k] for k in vals if is_selection_groups(k) and vals[k]]
if vals.get('groups_id'):
            # form: {'groups_id': [(3, 10), (3, 3), (4, 10), (4, 3)]} or {'groups_id': [(6, 0, [ids])]}
user_group_ids = [command[1] for command in vals['groups_id'] if command[0] == 4]
user_group_ids += [id for command in vals['groups_id'] if command[0] == 6 for id in command[2]]
self.env['mail.channel'].search([('group_ids', 'in', user_group_ids)])._subscribe_users_automatically()
elif sel_groups:
self.env['mail.channel'].search([('group_ids', 'in', sel_groups)])._subscribe_users_automatically()
return write_res
def unlink(self):
self._unsubscribe_from_non_public_channels()
return super().unlink()
def _unsubscribe_from_non_public_channels(self):
""" This method un-subscribes users from private mail channels. Main purpose of this
method is to prevent sending internal communication to archived / deleted users.
We do not un-subscribes users from public channels because in most common cases,
public channels are mailing list (e-mail based) and so users should always receive
updates from public channels until they manually un-subscribe themselves.
"""
current_cp = self.env['mail.channel.partner'].sudo().search([
('partner_id', 'in', self.partner_id.ids),
])
current_cp.filtered(
lambda cp: cp.channel_id.public != 'public' and cp.channel_id.channel_type == 'channel'
).unlink()
def _get_portal_access_update_body(self, access_granted):
body = _('Portal Access Granted') if access_granted else _('Portal Access Revoked')
if self.partner_id.email:
return '%s (%s)' % (body, self.partner_id.email)
return body
# ------------------------------------------------------------
# DISCUSS
# ------------------------------------------------------------
def _init_messaging(self):
self.ensure_one()
partner_root = self.env.ref('base.partner_root')
values = {
'channels': self.partner_id._get_channels_as_member().channel_info(),
'companyName': self.env.company.name,
'currentGuest': False,
'current_partner': self.partner_id.mail_partner_format().get(self.partner_id),
'current_user_id': self.id,
'current_user_settings': self.env['res.users.settings']._find_or_create_for_user(self)._res_users_settings_format(),
'mail_failures': [],
'menu_id': self.env['ir.model.data']._xmlid_to_res_id('mail.menu_root_discuss'),
'needaction_inbox_counter': self.partner_id._get_needaction_count(),
'partner_root': partner_root.sudo().mail_partner_format().get(partner_root),
'public_partners': list(self.env.ref('base.group_public').sudo().with_context(active_test=False).users.partner_id.mail_partner_format().values()),
'shortcodes': self.env['mail.shortcode'].sudo().search_read([], ['source', 'substitution', 'description']),
'starred_counter': self.env['mail.message'].search_count([('starred_partner_ids', 'in', self.partner_id.ids)]),
}
return values
@api.model
def systray_get_activities(self):
activities = self.env["mail.activity"].search([("user_id", "=", self.env.uid)])
activities_by_record_by_model_name = defaultdict(lambda: defaultdict(lambda: self.env["mail.activity"]))
for activity in activities:
record = self.env[activity.res_model].browse(activity.res_id)
activities_by_record_by_model_name[activity.res_model][record] += activity
model_ids = list({self.env["ir.model"]._get(name).id for name in activities_by_record_by_model_name.keys()})
user_activities = {}
for model_name, activities_by_record in activities_by_record_by_model_name.items():
domain = [("id", "in", list({r.id for r in activities_by_record.keys()}))]
allowed_records = self.env[model_name].search(domain)
if not allowed_records:
continue
module = self.env[model_name]._original_module
icon = module and modules.module.get_module_icon(module)
user_activities[model_name] = {
"name": self.env["ir.model"]._get(model_name).with_prefetch(model_ids).name,
"model": model_name,
"type": "activity",
"icon": icon,
"total_count": 0,
"today_count": 0,
"overdue_count": 0,
"planned_count": 0,
"actions": [
{
"icon": "fa-clock-o",
"name": "Summary",
}
],
}
for record, activities in activities_by_record.items():
if record not in allowed_records:
continue
for activity in activities:
user_activities[model_name]["%s_count" % activity.state] += 1
if activity.state in ("today", "overdue"):
user_activities[model_name]["total_count"] += 1
return list(user_activities.values())
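    # Shape of the value returned by systray_get_activities() above, as an
    # illustrative sketch only (made-up counts and a hypothetical model):
    #
    #     [{
    #         'name': 'Task', 'model': 'project.task', 'type': 'activity',
    #         'icon': '/project/static/description/icon.png',
    #         'total_count': 3, 'today_count': 1, 'overdue_count': 2, 'planned_count': 4,
    #         'actions': [{'icon': 'fa-clock-o', 'name': 'Summary'}],
    #     }]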
| avg_line_length: 49.319372 | max_line_length: 9,420 |
| size: 4,147 | ext: py | lang: PYTHON | branch: 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools, _
class MailComposerMixin(models.AbstractModel):
""" Mixin used to edit and render some fields used when sending emails or
notifications based on a mail template.
    Its main current purpose is to hide the details of subject and body computation
    and rendering based on a mail.template. It also gives the base tools to control
    who is allowed to edit the body, notably when dealing with templating languages
    like inline_template or qweb.
    It is meant to evolve in the near future with upcoming support of qweb and
    fine-grained control of rendering access.
"""
_name = 'mail.composer.mixin'
_inherit = 'mail.render.mixin'
_description = 'Mail Composer Mixin'
# Content
subject = fields.Char('Subject', compute='_compute_subject', readonly=False, store=True)
body = fields.Html('Contents', compute='_compute_body', render_engine='qweb', store=True, readonly=False, sanitize=False)
template_id = fields.Many2one('mail.template', 'Mail Template', domain="[('model', '=', render_model)]")
# Access
is_mail_template_editor = fields.Boolean('Is Editor', compute='_compute_is_mail_template_editor')
can_edit_body = fields.Boolean('Can Edit Body', compute='_compute_can_edit_body')
@api.depends('template_id')
def _compute_subject(self):
for composer_mixin in self:
if composer_mixin.template_id:
composer_mixin.subject = composer_mixin.template_id.subject
elif not composer_mixin.subject:
composer_mixin.subject = False
@api.depends('template_id')
def _compute_body(self):
for composer_mixin in self:
if composer_mixin.template_id:
composer_mixin.body = composer_mixin.template_id.body_html
elif not composer_mixin.body:
composer_mixin.body = False
@api.depends_context('uid')
def _compute_is_mail_template_editor(self):
is_mail_template_editor = self.env.is_admin() or self.env.user.has_group('mail.group_mail_template_editor')
for record in self:
record.is_mail_template_editor = is_mail_template_editor
@api.depends('template_id', 'is_mail_template_editor')
def _compute_can_edit_body(self):
for record in self:
record.can_edit_body = (
record.is_mail_template_editor
or not record.template_id
)
def _render_field(self, field, *args, **kwargs):
"""Render the given field on the given records.
        This method bypasses the access rights when needed, so that
        template values can be rendered in mass mode.
"""
if field not in self._fields:
raise ValueError(_("The field %s does not exist on the model %s", field, self._name))
composer_value = self[field]
if (
not self.template_id
or self.is_mail_template_editor
):
# Do not need to bypass the verification
return super(MailComposerMixin, self)._render_field(field, *args, **kwargs)
template_field = 'body_html' if field == 'body' else field
assert template_field in self.template_id._fields
template_value = self.template_id[template_field]
if field == 'body':
sanitized_template_value = tools.html_sanitize(template_value)
if not self.can_edit_body or composer_value in (sanitized_template_value, template_value):
# Take the previous body which we can trust without HTML editor reformatting
self.body = self.template_id.body_html
return super(MailComposerMixin, self.sudo())._render_field(field, *args, **kwargs)
elif composer_value == template_value:
# The value is the same as the mail template so we trust it
return super(MailComposerMixin, self.sudo())._render_field(field, *args, **kwargs)
return super(MailComposerMixin, self)._render_field(field, *args, **kwargs)
| avg_line_length: 44.591398 | max_line_length: 4,147 |
| size: 2,070 | ext: py | lang: PYTHON | branch: 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import datetime
from odoo import api, fields, models, tools
class ResConfigSettings(models.TransientModel):
""" Inherit the base settings to add a counter of failed email + configure
the alias domain. """
_inherit = 'res.config.settings'
fail_counter = fields.Integer('Fail Mail', readonly=True)
alias_domain = fields.Char('Alias Domain', help="If you have setup a catch-all email domain redirected to "
"the Odoo server, enter the domain name here.", config_parameter='mail.catchall.domain')
restrict_template_rendering = fields.Boolean(
'Restrict Template Rendering',
config_parameter='mail.restrict.template.rendering',
help='Users will still be able to render templates.\n'
'However only Mail Template Editors will be able to create new dynamic templates or modify existing ones.')
use_twilio_rtc_servers = fields.Boolean(
'Use Twilio ICE servers',
help="If you want to use twilio as TURN/STUN server provider",
config_parameter='mail.use_twilio_rtc_servers',
)
twilio_account_sid = fields.Char(
'Twilio Account SID',
config_parameter='mail.twilio_account_sid',
)
twilio_account_token = fields.Char(
'Twilio Account Auth Token',
config_parameter='mail.twilio_account_token',
)
@api.model
def get_values(self):
res = super(ResConfigSettings, self).get_values()
previous_date = datetime.datetime.now() - datetime.timedelta(days=30)
res.update(
fail_counter=self.env['mail.mail'].sudo().search_count([
('date', '>=', previous_date.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)),
('state', '=', 'exception')]),
)
return res
def set_values(self):
super(ResConfigSettings, self).set_values()
self.env['ir.config_parameter'].set_param("mail.catchall.domain", self.alias_domain or '')
| avg_line_length: 39.807692 | max_line_length: 2,070 |
| size: 261 | ext: py | lang: PYTHON | branch: 15.0 |
# -*- coding: utf-8 -*-
from odoo import fields, models
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window.view'
view_mode = fields.Selection(selection_add=[
('activity', 'Activity')
], ondelete={'activity': 'cascade'})
| avg_line_length: 26.1 | max_line_length: 261 |
| size: 23,160 | ext: py | lang: PYTHON | branch: 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
import logging
import pytz
from odoo import api, fields, models
from odoo.osv import expression
_logger = logging.getLogger(__name__)
class MailActivityMixin(models.AbstractModel):
""" Mail Activity Mixin is a mixin class to use if you want to add activities
management on a model. It works like the mail.thread mixin. It defines
an activity_ids one2many field toward activities using res_id and res_model_id.
Various related / computed fields are also added to have a global status of
activities on documents.
Activities come with a new JS widget for the form view. It is integrated in the
Chatter widget although it is a separate widget. It displays activities linked
to the current record and allow to schedule, edit and mark done activities.
Just include field activity_ids in the div.oe-chatter to use it.
There is also a kanban widget defined. It defines a small widget to integrate
in kanban vignettes. It allow to manage activities directly from the kanban
view. Use widget="kanban_activity" on activitiy_ids field in kanban view to
use it.
Some context keys allow to control the mixin behavior. Use those in some
specific cases like import
* ``mail_activity_automation_skip``: skip activities automation; it means
no automated activities will be generated, updated or unlinked, allowing
to save computation and avoid generating unwanted activities;
"""
_name = 'mail.activity.mixin'
_description = 'Activity Mixin'
def _default_activity_type(self):
"""Define a default fallback activity type when requested xml id wasn't found.
        Can be overridden to specify the default activity type of a model.
        It is only called in activity_schedule() for now.
"""
return self.env.ref('mail.mail_activity_data_todo', raise_if_not_found=False) \
or self.env['mail.activity.type'].search([('res_model', '=', self._name)], limit=1) \
or self.env['mail.activity.type'].search([('res_model', '=', False)], limit=1)
activity_ids = fields.One2many(
'mail.activity', 'res_id', 'Activities',
auto_join=True,
groups="base.group_user",)
activity_state = fields.Selection([
('overdue', 'Overdue'),
('today', 'Today'),
('planned', 'Planned')], string='Activity State',
compute='_compute_activity_state',
search='_search_activity_state',
groups="base.group_user",
help='Status based on activities\nOverdue: Due date is already passed\n'
'Today: Activity date is today\nPlanned: Future activities.')
activity_user_id = fields.Many2one(
'res.users', 'Responsible User',
related='activity_ids.user_id', readonly=False,
search='_search_activity_user_id',
groups="base.group_user")
activity_type_id = fields.Many2one(
'mail.activity.type', 'Next Activity Type',
related='activity_ids.activity_type_id', readonly=False,
search='_search_activity_type_id',
groups="base.group_user")
activity_type_icon = fields.Char('Activity Type Icon', related='activity_ids.icon')
activity_date_deadline = fields.Date(
'Next Activity Deadline',
compute='_compute_activity_date_deadline', search='_search_activity_date_deadline',
compute_sudo=False, readonly=True, store=False,
groups="base.group_user")
my_activity_date_deadline = fields.Date(
'My Activity Deadline',
compute='_compute_my_activity_date_deadline', search='_search_my_activity_date_deadline',
compute_sudo=False, readonly=True, groups="base.group_user")
activity_summary = fields.Char(
'Next Activity Summary',
related='activity_ids.summary', readonly=False,
search='_search_activity_summary',
groups="base.group_user",)
activity_exception_decoration = fields.Selection([
('warning', 'Alert'),
('danger', 'Error')],
compute='_compute_activity_exception_type',
search='_search_activity_exception_decoration',
help="Type of the exception activity on record.")
activity_exception_icon = fields.Char('Icon', help="Icon to indicate an exception activity.",
compute='_compute_activity_exception_type')
@api.depends('activity_ids.activity_type_id.decoration_type', 'activity_ids.activity_type_id.icon')
def _compute_activity_exception_type(self):
# prefetch all activity types for all activities, this will avoid any query in loops
self.mapped('activity_ids.activity_type_id.decoration_type')
for record in self:
activity_type_ids = record.activity_ids.mapped('activity_type_id')
exception_activity_type_id = False
for activity_type_id in activity_type_ids:
if activity_type_id.decoration_type == 'danger':
exception_activity_type_id = activity_type_id
break
if activity_type_id.decoration_type == 'warning':
exception_activity_type_id = activity_type_id
record.activity_exception_decoration = exception_activity_type_id and exception_activity_type_id.decoration_type
record.activity_exception_icon = exception_activity_type_id and exception_activity_type_id.icon
def _search_activity_exception_decoration(self, operator, operand):
return [('activity_ids.activity_type_id.decoration_type', operator, operand)]
@api.depends('activity_ids.state')
def _compute_activity_state(self):
for record in self:
states = record.activity_ids.mapped('state')
if 'overdue' in states:
record.activity_state = 'overdue'
elif 'today' in states:
record.activity_state = 'today'
elif 'planned' in states:
record.activity_state = 'planned'
else:
record.activity_state = False
def _search_activity_state(self, operator, value):
all_states = {'overdue', 'today', 'planned', False}
if operator == '=':
search_states = {value}
elif operator == '!=':
search_states = all_states - {value}
elif operator == 'in':
search_states = set(value)
elif operator == 'not in':
search_states = all_states - set(value)
reverse_search = False
if False in search_states:
            # If we search "activity_state = False", there might be a lot of records
            # (millions for some models), so instead of returning the list of IDs
# [(id, 'in', ids)] we will reverse the domain and return something like
# [(id, 'not in', ids)], so the list of ids is as small as possible
reverse_search = True
search_states = all_states - search_states
# Use number in the SQL query for performance purpose
integer_state_value = {
'overdue': -1,
'today': 0,
'planned': 1,
False: None,
}
search_states_int = {integer_state_value.get(s or False) for s in search_states}
query = """
SELECT res_id
FROM (
SELECT res_id,
-- Global activity state
MIN(
-- Compute the state of each individual activities
-- -1: overdue
-- 0: today
-- 1: planned
SIGN(EXTRACT(day from (
mail_activity.date_deadline - DATE_TRUNC('day', %(today_utc)s AT TIME ZONE res_partner.tz)
)))
)::INT AS activity_state
FROM mail_activity
LEFT JOIN res_users
ON res_users.id = mail_activity.user_id
LEFT JOIN res_partner
ON res_partner.id = res_users.partner_id
WHERE mail_activity.res_model = %(res_model_table)s
GROUP BY res_id
) AS res_record
WHERE %(search_states_int)s @> ARRAY[activity_state]
"""
self._cr.execute(
query,
{
'today_utc': pytz.utc.localize(datetime.utcnow()),
'res_model_table': self._name,
'search_states_int': list(search_states_int)
},
)
return [('id', 'not in' if reverse_search else 'in', [r[0] for r in self._cr.fetchall()])]
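    # Illustrative sketch (not part of the original module): this search method is
    # what makes domains on activity_state work on any model inheriting the mixin,
    # e.g. on a hypothetical model:
    #
    #     self.env['my.model'].search([('activity_state', '=', 'overdue')])
    #     # -> resolved through the SQL above into [('id', 'in', [...])]
    #     self.env['my.model'].search([('activity_state', '=', False)])
    #     # -> reversed into [('id', 'not in', [...])] to keep the id list small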
@api.depends('activity_ids.date_deadline')
def _compute_activity_date_deadline(self):
for record in self:
record.activity_date_deadline = record.activity_ids[:1].date_deadline
def _search_activity_date_deadline(self, operator, operand):
if operator == '=' and not operand:
return [('activity_ids', '=', False)]
return [('activity_ids.date_deadline', operator, operand)]
@api.model
def _search_activity_user_id(self, operator, operand):
return [('activity_ids.user_id', operator, operand)]
@api.model
def _search_activity_type_id(self, operator, operand):
return [('activity_ids.activity_type_id', operator, operand)]
@api.model
def _search_activity_summary(self, operator, operand):
return [('activity_ids.summary', operator, operand)]
@api.depends('activity_ids.date_deadline', 'activity_ids.user_id')
@api.depends_context('uid')
def _compute_my_activity_date_deadline(self):
for record in self:
record.my_activity_date_deadline = next((
activity.date_deadline
for activity in record.activity_ids
if activity.user_id.id == record.env.uid
), False)
def _search_my_activity_date_deadline(self, operator, operand):
activity_ids = self.env['mail.activity']._search([
('date_deadline', operator, operand),
('res_model', '=', self._name),
('user_id', '=', self.env.user.id)
])
return [('activity_ids', 'in', activity_ids)]
def write(self, vals):
# Delete activities of archived record.
if 'active' in vals and vals['active'] is False:
self.env['mail.activity'].sudo().search(
[('res_model', '=', self._name), ('res_id', 'in', self.ids)]
).unlink()
return super(MailActivityMixin, self).write(vals)
def unlink(self):
""" Override unlink to delete records activities through (res_model, res_id). """
record_ids = self.ids
result = super(MailActivityMixin, self).unlink()
self.env['mail.activity'].sudo().search(
[('res_model', '=', self._name), ('res_id', 'in', record_ids)]
).unlink()
return result
def _read_progress_bar(self, domain, group_by, progress_bar):
group_by_fname = group_by.partition(':')[0]
if not (progress_bar['field'] == 'activity_state' and self._fields[group_by_fname].store):
return super()._read_progress_bar(domain, group_by, progress_bar)
# optimization for 'activity_state'
# explicitly check access rights, since we bypass the ORM
self.check_access_rights('read')
self._flush_search(domain, fields=[group_by_fname], order='id')
self.env['mail.activity'].flush(['res_model', 'res_id', 'user_id', 'date_deadline'])
query = self._where_calc(domain)
self._apply_ir_rules(query, 'read')
gb = group_by.partition(':')[0]
annotated_groupbys = [
self._read_group_process_groupby(gb, query)
for gb in [group_by, 'activity_state']
]
groupby_dict = {gb['groupby']: gb for gb in annotated_groupbys}
for gb in annotated_groupbys:
if gb['field'] == 'activity_state':
gb['qualified_field'] = '"_last_activity_state"."activity_state"'
groupby_terms, _orderby_terms = self._read_group_prepare('activity_state', [], annotated_groupbys, query)
select_terms = [
'%s as "%s"' % (gb['qualified_field'], gb['groupby'])
for gb in annotated_groupbys
]
from_clause, where_clause, where_params = query.get_sql()
tz = self._context.get('tz') or self.env.user.tz or 'UTC'
select_query = """
SELECT 1 AS id, count(*) AS "__count", {fields}
FROM {from_clause}
JOIN (
SELECT res_id,
CASE
WHEN min(date_deadline - (now() AT TIME ZONE COALESCE(res_partner.tz, %s))::date) > 0 THEN 'planned'
WHEN min(date_deadline - (now() AT TIME ZONE COALESCE(res_partner.tz, %s))::date) < 0 THEN 'overdue'
WHEN min(date_deadline - (now() AT TIME ZONE COALESCE(res_partner.tz, %s))::date) = 0 THEN 'today'
ELSE null
END AS activity_state
FROM mail_activity
JOIN res_users ON (res_users.id = mail_activity.user_id)
JOIN res_partner ON (res_partner.id = res_users.partner_id)
WHERE res_model = '{model}'
GROUP BY res_id
) AS "_last_activity_state" ON ("{table}".id = "_last_activity_state".res_id)
WHERE {where_clause}
GROUP BY {group_by}
""".format(
fields=', '.join(select_terms),
from_clause=from_clause,
model=self._name,
table=self._table,
where_clause=where_clause or '1=1',
group_by=', '.join(groupby_terms),
)
num_from_params = from_clause.count('%s')
where_params[num_from_params:num_from_params] = [tz] * 3 # timezone after from parameters
self.env.cr.execute(select_query, where_params)
fetched_data = self.env.cr.dictfetchall()
self._read_group_resolve_many2x_fields(fetched_data, annotated_groupbys)
data = [
{key: self._read_group_prepare_data(key, val, groupby_dict)
for key, val in row.items()}
for row in fetched_data
]
return [
self._read_group_format_result(vals, annotated_groupbys, [group_by], domain)
for vals in data
]
def toggle_active(self):
""" Before archiving the record we should also remove its ongoing
activities. Otherwise they stay in the systray and concerning archived
records it makes no sense. """
record_to_deactivate = self.filtered(lambda rec: rec[rec._active_name])
if record_to_deactivate:
# use a sudo to bypass every access rights; all activities should be removed
self.env['mail.activity'].sudo().search([
('res_model', '=', self._name),
('res_id', 'in', record_to_deactivate.ids)
]).unlink()
return super(MailActivityMixin, self).toggle_active()
def activity_send_mail(self, template_id):
""" Automatically send an email based on the given mail.template, given
its ID. """
template = self.env['mail.template'].browse(template_id).exists()
if not template:
return False
for record in self:
record.message_post_with_template(
template_id,
composition_mode='comment'
)
return True
def activity_search(self, act_type_xmlids='', user_id=None, additional_domain=None):
""" Search automated activities on current record set, given a list of activity
types xml IDs. It is useful when dealing with specific types involved in automatic
activities management.
:param act_type_xmlids: list of activity types xml IDs
:param user_id: if set, restrict to activities of that user_id;
:param additional_domain: if set, filter on that domain;
"""
if self.env.context.get('mail_activity_automation_skip'):
return False
Data = self.env['ir.model.data'].sudo()
activity_types_ids = [type_id for type_id in (Data._xmlid_to_res_id(xmlid, raise_if_not_found=False) for xmlid in act_type_xmlids) if type_id]
if not any(activity_types_ids):
return False
domain = [
'&', '&', '&',
('res_model', '=', self._name),
('res_id', 'in', self.ids),
('automated', '=', True),
('activity_type_id', 'in', activity_types_ids)
]
if user_id:
domain = expression.AND([domain, [('user_id', '=', user_id)]])
if additional_domain:
domain = expression.AND([domain, additional_domain])
return self.env['mail.activity'].search(domain)
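    # Illustrative usage sketch (hypothetical record set; the XML id below is the
    # generic "To Do" type referenced in _default_activity_type()):
    #
    #     records.activity_search(
    #         ['mail.mail_activity_data_todo'],
    #         user_id=self.env.uid,
    #         additional_domain=[('date_deadline', '<', fields.Date.today())],
    #     )
    #     # -> mail.activity recordset of matching automated activities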
def activity_schedule(self, act_type_xmlid='', date_deadline=None, summary='', note='', **act_values):
""" Schedule an activity on each record of the current record set.
        This method allows providing act_type_xmlid, the xml_id of an activity
        type, instead of directly giving an activity_type_id. It is useful to
        avoid scattering "env.ref" calls in the code and lets the mixin handle
        access rights.
        :param date_deadline: the day the activity must be scheduled on;
          the timezone of the user must be considered when setting the deadline
"""
if self.env.context.get('mail_activity_automation_skip'):
return False
if not date_deadline:
date_deadline = fields.Date.context_today(self)
if isinstance(date_deadline, datetime):
_logger.warning("Scheduled deadline should be a date (got %s)", date_deadline)
if act_type_xmlid:
activity_type = self.env.ref(act_type_xmlid, raise_if_not_found=False) or self._default_activity_type()
else:
activity_type_id = act_values.get('activity_type_id', False)
activity_type = activity_type_id and self.env['mail.activity.type'].sudo().browse(activity_type_id)
model_id = self.env['ir.model']._get(self._name).id
activities = self.env['mail.activity']
for record in self:
create_vals = {
'activity_type_id': activity_type and activity_type.id,
'summary': summary or activity_type.summary,
'automated': True,
'note': note or activity_type.default_note,
'date_deadline': date_deadline,
'res_model_id': model_id,
'res_id': record.id,
}
create_vals.update(act_values)
if not create_vals.get('user_id'):
create_vals['user_id'] = activity_type.default_user_id.id or self.env.uid
activities |= self.env['mail.activity'].create(create_vals)
return activities
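    # Illustrative usage sketch (hypothetical record and values):
    #
    #     record.activity_schedule(
    #         'mail.mail_activity_data_todo',
    #         date_deadline=fields.Date.context_today(record),
    #         summary='Call back the customer',
    #         user_id=record.user_id.id,  # assuming the model has a user_id field
    #     )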
def _activity_schedule_with_view(self, act_type_xmlid='', date_deadline=None, summary='', views_or_xmlid='', render_context=None, **act_values):
""" Helper method: Schedule an activity on each record of the current record set.
        This method uses the same mechanism as `activity_schedule`, but provides
        2 additional parameters:
:param views_or_xmlid: record of ir.ui.view or string representing the xmlid
of the qweb template to render
:type views_or_xmlid: string or recordset
:param render_context: the values required to render the given qweb template
:type render_context: dict
"""
if self.env.context.get('mail_activity_automation_skip'):
return False
render_context = render_context or dict()
if isinstance(views_or_xmlid, str):
views = self.env.ref(views_or_xmlid, raise_if_not_found=False)
else:
views = views_or_xmlid
if not views:
return
activities = self.env['mail.activity']
for record in self:
render_context['object'] = record
note = views._render(render_context, engine='ir.qweb', minimal_qcontext=True)
activities |= record.activity_schedule(act_type_xmlid=act_type_xmlid, date_deadline=date_deadline, summary=summary, note=note, **act_values)
return activities
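    # Illustrative usage sketch: same as activity_schedule(), but the note is
    # rendered from a QWeb view; the view XML id and render_context are hypothetical:
    #
    #     records._activity_schedule_with_view(
    #         'mail.mail_activity_data_todo',
    #         views_or_xmlid='my_module.exception_activity_note',
    #         render_context={'reason': 'missing payment'},
    #     )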
def activity_reschedule(self, act_type_xmlids, user_id=None, date_deadline=None, new_user_id=None):
""" Reschedule some automated activities. Activities to reschedule are
selected based on type xml ids and optionally by user. Purpose is to be
able to
* update the deadline to date_deadline;
* update the responsible to new_user_id;
"""
if self.env.context.get('mail_activity_automation_skip'):
return False
Data = self.env['ir.model.data'].sudo()
activity_types_ids = [Data._xmlid_to_res_id(xmlid, raise_if_not_found=False) for xmlid in act_type_xmlids]
activity_types_ids = [act_type_id for act_type_id in activity_types_ids if act_type_id]
if not any(activity_types_ids):
return False
activities = self.activity_search(act_type_xmlids, user_id=user_id)
if activities:
write_vals = {}
if date_deadline:
write_vals['date_deadline'] = date_deadline
if new_user_id:
write_vals['user_id'] = new_user_id
activities.write(write_vals)
return activities
def activity_feedback(self, act_type_xmlids, user_id=None, feedback=None):
""" Set activities as done, limiting to some activity types and
optionally to a given user. """
if self.env.context.get('mail_activity_automation_skip'):
return False
Data = self.env['ir.model.data'].sudo()
activity_types_ids = [Data._xmlid_to_res_id(xmlid, raise_if_not_found=False) for xmlid in act_type_xmlids]
activity_types_ids = [act_type_id for act_type_id in activity_types_ids if act_type_id]
if not any(activity_types_ids):
return False
activities = self.activity_search(act_type_xmlids, user_id=user_id)
if activities:
activities.action_feedback(feedback=feedback)
return True
def activity_unlink(self, act_type_xmlids, user_id=None):
""" Unlink activities, limiting to some activity types and optionally
to a given user. """
if self.env.context.get('mail_activity_automation_skip'):
return False
Data = self.env['ir.model.data'].sudo()
activity_types_ids = [Data._xmlid_to_res_id(xmlid, raise_if_not_found=False) for xmlid in act_type_xmlids]
activity_types_ids = [act_type_id for act_type_id in activity_types_ids if act_type_id]
if not any(activity_types_ids):
return False
self.activity_search(act_type_xmlids, user_id=user_id).unlink()
return True
| avg_line_length: 45.770751 | max_line_length: 23,160 |
| size: 26,573 | ext: py | lang: PYTHON | branch: 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
import base64
import datetime
import logging
import psycopg2
import smtplib
import threading
import re
import pytz
from collections import defaultdict
from dateutil.parser import parse
from odoo import _, api, fields, models
from odoo import tools
from odoo.addons.base.models.ir_mail_server import MailDeliveryException
_logger = logging.getLogger(__name__)
class MailMail(models.Model):
""" Model holding RFC2822 email messages to send. This model also provides
facilities to queue and send new email messages. """
_name = 'mail.mail'
_description = 'Outgoing Mails'
_inherits = {'mail.message': 'mail_message_id'}
_order = 'id desc'
_rec_name = 'subject'
@api.model
def default_get(self, fields):
# protection for `default_type` values leaking from menu action context (e.g. for invoices)
# To remove when automatic context propagation is removed in web client
if self._context.get('default_type') not in type(self).message_type.base_field.selection:
self = self.with_context(dict(self._context, default_type=None))
if self._context.get('default_state') not in type(self).state.base_field.selection:
self = self.with_context(dict(self._context, default_state='outgoing'))
return super(MailMail, self).default_get(fields)
# content
mail_message_id = fields.Many2one('mail.message', 'Message', required=True, ondelete='cascade', index=True, auto_join=True)
body_html = fields.Text('Rich-text Contents', help="Rich-text/HTML message")
references = fields.Text('References', help='Message references, such as identifiers of previous messages', readonly=1)
headers = fields.Text('Headers', copy=False)
# Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification
# and during unlink() we will not cascade delete the parent and its attachments
is_notification = fields.Boolean('Notification Email', help='Mail has been created to notify people of an existing mail.message')
# recipients: include inactive partners (they may have been archived after
# the message was sent, but they should remain visible in the relation)
email_to = fields.Text('To', help='Message recipients (emails)')
email_cc = fields.Char('Cc', help='Carbon copy message recipients')
recipient_ids = fields.Many2many('res.partner', string='To (Partners)',
context={'active_test': False})
# process
state = fields.Selection([
('outgoing', 'Outgoing'),
('sent', 'Sent'),
('received', 'Received'),
('exception', 'Delivery Failed'),
('cancel', 'Cancelled'),
], 'Status', readonly=True, copy=False, default='outgoing')
failure_type = fields.Selection(selection=[
# generic
("unknown", "Unknown error"),
# mail
("mail_email_invalid", "Invalid email address"),
("mail_email_missing", "Missing email"),
("mail_smtp", "Connection failed (outgoing mail server problem)"),
# mass mode
("mail_bl", "Blacklisted Address"),
("mail_optout", "Opted Out"),
("mail_dup", "Duplicated Email"),
], string='Failure type')
failure_reason = fields.Text(
'Failure Reason', readonly=1, copy=False,
help="Failure reason. This is usually the exception thrown by the email server, stored to ease the debugging of mailing issues.")
auto_delete = fields.Boolean(
'Auto Delete',
help="This option permanently removes any track of email after it's been sent, including from the Technical menu in the Settings, in order to preserve storage space of your Odoo database.")
scheduled_date = fields.Char('Scheduled Send Date',
help="If set, the queue manager will send the email after the date. If not set, the email will be send as soon as possible. Unless a timezone is specified, it is considered as being in UTC timezone.")
@api.model_create_multi
def create(self, values_list):
# notification field: if not set, set if mail comes from an existing mail.message
for values in values_list:
if 'is_notification' not in values and values.get('mail_message_id'):
values['is_notification'] = True
if values.get('scheduled_date'):
parsed_datetime = self._parse_scheduled_datetime(values['scheduled_date'])
if parsed_datetime:
values['scheduled_date'] = parsed_datetime.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)
else:
values['scheduled_date'] = False
new_mails = super(MailMail, self).create(values_list)
new_mails_w_attach = self
for mail, values in zip(new_mails, values_list):
if values.get('attachment_ids'):
new_mails_w_attach += mail
if new_mails_w_attach:
new_mails_w_attach.mapped('attachment_ids').check(mode='read')
return new_mails
def write(self, vals):
if vals.get('scheduled_date'):
parsed_datetime = self._parse_scheduled_datetime(vals['scheduled_date'])
if parsed_datetime:
vals['scheduled_date'] = parsed_datetime.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)
else:
vals['scheduled_date'] = False
res = super(MailMail, self).write(vals)
if vals.get('attachment_ids'):
for mail in self:
mail.attachment_ids.check(mode='read')
return res
def unlink(self):
# cascade-delete the parent message for all mails that are not created for a notification
mail_msg_cascade_ids = [mail.mail_message_id.id for mail in self if not mail.is_notification]
res = super(MailMail, self).unlink()
if mail_msg_cascade_ids:
self.env['mail.message'].browse(mail_msg_cascade_ids).unlink()
return res
def action_retry(self):
self.filtered(lambda mail: mail.state == 'exception').mark_outgoing()
def mark_outgoing(self):
return self.write({'state': 'outgoing'})
def cancel(self):
return self.write({'state': 'cancel'})
@api.model
def process_email_queue(self, ids=None):
"""Send immediately queued messages, committing after each
message is sent - this is not transactional and should
not be called during another transaction!
:param list ids: optional list of emails ids to send. If passed
no search is performed, and these ids are used
instead.
:param dict context: if a 'filters' key is present in context,
this value will be used as an additional
filter to further restrict the outgoing
messages to send (by default all 'outgoing'
messages are sent).
"""
filters = [
'&',
('state', '=', 'outgoing'),
'|',
('scheduled_date', '=', False),
('scheduled_date', '<=', datetime.datetime.utcnow()),
]
if 'filters' in self._context:
filters.extend(self._context['filters'])
# TODO: make limit configurable
filtered_ids = self.search(filters, limit=10000).ids
if not ids:
ids = filtered_ids
else:
ids = list(set(filtered_ids) & set(ids))
ids.sort()
res = None
try:
# auto-commit except in testing mode
auto_commit = not getattr(threading.current_thread(), 'testing', False)
res = self.browse(ids).send(auto_commit=auto_commit)
except Exception:
_logger.exception("Failed processing mail queue")
return res
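    # Illustrative sketch (not part of the original module): this is the method the
    # mail queue cron calls; it can also be triggered manually, optionally with an
    # extra filter passed through the context (hypothetical filter below):
    #
    #     self.env['mail.mail'].with_context(
    #         filters=[('email_from', 'ilike', '@example.com')],
    #     ).process_email_queue()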
def _postprocess_sent_message(self, success_pids, failure_reason=False, failure_type=None):
"""Perform any post-processing necessary after sending ``mail``
successfully, including deleting it completely along with its
attachment if the ``auto_delete`` flag of the mail was set.
Overridden by subclasses for extra post-processing behaviors.
:return: True
"""
notif_mails_ids = [mail.id for mail in self if mail.is_notification]
if notif_mails_ids:
notifications = self.env['mail.notification'].search([
('notification_type', '=', 'email'),
('mail_mail_id', 'in', notif_mails_ids),
('notification_status', 'not in', ('sent', 'canceled'))
])
if notifications:
# find all notification linked to a failure
failed = self.env['mail.notification']
if failure_type:
failed = notifications.filtered(lambda notif: notif.res_partner_id not in success_pids)
(notifications - failed).sudo().write({
'notification_status': 'sent',
'failure_type': '',
'failure_reason': '',
})
if failed:
failed.sudo().write({
'notification_status': 'exception',
'failure_type': failure_type,
'failure_reason': failure_reason,
})
messages = notifications.mapped('mail_message_id').filtered(lambda m: m.is_thread_message())
# TDE TODO: could be great to notify message-based, not notifications-based, to lessen number of notifs
messages._notify_message_notification_update() # notify user that we have a failure
if not failure_type or failure_type in ['mail_email_invalid', 'mail_email_missing']: # if we have another error, we want to keep the mail.
mail_to_delete_ids = [mail.id for mail in self if mail.auto_delete]
self.browse(mail_to_delete_ids).sudo().unlink()
return True
def _parse_scheduled_datetime(self, scheduled_datetime):
""" Taking an arbitrary datetime (either as a date, a datetime or a string)
try to parse it and return a datetime timezoned to UTC.
If no specific timezone information is given, we consider it as being
given in UTC, as all datetime values given to the server. Trying to
guess its timezone based on user or flow would be strange as this is
not standard. When manually creating datetimes for mail.mail scheduled
        date, business code should ensure that either timezone info is set, or
        that the value has already been converted to UTC.
Using yearfirst when parsing str datetimes eases parser's job when
dealing with the hard-to-parse trio (01/04/09 -> ?). In most use cases
year will be given first as this is the expected default formatting.
:return datetime: parsed datetime (or False if parser failed)
"""
if isinstance(scheduled_datetime, datetime.datetime):
parsed_datetime = scheduled_datetime
elif isinstance(scheduled_datetime, datetime.date):
            parsed_datetime = datetime.datetime.combine(scheduled_datetime, datetime.time.min)
else:
try:
parsed_datetime = parse(scheduled_datetime, yearfirst=True)
except (ValueError, TypeError):
parsed_datetime = False
if parsed_datetime:
if not parsed_datetime.tzinfo:
parsed_datetime = pytz.utc.localize(parsed_datetime)
else:
try:
parsed_datetime = parsed_datetime.astimezone(pytz.utc)
except Exception:
pass
return parsed_datetime
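    # Illustrative sketch of the parsing rules above (hedged: exact results depend
    # on dateutil's parser):
    #
    #     self._parse_scheduled_datetime('2022-06-01 10:00:00')
    #     # -> datetime(2022, 6, 1, 10, 0, tzinfo=UTC)   (naive value assumed UTC)
    #     self._parse_scheduled_datetime('2022-06-01 10:00:00+02:00')
    #     # -> datetime(2022, 6, 1, 8, 0, tzinfo=UTC)    (converted to UTC)
    #     self._parse_scheduled_datetime('not a date')
    #     # -> False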
# ------------------------------------------------------
# mail_mail formatting, tools and send mechanism
# ------------------------------------------------------
def _send_prepare_body(self):
"""Return a specific ir_email body. The main purpose of this method
is to be inherited to add custom content depending on some module."""
self.ensure_one()
return self.body_html or ''
def _send_prepare_values(self, partner=None):
"""Return a dictionary for specific email values, depending on a
partner, or generic to the whole recipients given by mail.email_to.
:param Model partner: specific recipient partner
"""
self.ensure_one()
body = self._send_prepare_body()
body_alternative = tools.html2plaintext(body)
if partner:
email_to = [tools.formataddr((partner.name or 'False', partner.email or 'False'))]
else:
email_to = tools.email_split_and_format(self.email_to)
res = {
'body': body,
'body_alternative': body_alternative,
'email_to': email_to,
}
return res
def _split_by_mail_configuration(self):
"""Group the <mail.mail> based on their "email_from" and their "mail_server_id".
The <mail.mail> will have the "same sending configuration" if they have the same
mail server or the same mail from. For performance purpose, we can use an SMTP
session in batch and therefore we need to group them by the parameter that will
influence the mail server used.
The same "sending configuration" may repeat in order to limit batch size
according to the `mail.session.batch.size` system parameter.
Return iterators over
mail_server_id, email_from, Records<mail.mail>.ids
"""
mail_values = self.read(['id', 'email_from', 'mail_server_id'])
# First group the <mail.mail> per mail_server_id and per email_from
group_per_email_from = defaultdict(list)
for values in mail_values:
mail_server_id = values['mail_server_id'][0] if values['mail_server_id'] else False
group_per_email_from[(mail_server_id, values['email_from'])].append(values['id'])
# Then find the mail server for each email_from and group the <mail.mail>
# per mail_server_id and smtp_from
mail_servers = self.env['ir.mail_server'].sudo().search([], order='sequence')
group_per_smtp_from = defaultdict(list)
for (mail_server_id, email_from), mail_ids in group_per_email_from.items():
if not mail_server_id:
mail_server, smtp_from = self.env['ir.mail_server']._find_mail_server(email_from, mail_servers)
mail_server_id = mail_server.id if mail_server else False
else:
smtp_from = email_from
group_per_smtp_from[(mail_server_id, smtp_from)].extend(mail_ids)
sys_params = self.env['ir.config_parameter'].sudo()
batch_size = int(sys_params.get_param('mail.session.batch.size', 1000))
for (mail_server_id, smtp_from), record_ids in group_per_smtp_from.items():
for batch_ids in tools.split_every(batch_size, record_ids):
yield mail_server_id, smtp_from, batch_ids
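    # Illustrative sketch (made-up ids and addresses): mails sharing a mail server
    # and sender address end up in the same batch, so one SMTP session can be
    # reused for all of them in send() below:
    #
    #     for mail_server_id, smtp_from, batch_ids in mails._split_by_mail_configuration():
    #         # e.g. (1, 'notifications@example.com', [42, 43])
    #         #      (False, 'other@example.com', [44])
    #         ...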
def send(self, auto_commit=False, raise_exception=False):
""" Sends the selected emails immediately, ignoring their current
state (mails that have already been sent should not be passed
unless they should actually be re-sent).
Emails successfully delivered are marked as 'sent', and those
            that fail to be delivered are marked as 'exception', and the
corresponding error mail is output in the server logs.
:param bool auto_commit: whether to force a commit of the mail status
after sending each mail (meant only for scheduler processing);
should never be True during normal transactions (default: False)
:param bool raise_exception: whether to raise an exception if the
email sending process has failed
:return: True
"""
for mail_server_id, smtp_from, batch_ids in self._split_by_mail_configuration():
smtp_session = None
try:
smtp_session = self.env['ir.mail_server'].connect(mail_server_id=mail_server_id, smtp_from=smtp_from)
except Exception as exc:
if raise_exception:
# To be consistent and backward compatible with mail_mail.send() raised
# exceptions, it is encapsulated into an Odoo MailDeliveryException
raise MailDeliveryException(_('Unable to connect to SMTP Server'), exc)
else:
batch = self.browse(batch_ids)
batch.write({'state': 'exception', 'failure_reason': exc})
batch._postprocess_sent_message(success_pids=[], failure_type="mail_smtp")
else:
self.browse(batch_ids)._send(
auto_commit=auto_commit,
raise_exception=raise_exception,
smtp_session=smtp_session)
_logger.info(
'Sent batch %s emails via mail server ID #%s',
len(batch_ids), mail_server_id)
finally:
if smtp_session:
smtp_session.quit()
def _send(self, auto_commit=False, raise_exception=False, smtp_session=None):
IrMailServer = self.env['ir.mail_server']
IrAttachment = self.env['ir.attachment']
for mail_id in self.ids:
success_pids = []
failure_type = None
processing_pid = None
mail = None
try:
mail = self.browse(mail_id)
if mail.state != 'outgoing':
if mail.state != 'exception' and mail.auto_delete:
mail.sudo().unlink()
continue
                # remove attachments if the user sent the link with the access_token
body = mail.body_html or ''
attachments = mail.attachment_ids
for link in re.findall(r'/web/(?:content|image)/([0-9]+)', body):
attachments = attachments - IrAttachment.browse(int(link))
# load attachment binary data with a separate read(), as prefetching all
                # `datas` (binary field) could bloat the browse cache, triggering
# soft/hard mem limits with temporary data.
attachments = [(a['name'], base64.b64decode(a['datas']), a['mimetype'])
for a in attachments.sudo().read(['name', 'datas', 'mimetype']) if a['datas'] is not False]
# specific behavior to customize the send email for notified partners
email_list = []
if mail.email_to:
email_list.append(mail._send_prepare_values())
for partner in mail.recipient_ids:
values = mail._send_prepare_values(partner=partner)
values['partner_id'] = partner
email_list.append(values)
# headers
headers = {}
ICP = self.env['ir.config_parameter'].sudo()
bounce_alias = ICP.get_param("mail.bounce.alias")
catchall_domain = ICP.get_param("mail.catchall.domain")
if bounce_alias and catchall_domain:
headers['Return-Path'] = '%s@%s' % (bounce_alias, catchall_domain)
if mail.headers:
try:
headers.update(ast.literal_eval(mail.headers))
except Exception:
pass
# Writing on the mail object may fail (e.g. lock on user) which
# would trigger a rollback *after* actually sending the email.
# To avoid sending twice the same email, provoke the failure earlier
mail.write({
'state': 'exception',
                    'failure_reason': _('Error without exception. Probably due to sending an email without computed recipients.'),
})
# Update notification in a transient exception state to avoid concurrent
# update in case an email bounces while sending all emails related to current
# mail record.
notifs = self.env['mail.notification'].search([
('notification_type', '=', 'email'),
('mail_mail_id', 'in', mail.ids),
('notification_status', 'not in', ('sent', 'canceled'))
])
if notifs:
                    notif_msg = _('Error without exception. Probably due to a concurrent access update of notification records. Please see with an administrator.')
notifs.sudo().write({
'notification_status': 'exception',
'failure_type': 'unknown',
'failure_reason': notif_msg,
})
# `test_mail_bounce_during_send`, force immediate update to obtain the lock.
# see rev. 56596e5240ef920df14d99087451ce6f06ac6d36
notifs.flush(fnames=['notification_status', 'failure_type', 'failure_reason'], records=notifs)
# build an RFC2822 email.message.Message object and send it without queuing
res = None
# TDE note: could be great to pre-detect missing to/cc and skip sending it
# to go directly to failed state update
for email in email_list:
msg = IrMailServer.build_email(
email_from=mail.email_from,
email_to=email.get('email_to'),
subject=mail.subject,
body=email.get('body'),
body_alternative=email.get('body_alternative'),
email_cc=tools.email_split(mail.email_cc),
reply_to=mail.reply_to,
attachments=attachments,
message_id=mail.message_id,
references=mail.references,
object_id=mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
subtype='html',
subtype_alternative='plain',
headers=headers)
processing_pid = email.pop("partner_id", None)
try:
res = IrMailServer.send_email(
msg, mail_server_id=mail.mail_server_id.id, smtp_session=smtp_session)
if processing_pid:
success_pids.append(processing_pid)
processing_pid = None
except AssertionError as error:
if str(error) == IrMailServer.NO_VALID_RECIPIENT:
# if we have a list of void emails for email_list -> email missing, otherwise generic email failure
if not email.get('email_to') and failure_type != "mail_email_invalid":
failure_type = "mail_email_missing"
else:
failure_type = "mail_email_invalid"
# No valid recipient found for this particular
# mail item -> ignore error to avoid blocking
# delivery to next recipients, if any. If this is
# the only recipient, the mail will show as failed.
_logger.info("Ignoring invalid recipients for mail.mail %s: %s",
mail.message_id, email.get('email_to'))
else:
raise
if res: # mail has been sent at least once, no major exception occurred
mail.write({'state': 'sent', 'message_id': res, 'failure_reason': False})
_logger.info('Mail with ID %r and Message-Id %r successfully sent', mail.id, mail.message_id)
# /!\ can't use mail.state here, as mail.refresh() will cause an error
# see revid:[email protected] in 6.1
mail._postprocess_sent_message(success_pids=success_pids, failure_type=failure_type)
except MemoryError:
# prevent catching transient MemoryErrors, bubble up to notify user or abort cron job
# instead of marking the mail as failed
_logger.exception(
'MemoryError while processing mail with ID %r and Msg-Id %r. Consider raising the --limit-memory-hard startup option',
mail.id, mail.message_id)
# mail status will stay ongoing since the transaction will be rolled back
raise
except (psycopg2.Error, smtplib.SMTPServerDisconnected):
# If an error with the database or SMTP session occurs, chances are that the cursor
# or SMTP session are unusable, causing further errors when trying to save the state.
_logger.exception(
'Exception while processing mail with ID %r and Msg-Id %r.',
mail.id, mail.message_id)
raise
except Exception as e:
failure_reason = tools.ustr(e)
_logger.exception('failed sending mail (id: %s) due to %s', mail.id, failure_reason)
mail.write({'state': 'exception', 'failure_reason': failure_reason})
mail._postprocess_sent_message(success_pids=success_pids, failure_reason=failure_reason, failure_type='unknown')
if raise_exception:
if isinstance(e, (AssertionError, UnicodeEncodeError)):
if isinstance(e, UnicodeEncodeError):
value = "Invalid text: %s" % e.object
else:
value = '. '.join(e.args)
raise MailDeliveryException(value)
raise
if auto_commit is True:
self._cr.commit()
return True
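# Illustrative sketch, not part of the original file: how the outgoing queue
# handled above is typically drained from a shell or a scheduled job (the
# exact caller is an assumption):
#
#     pending = env['mail.mail'].search([('state', '=', 'outgoing')], limit=100)
#     pending.send(auto_commit=False, raise_exception=False)
#
# With raise_exception=False, delivery failures are recorded on the records
# ('exception' state plus failure_reason) instead of aborting the whole batch.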
| 51.003839 | 26,573 |
7,174 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from dateutil.relativedelta import relativedelta
from odoo import _, api, fields, models
from odoo.exceptions import UserError, ValidationError
class ServerActions(models.Model):
""" Add email option in server actions. """
_name = 'ir.actions.server'
_description = 'Server Action'
_inherit = ['ir.actions.server']
state = fields.Selection(selection_add=[
('email', 'Send Email'),
('followers', 'Add Followers'),
('next_activity', 'Create Next Activity'),
], ondelete={'email': 'cascade', 'followers': 'cascade', 'next_activity': 'cascade'})
# Followers
partner_ids = fields.Many2many('res.partner', string='Add Followers')
# Template
template_id = fields.Many2one(
'mail.template', 'Email Template', ondelete='set null',
domain="[('model_id', '=', model_id)]",
)
# Next Activity
activity_type_id = fields.Many2one(
'mail.activity.type', string='Activity',
domain="['|', ('res_model', '=', False), ('res_model', '=', model_name)]",
ondelete='restrict')
activity_summary = fields.Char('Summary')
activity_note = fields.Html('Note')
activity_date_deadline_range = fields.Integer(string='Due Date In')
activity_date_deadline_range_type = fields.Selection([
('days', 'Days'),
('weeks', 'Weeks'),
('months', 'Months'),
], string='Due type', default='days')
activity_user_type = fields.Selection([
('specific', 'Specific User'),
('generic', 'Generic User From Record')], default="specific",
help="Use 'Specific User' to always assign the same user on the next activity. Use 'Generic User From Record' to specify the field name of the user to choose on the record.")
activity_user_id = fields.Many2one('res.users', string='Responsible')
activity_user_field_name = fields.Char('User field name', help="Technical name of the user on the record", default="user_id")
@api.onchange('activity_date_deadline_range')
def _onchange_activity_date_deadline_range(self):
if self.activity_date_deadline_range < 0:
raise UserError(_("The 'Due Date In' value can't be negative."))
@api.constrains('state', 'model_id')
def _check_mail_thread(self):
for action in self:
if action.state == 'followers' and not action.model_id.is_mail_thread:
raise ValidationError(_("Add Followers can only be done on a mail thread model"))
@api.constrains('state', 'model_id')
def _check_activity_mixin(self):
for action in self:
if action.state == 'next_activity' and not action.model_id.is_mail_thread:
raise ValidationError(_("A next activity can only be planned on models that use the chatter"))
def _run_action_followers_multi(self, eval_context=None):
Model = self.env[self.model_name]
if self.partner_ids and hasattr(Model, 'message_subscribe'):
records = Model.browse(self._context.get('active_ids', self._context.get('active_id')))
records.message_subscribe(partner_ids=self.partner_ids.ids)
return False
def _is_recompute(self):
"""When an activity is set on update of a record,
update might be triggered many times by recomputes.
When need to know it to skip these steps.
Except if the computed field is supposed to trigger the action
"""
records = self.env[self.model_name].browse(
self._context.get('active_ids', self._context.get('active_id')))
old_values = self._context.get('old_values')
if old_values:
domain_post = self._context.get('domain_post')
tracked_fields = []
if domain_post:
for leaf in domain_post:
if isinstance(leaf, (tuple, list)):
tracked_fields.append(leaf[0])
fields_to_check = [field for record, field_names in old_values.items() for field in field_names if field not in tracked_fields]
if fields_to_check:
field = records._fields[fields_to_check[0]]
# Pick an arbitrary field; if it is marked to be recomputed,
# it means we are in an extraneous write triggered by the recompute.
# In this case, we should not create a new activity.
if records & self.env.records_to_compute(field):
return True
return False
def _run_action_email(self, eval_context=None):
# TDE CLEANME: when going to new api with server action, remove action
if not self.template_id or not self._context.get('active_id') or self._is_recompute():
return False
# Clean context from default_type to avoid making attachment
# with wrong values in subsequent operations
cleaned_ctx = dict(self.env.context)
cleaned_ctx.pop('default_type', None)
cleaned_ctx.pop('default_parent_id', None)
self.template_id.with_context(cleaned_ctx).send_mail(self._context.get('active_id'), force_send=False,
raise_exception=False)
return False
def _run_action_next_activity(self, eval_context=None):
if not self.activity_type_id or not self._context.get('active_id') or self._is_recompute():
return False
records = self.env[self.model_name].browse(self._context.get('active_ids', self._context.get('active_id')))
vals = {
'summary': self.activity_summary or '',
'note': self.activity_note or '',
'activity_type_id': self.activity_type_id.id,
}
if self.activity_date_deadline_range > 0:
vals['date_deadline'] = fields.Date.context_today(self) + relativedelta(**{
self.activity_date_deadline_range_type: self.activity_date_deadline_range})
for record in records:
user = False
if self.activity_user_type == 'specific':
user = self.activity_user_id
elif self.activity_user_type == 'generic' and self.activity_user_field_name in record:
user = record[self.activity_user_field_name]
if user:
vals['user_id'] = user.id
record.activity_schedule(**vals)
return False
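# Illustrative sketch, not part of the original file: creating a "next
# activity" server action in code; the values mirror the fields defined above
# and the referenced activity type XML id is an assumption.
#
#     env['ir.actions.server'].create({
#         'name': 'Plan follow-up call',
#         'model_id': env['ir.model']._get_id('res.partner'),
#         'state': 'next_activity',
#         'activity_type_id': env.ref('mail.mail_activity_data_call').id,
#         'activity_summary': 'Call back the customer',
#         'activity_date_deadline_range': 2,
#         'activity_date_deadline_range_type': 'days',
#         'activity_user_type': 'generic',
#         'activity_user_field_name': 'user_id',
#     })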
@api.model
def _get_eval_context(self, action=None):
""" Override the method giving the evaluation context but also the
context used in all subsequent calls. Add the mail_notify_force_send
key set to False in the context. This way all notification emails linked
to the currently executed action will be set in the queue instead of
sent directly. This avoids possible breaks in transactions. """
eval_context = super(ServerActions, self)._get_eval_context(action=action)
ctx = dict(eval_context['env'].context)
ctx['mail_notify_force_send'] = False
eval_context['env'].context = ctx
return eval_context
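# Illustrative note (assumption): because mail_notify_force_send is forced to
# False above, a message_post() executed from the action's code, e.g.
#
#     record.message_post(body='Processed', subtype_xmlid='mail.mt_comment')
#
# queues its notification emails as 'outgoing' mail.mail records instead of
# sending them inside the current transaction.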
| 48.802721 | 7,174 |
55,983 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import re
from collections import defaultdict
from binascii import Error as binascii_error
from odoo import _, api, Command, fields, models, modules, tools
from odoo.exceptions import AccessError
from odoo.osv import expression
from odoo.tools.misc import clean_context
_logger = logging.getLogger(__name__)
_image_dataurl = re.compile(r'(data:image/[a-z]+?);base64,([a-z0-9+/\n]{3,}=*)\n*([\'"])(?: data-filename="([^"]*)")?', re.I)
class Message(models.Model):
""" Message model: notification (system, replacing res.log notifications),
comment (user input), email (incoming emails) and user_notification
(user-specific notification)
Note:: State management / Error codes / Failure types summary
* mail.notification
* notification_status
'ready', 'sent', 'bounce', 'exception', 'canceled'
* notification_type
'inbox', 'email', 'sms' (SMS addon), 'snail' (snailmail addon)
* failure_type
# generic
unknown,
# mail
"mail_email_invalid", "mail_smtp", "mail_email_missing"
# sms (SMS addon)
'sms_number_missing', 'sms_number_format', 'sms_credit',
'sms_server', 'sms_acc'
# snailmail (snailmail addon)
'sn_credit', 'sn_trial', 'sn_price', 'sn_fields',
'sn_format', 'sn_error'
* mail.mail
* state
'outgoing', 'sent', 'received', 'exception', 'cancel'
* failure_reason: text
* sms.sms (SMS addon)
* state
'outgoing', 'sent', 'error', 'canceled'
* error_code
'sms_number_missing', 'sms_number_format', 'sms_credit',
'sms_server', 'sms_acc',
# mass mode specific codes
'sms_blacklist', 'sms_duplicate'
* snailmail.letter (snailmail addon)
* state
'pending', 'sent', 'error', 'canceled'
* error_code
'CREDIT_ERROR', 'TRIAL_ERROR', 'NO_PRICE_AVAILABLE', 'FORMAT_ERROR',
'UNKNOWN_ERROR',
See ``mailing.trace`` model in mass_mailing application for mailing trace
information.
"""
_name = 'mail.message'
_description = 'Message'
_order = 'id desc'
_rec_name = 'record_name'
@api.model
def default_get(self, fields):
res = super(Message, self).default_get(fields)
missing_author = 'author_id' in fields and 'author_id' not in res
missing_email_from = 'email_from' in fields and 'email_from' not in res
if missing_author or missing_email_from:
author_id, email_from = self.env['mail.thread']._message_compute_author(res.get('author_id'), res.get('email_from'), raise_exception=False)
if missing_email_from:
res['email_from'] = email_from
if missing_author:
res['author_id'] = author_id
return res
# content
subject = fields.Char('Subject')
date = fields.Datetime('Date', default=fields.Datetime.now)
body = fields.Html('Contents', default='', sanitize_style=True)
description = fields.Char(
'Short description', compute="_compute_description",
help='Message description: either the subject, or the beginning of the body')
attachment_ids = fields.Many2many(
'ir.attachment', 'message_attachment_rel',
'message_id', 'attachment_id',
string='Attachments',
help='Attachments are linked to a document through model / res_id and to the message '
'through this field.')
parent_id = fields.Many2one(
'mail.message', 'Parent Message', index=True, ondelete='set null',
help="Initial thread message.")
child_ids = fields.One2many('mail.message', 'parent_id', 'Child Messages')
# related document
model = fields.Char('Related Document Model', index=True)
res_id = fields.Many2oneReference('Related Document ID', index=True, model_field='model')
record_name = fields.Char('Message Record Name', help="Display name of the related document.")
# characteristics
message_type = fields.Selection([
('email', 'Email'),
('comment', 'Comment'),
('notification', 'System notification'),
('user_notification', 'User Specific Notification')],
'Type', required=True, default='email',
help="Message type: email for email message, notification for system "
"message, comment for other messages such as user replies",
)
subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', ondelete='set null', index=True)
mail_activity_type_id = fields.Many2one(
'mail.activity.type', 'Mail Activity Type',
index=True, ondelete='set null')
is_internal = fields.Boolean('Employee Only', help='Hide to public / portal users, independently from subtype configuration.')
# origin
email_from = fields.Char('From', help="Email address of the sender. This field is set when no matching partner is found and replaces the author_id field in the chatter.")
author_id = fields.Many2one(
'res.partner', 'Author', index=True, ondelete='set null',
help="Author of the message. If not set, email_from may hold an email address that did not match any partner.")
author_avatar = fields.Binary("Author's avatar", related='author_id.avatar_128', depends=['author_id'], readonly=False)
author_guest_id = fields.Many2one(string="Guest", comodel_name='mail.guest')
is_current_user_or_guest_author = fields.Boolean(compute='_compute_is_current_user_or_guest_author')
# recipients: include inactive partners (they may have been archived after
# the message was sent, but they should remain visible in the relation)
partner_ids = fields.Many2many('res.partner', string='Recipients', context={'active_test': False})
# list of partner having a notification. Caution: list may change over time because of notif gc cron.
# mainly useful for testing
notified_partner_ids = fields.Many2many(
'res.partner', 'mail_notification', string='Partners with Need Action',
context={'active_test': False}, depends=['notification_ids'], copy=False)
needaction = fields.Boolean(
'Need Action', compute='_compute_needaction', search='_search_needaction',
help='Need Action')
has_error = fields.Boolean(
'Has error', compute='_compute_has_error', search='_search_has_error',
help='Has error')
# notifications
notification_ids = fields.One2many(
'mail.notification', 'mail_message_id', 'Notifications',
auto_join=True, copy=False, depends=['notified_partner_ids'])
# user interface
starred_partner_ids = fields.Many2many(
'res.partner', 'mail_message_res_partner_starred_rel', string='Favorited By')
starred = fields.Boolean(
'Starred', compute='_compute_starred', search='_search_starred', compute_sudo=False,
help='Current user has a starred notification linked to this message')
# tracking
tracking_value_ids = fields.One2many(
'mail.tracking.value', 'mail_message_id',
string='Tracking values',
groups="base.group_system",
help='Tracked values are stored in a separate model. This field allows reconstructing '
'the tracking and generating statistics on the model.')
# mail gateway
reply_to_force_new = fields.Boolean(
'No threading for answers',
help='If true, answers do not go in the original document discussion thread. Instead, the reply_to in the tracking message-id is checked and the answer is redirected accordingly. This has an impact on the generated message-id.')
message_id = fields.Char('Message-Id', help='Message unique identifier', index=True, readonly=1, copy=False)
reply_to = fields.Char('Reply-To', help='Reply email address. Setting the reply_to bypasses the automatic thread creation.')
mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing mail server')
# keep notification layout informations to be able to generate mail again
email_layout_xmlid = fields.Char('Layout', copy=False) # xml id of layout
add_sign = fields.Boolean(default=True)
# `test_adv_activity`, `test_adv_activity_full`, `test_message_assignation_inbox`,...
# By setting an inverse for mail.mail_message_id, the number of SQL queries done by `modified` is reduced.
# 'mail.mail' inherits from `mail.message`: `_inherits = {'mail.message': 'mail_message_id'}`
# Therefore, when changing a field on `mail.message`, this triggers the modification of the same field on `mail.mail`
# By setting up the inverse one2many, we avoid to have to do a search to find the mails linked to the `mail.message`
# as the cache value for this inverse one2many is up-to-date.
# Besides, for new messages and for messages that never send emails, there is no mail at all, so the search would find nothing.
mail_ids = fields.One2many('mail.mail', 'mail_message_id', string='Mails', groups="base.group_system")
canned_response_ids = fields.One2many('mail.shortcode', 'message_ids', string="Canned Responses", store=False)
reaction_ids = fields.One2many('mail.message.reaction', 'message_id', string="Reactions", groups="base.group_system")
def _compute_description(self):
for message in self:
if message.subject:
message.description = message.subject
else:
plaintext_ct = '' if not message.body else tools.html2plaintext(message.body)
message.description = plaintext_ct[:30] + '%s' % (' [...]' if len(plaintext_ct) >= 30 else '')
@api.depends('author_id', 'author_guest_id')
@api.depends_context('guest', 'uid')
def _compute_is_current_user_or_guest_author(self):
user = self.env.user
guest = self.env['mail.guest']._get_guest_from_context()
for message in self:
if not user._is_public() and (message.author_id and message.author_id == user.partner_id):
message.is_current_user_or_guest_author = True
elif message.author_guest_id and message.author_guest_id == guest:
message.is_current_user_or_guest_author = True
else:
message.is_current_user_or_guest_author = False
def _compute_needaction(self):
""" Need action on a mail.message = notified on my channel """
my_messages = self.env['mail.notification'].sudo().search([
('mail_message_id', 'in', self.ids),
('res_partner_id', '=', self.env.user.partner_id.id),
('is_read', '=', False)]).mapped('mail_message_id')
for message in self:
message.needaction = message in my_messages
@api.model
def _search_needaction(self, operator, operand):
is_read = False if operator == '=' and operand else True
notification_ids = self.env['mail.notification']._search([('res_partner_id', '=', self.env.user.partner_id.id), ('is_read', '=', is_read)])
return [('notification_ids', 'in', notification_ids)]
def _compute_has_error(self):
error_from_notification = self.env['mail.notification'].sudo().search([
('mail_message_id', 'in', self.ids),
('notification_status', 'in', ('bounce', 'exception'))]).mapped('mail_message_id')
for message in self:
message.has_error = message in error_from_notification
def _search_has_error(self, operator, operand):
if operator == '=' and operand:
return [('notification_ids.notification_status', 'in', ('bounce', 'exception'))]
return ['!', ('notification_ids.notification_status', 'in', ('bounce', 'exception'))] # this won't work and will be equivalent to "not in" because of ORM restrictions. Don't use "has_error = False"
@api.depends('starred_partner_ids')
@api.depends_context('uid')
def _compute_starred(self):
""" Compute if the message is starred by the current user. """
# TDE FIXME: use SQL
starred = self.sudo().filtered(lambda msg: self.env.user.partner_id in msg.starred_partner_ids)
for message in self:
message.starred = message in starred
@api.model
def _search_starred(self, operator, operand):
if operator == '=' and operand:
return [('starred_partner_ids', 'in', [self.env.user.partner_id.id])]
return [('starred_partner_ids', 'not in', [self.env.user.partner_id.id])]
# ------------------------------------------------------
# CRUD / ORM
# ------------------------------------------------------
def init(self):
self._cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'mail_message_model_res_id_idx'""")
if not self._cr.fetchone():
self._cr.execute("""CREATE INDEX mail_message_model_res_id_idx ON mail_message (model, res_id)""")
@api.model
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
""" Override that adds specific access rights of mail.message, to remove
ids uid could not see according to our custom rules. Please refer to
check_access_rule for more details about those rules.
Non-employee users only see messages with a subtype (i.e. they do not
see internal logs).
After having received ids of a classic search, keep only:
- if author_id == pid, uid is the author, OR
- uid belongs to a notified channel, OR
- uid is in the specified recipients, OR
- uid has a notification on the message
- otherwise: remove the id
"""
# Rules do not apply to administrator
if self.env.is_superuser():
return super(Message, self)._search(
args, offset=offset, limit=limit, order=order,
count=count, access_rights_uid=access_rights_uid)
# Non-employees only see messages with a subtype and that are not internal
if not self.env['res.users'].has_group('base.group_user'):
args = expression.AND([self._get_search_domain_share(), args])
# Perform a super with count as False, to have the ids, not a counter
ids = super(Message, self)._search(
args, offset=offset, limit=limit, order=order,
count=False, access_rights_uid=access_rights_uid)
if not ids and count:
return 0
elif not ids:
return ids
pid = self.env.user.partner_id.id
author_ids, partner_ids, allowed_ids = set([]), set([]), set([])
model_ids = {}
# check read access rights before checking the actual rules on the given ids
super(Message, self.with_user(access_rights_uid or self._uid)).check_access_rights('read')
self.flush(['model', 'res_id', 'author_id', 'message_type', 'partner_ids'])
self.env['mail.notification'].flush(['mail_message_id', 'res_partner_id'])
for sub_ids in self._cr.split_for_in_conditions(ids):
self._cr.execute("""
SELECT DISTINCT m.id, m.model, m.res_id, m.author_id, m.message_type,
COALESCE(partner_rel.res_partner_id, needaction_rel.res_partner_id)
FROM "%s" m
LEFT JOIN "mail_message_res_partner_rel" partner_rel
ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = %%(pid)s
LEFT JOIN "mail_notification" needaction_rel
ON needaction_rel.mail_message_id = m.id AND needaction_rel.res_partner_id = %%(pid)s
WHERE m.id = ANY (%%(ids)s)""" % self._table, dict(pid=pid, ids=list(sub_ids)))
for msg_id, rmod, rid, author_id, message_type, partner_id in self._cr.fetchall():
if author_id == pid:
author_ids.add(msg_id)
elif partner_id == pid:
partner_ids.add(msg_id)
elif rmod and rid and message_type != 'user_notification':
model_ids.setdefault(rmod, {}).setdefault(rid, set()).add(msg_id)
allowed_ids = self._find_allowed_doc_ids(model_ids)
final_ids = author_ids | partner_ids | allowed_ids
if count:
return len(final_ids)
else:
# re-construct a list based on ids, because set did not keep the original order
id_list = [id for id in ids if id in final_ids]
return id_list
@api.model
def _find_allowed_model_wise(self, doc_model, doc_dict):
doc_ids = list(doc_dict)
allowed_doc_ids = self.env[doc_model].with_context(active_test=False).search([('id', 'in', doc_ids)]).ids
return set([message_id for allowed_doc_id in allowed_doc_ids for message_id in doc_dict[allowed_doc_id]])
@api.model
def _find_allowed_doc_ids(self, model_ids):
IrModelAccess = self.env['ir.model.access']
allowed_ids = set()
for doc_model, doc_dict in model_ids.items():
if not IrModelAccess.check(doc_model, 'read', False):
continue
allowed_ids |= self._find_allowed_model_wise(doc_model, doc_dict)
return allowed_ids
def check_access_rule(self, operation):
""" Access rules of mail.message:
- read: if
- author_id == pid, uid is the author OR
- uid is in the recipients (partner_ids) OR
- uid has been notified (needaction) OR
- uid have read access to the related document if model, res_id
- otherwise: raise
- create: if
- no model, no res_id (private message) OR
- pid in message_follower_ids if model, res_id OR
- uid can read the parent OR
- uid have write or create access on the related document if model, res_id, OR
- otherwise: raise
- write: if
- author_id == pid, uid is the author, OR
- uid is in the recipients (partner_ids) OR
- uid has write or create access on the related document if model, res_id
- otherwise: raise
- unlink: if
- uid has write or create access on the related document
- otherwise: raise
Specific case: non-employee users only see messages with a subtype (i.e.
they do not see internal logs).
"""
def _generate_model_record_ids(msg_val, msg_ids):
""" :param model_record_ids: {'model': {'res_id': (msg_id, msg_id)}, ... }
:param message_values: {'msg_id': {'model': .., 'res_id': .., 'author_id': ..}}
"""
model_record_ids = {}
for id in msg_ids:
vals = msg_val.get(id, {})
if vals.get('model') and vals.get('res_id'):
model_record_ids.setdefault(vals['model'], set()).add(vals['res_id'])
return model_record_ids
if self.env.is_superuser():
return
# Non employees see only messages with a subtype (aka, not internal logs)
if not self.env['res.users'].has_group('base.group_user'):
self._cr.execute('''SELECT DISTINCT message.id, message.subtype_id, subtype.internal
FROM "%s" AS message
LEFT JOIN "mail_message_subtype" as subtype
ON message.subtype_id = subtype.id
WHERE message.message_type = %%s AND
(message.is_internal IS TRUE OR message.subtype_id IS NULL OR subtype.internal IS TRUE) AND
message.id = ANY (%%s)''' % (self._table), ('comment', self.ids,))
if self._cr.fetchall():
raise AccessError(
_('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)', self._description, operation)
+ ' - ({} {}, {} {})'.format(_('Records:'), self.ids[:6], _('User:'), self._uid)
)
# Read mail_message.ids to have their values
message_values = dict((message_id, {}) for message_id in self.ids)
self.flush(['model', 'res_id', 'author_id', 'parent_id', 'message_type', 'partner_ids'])
self.env['mail.notification'].flush(['mail_message_id', 'res_partner_id'])
if operation == 'read':
self._cr.execute("""
SELECT DISTINCT m.id, m.model, m.res_id, m.author_id, m.parent_id,
COALESCE(partner_rel.res_partner_id, needaction_rel.res_partner_id),
m.message_type as message_type
FROM "%s" m
LEFT JOIN "mail_message_res_partner_rel" partner_rel
ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = %%(pid)s
LEFT JOIN "mail_notification" needaction_rel
ON needaction_rel.mail_message_id = m.id AND needaction_rel.res_partner_id = %%(pid)s
WHERE m.id = ANY (%%(ids)s)""" % self._table, dict(pid=self.env.user.partner_id.id, ids=self.ids))
for mid, rmod, rid, author_id, parent_id, partner_id, message_type in self._cr.fetchall():
message_values[mid] = {
'model': rmod,
'res_id': rid,
'author_id': author_id,
'parent_id': parent_id,
'notified': any((message_values[mid].get('notified'), partner_id)),
'message_type': message_type,
}
elif operation == 'write':
self._cr.execute("""
SELECT DISTINCT m.id, m.model, m.res_id, m.author_id, m.parent_id,
COALESCE(partner_rel.res_partner_id, needaction_rel.res_partner_id),
m.message_type as message_type
FROM "%s" m
LEFT JOIN "mail_message_res_partner_rel" partner_rel
ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = %%(pid)s
LEFT JOIN "mail_notification" needaction_rel
ON needaction_rel.mail_message_id = m.id AND needaction_rel.res_partner_id = %%(pid)s
WHERE m.id = ANY (%%(ids)s)""" % self._table, dict(pid=self.env.user.partner_id.id, uid=self.env.user.id, ids=self.ids))
for mid, rmod, rid, author_id, parent_id, partner_id, message_type in self._cr.fetchall():
message_values[mid] = {
'model': rmod,
'res_id': rid,
'author_id': author_id,
'parent_id': parent_id,
'notified': any((message_values[mid].get('notified'), partner_id)),
'message_type': message_type,
}
elif operation in ('create', 'unlink'):
self._cr.execute("""SELECT DISTINCT id, model, res_id, author_id, parent_id, message_type FROM "%s" WHERE id = ANY (%%s)""" % self._table, (self.ids,))
for mid, rmod, rid, author_id, parent_id, message_type in self._cr.fetchall():
message_values[mid] = {
'model': rmod,
'res_id': rid,
'author_id': author_id,
'parent_id': parent_id,
'message_type': message_type,
}
else:
raise ValueError(_('Wrong operation name (%s)', operation))
# Author condition (READ, WRITE, CREATE (private))
author_ids = []
if operation == 'read':
author_ids = [mid for mid, message in message_values.items()
if message.get('author_id') and message.get('author_id') == self.env.user.partner_id.id]
elif operation == 'write':
author_ids = [mid for mid, message in message_values.items() if message.get('author_id') == self.env.user.partner_id.id]
elif operation == 'create':
author_ids = [mid for mid, message in message_values.items()
if not self.is_thread_message(message)]
messages_to_check = self.ids
messages_to_check = set(messages_to_check).difference(set(author_ids))
if not messages_to_check:
return
# Recipients condition, for read and write (partner_ids)
# keep on top, usefull for systray notifications
notified_ids = []
model_record_ids = _generate_model_record_ids(message_values, messages_to_check)
if operation in ['read', 'write']:
notified_ids = [mid for mid, message in message_values.items() if message.get('notified')]
messages_to_check = set(messages_to_check).difference(set(notified_ids))
if not messages_to_check:
return
# CRUD: Access rights related to the document
document_related_ids = []
document_related_candidate_ids = [
mid for mid, message in message_values.items()
if (message.get('model') and message.get('res_id') and
message.get('message_type') != 'user_notification')
]
model_record_ids = _generate_model_record_ids(message_values, document_related_candidate_ids)
for model, doc_ids in model_record_ids.items():
DocumentModel = self.env[model]
if hasattr(DocumentModel, '_get_mail_message_access'):
check_operation = DocumentModel._get_mail_message_access(doc_ids, operation) ## why not giving model here?
else:
check_operation = self.env['mail.thread']._get_mail_message_access(doc_ids, operation, model_name=model)
records = DocumentModel.browse(doc_ids)
records.check_access_rights(check_operation)
mids = records.browse(doc_ids)._filter_access_rules(check_operation)
document_related_ids += [
mid for mid, message in message_values.items()
if (
message.get('model') == model and
message.get('res_id') in mids.ids and
message.get('message_type') != 'user_notification'
)
]
messages_to_check = messages_to_check.difference(set(document_related_ids))
if not messages_to_check:
return
# Parent condition, for create (check for received notifications for the created message parent)
notified_ids = []
if operation == 'create':
# TDE: probably clean me
parent_ids = [message.get('parent_id') for message in message_values.values()
if message.get('parent_id')]
self._cr.execute("""SELECT DISTINCT m.id, partner_rel.res_partner_id FROM "%s" m
LEFT JOIN "mail_message_res_partner_rel" partner_rel
ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = (%%s)
WHERE m.id = ANY (%%s)""" % self._table, (self.env.user.partner_id.id, parent_ids,))
not_parent_ids = [mid[0] for mid in self._cr.fetchall() if mid[1]]
notified_ids += [mid for mid, message in message_values.items()
if message.get('parent_id') in not_parent_ids]
messages_to_check = messages_to_check.difference(set(notified_ids))
if not messages_to_check:
return
# Recipients condition for create (message_follower_ids)
if operation == 'create':
for doc_model, doc_ids in model_record_ids.items():
followers = self.env['mail.followers'].sudo().search([
('res_model', '=', doc_model),
('res_id', 'in', list(doc_ids)),
('partner_id', '=', self.env.user.partner_id.id),
])
fol_mids = [follower.res_id for follower in followers]
notified_ids += [mid for mid, message in message_values.items()
if message.get('model') == doc_model and
message.get('res_id') in fol_mids and
message.get('message_type') != 'user_notification'
]
messages_to_check = messages_to_check.difference(set(notified_ids))
if not messages_to_check:
return
if not self.browse(messages_to_check).exists():
return
raise AccessError(
_('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)', self._description, operation)
+ ' - ({} {}, {} {})'.format(_('Records:'), list(messages_to_check)[:6], _('User:'), self._uid)
)
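# Illustrative note (assumption): in practice the rules above mean a message is
# accessible either through its author, through a notified recipient, or through
# access on the related document, e.g.
#
#     message.with_user(some_portal_user).check_access_rule('read')
#     # raises AccessError unless one of the conditions documented above holds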
@api.model_create_multi
def create(self, values_list):
tracking_values_list = []
for values in values_list:
if 'email_from' not in values: # needed to compute reply_to
author_id, email_from = self.env['mail.thread']._message_compute_author(values.get('author_id'), email_from=None, raise_exception=False)
values['email_from'] = email_from
if not values.get('message_id'):
values['message_id'] = self._get_message_id(values)
if 'reply_to' not in values:
values['reply_to'] = self._get_reply_to(values)
if 'record_name' not in values and 'default_record_name' not in self.env.context:
values['record_name'] = self._get_record_name(values)
if 'attachment_ids' not in values:
values['attachment_ids'] = []
# extract base64 images
if 'body' in values:
Attachments = self.env['ir.attachment'].with_context(clean_context(self._context))
data_to_url = {}
def base64_to_boundary(match):
key = match.group(2)
if not data_to_url.get(key):
name = match.group(4) if match.group(4) else 'image%s' % len(data_to_url)
try:
attachment = Attachments.create({
'name': name,
'datas': match.group(2),
'res_model': values.get('model'),
'res_id': values.get('res_id'),
})
except binascii_error:
_logger.warning("Impossible to create an attachment out of badly formated base64 embedded image. Image has been removed.")
return match.group(3) # group(3) is the url ending single/double quote matched by the regexp
else:
attachment.generate_access_token()
values['attachment_ids'].append((4, attachment.id))
data_to_url[key] = ['/web/image/%s?access_token=%s' % (attachment.id, attachment.access_token), name]
return '%s%s alt="%s"' % (data_to_url[key][0], match.group(3), data_to_url[key][1])
values['body'] = _image_dataurl.sub(base64_to_boundary, tools.ustr(values['body']))
# delegate creation of tracking after the create as sudo to avoid access rights issues
tracking_values_list.append(values.pop('tracking_value_ids', False))
messages = super(Message, self).create(values_list)
check_attachment_access = []
if all(isinstance(command, int) or command[0] in (4, 6) for values in values_list for command in values.get('attachment_ids')):
for values in values_list:
for command in values.get('attachment_ids'):
if isinstance(command, int):
check_attachment_access += [command]
elif command[0] == 6:
check_attachment_access += command[2]
else: # command[0] == 4:
check_attachment_access += [command[1]]
else:
check_attachment_access = messages.mapped('attachment_ids').ids # fallback on read if any unknown command
if check_attachment_access:
self.env['ir.attachment'].browse(check_attachment_access).check(mode='read')
for message, values, tracking_values_cmd in zip(messages, values_list, tracking_values_list):
if tracking_values_cmd:
vals_lst = [dict(cmd[2], mail_message_id=message.id) for cmd in tracking_values_cmd if len(cmd) == 3 and cmd[0] == 0]
other_cmd = [cmd for cmd in tracking_values_cmd if len(cmd) != 3 or cmd[0] != 0]
if vals_lst:
self.env['mail.tracking.value'].sudo().create(vals_lst)
if other_cmd:
message.sudo().write({'tracking_value_ids': tracking_values_cmd})
if message.is_thread_message(values):
message._invalidate_documents(values.get('model'), values.get('res_id'))
return messages
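# Illustrative sketch, not part of the original file: a body holding an inline
# base64 image is rewritten at create() time so the image is stored as an
# ir.attachment served by URL (values below are made up):
#
#     msg = env['mail.message'].create({
#         'model': 'res.partner',
#         'res_id': partner.id,
#         'body': '<img src="data:image/png;base64,iVBORw0KGgo..." data-filename="logo.png">',
#     })
#     # msg.body now points to /web/image/<attachment_id>?access_token=...
#     # and the extracted image is listed in msg.attachment_ids.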
def read(self, fields=None, load='_classic_read'):
""" Override to explicitely call check_access_rule, that is not called
by the ORM. It instead directly fetches ir.rules and apply them. """
self.check_access_rule('read')
return super(Message, self).read(fields=fields, load=load)
def write(self, vals):
record_changed = 'model' in vals or 'res_id' in vals
if record_changed or 'message_type' in vals:
self._invalidate_documents()
res = super(Message, self).write(vals)
if vals.get('attachment_ids'):
for mail in self:
mail.attachment_ids.check(mode='read')
if 'notification_ids' in vals or record_changed:
self._invalidate_documents()
return res
def unlink(self):
# cascade-delete attachments that are directly attached to the message (should only happen
# for mail.messages that act as parent for a standalone mail.mail record).
if not self:
return True
self.check_access_rule('unlink')
self.mapped('attachment_ids').filtered(
lambda attach: attach.res_model == self._name and (attach.res_id in self.ids or attach.res_id == 0)
).unlink()
for elem in self:
if elem.is_thread_message():
elem._invalidate_documents()
return super(Message, self).unlink()
@api.model
def _read_group_raw(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True):
if not self.env.is_admin():
raise AccessError(_("Only administrators are allowed to use grouped read on message model"))
return super(Message, self)._read_group_raw(
domain=domain, fields=fields, groupby=groupby, offset=offset,
limit=limit, orderby=orderby, lazy=lazy,
)
def export_data(self, fields_to_export):
if not self.env.is_admin():
raise AccessError(_("Only administrators are allowed to export mail message"))
return super(Message, self).export_data(fields_to_export)
def _update_content(self, body, attachment_ids):
self.ensure_one()
thread = self.env[self.model].browse(self.res_id)
thread._check_can_update_message_content(self)
self.body = body
if not attachment_ids:
self.attachment_ids._delete_and_notify()
else:
message_values = {
'model': self.model,
'body': body,
'res_id': self.res_id,
}
attachment_values = thread._message_post_process_attachments([], attachment_ids, message_values)
self.update(attachment_values)
# Cleanup related message data if the message is empty
self.sudo()._filter_empty()._cleanup_side_records()
thread._message_update_content_after_hook(self)
def action_open_document(self):
""" Opens the related record based on the model and ID """
self.ensure_one()
return {
'res_id': self.res_id,
'res_model': self.model,
'target': 'current',
'type': 'ir.actions.act_window',
'view_mode': 'form',
}
# ------------------------------------------------------
# DISCUSS API
# ------------------------------------------------------
@api.model
def mark_all_as_read(self, domain=None):
# not really an efficient method: it does one db request for the
# search, and another one to set is_read to True on the matching
# notifications from the relation.
notif_domain = [
('res_partner_id', '=', self.env.user.partner_id.id),
('is_read', '=', False)]
if domain:
messages = self.search(domain)
messages.set_message_done()
return messages.ids
notifications = self.env['mail.notification'].sudo().search(notif_domain)
notifications.write({'is_read': True})
ids = [n['mail_message_id'] for n in notifications.read(['mail_message_id'])]
self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.message/mark_as_read', {
'message_ids': [id[0] for id in ids],
'needaction_inbox_counter': self.env.user.partner_id._get_needaction_count(),
})
return ids
def set_message_done(self):
""" Remove the needaction from messages for the current partner. """
partner_id = self.env.user.partner_id
notifications = self.env['mail.notification'].sudo().search([
('mail_message_id', 'in', self.ids),
('res_partner_id', '=', partner_id.id),
('is_read', '=', False)])
if not notifications:
return
notifications.write({'is_read': True})
# notifies changes in messages through the bus.
self.env['bus.bus']._sendone(partner_id, 'mail.message/mark_as_read', {
'message_ids': notifications.mail_message_id.ids,
'needaction_inbox_counter': self.env.user.partner_id._get_needaction_count(),
})
@api.model
def unstar_all(self):
""" Unstar messages for the current partner. """
partner_id = self.env.user.partner_id.id
starred_messages = self.search([('starred_partner_ids', 'in', partner_id)])
starred_messages.write({'starred_partner_ids': [Command.unlink(partner_id)]})
ids = [m.id for m in starred_messages]
self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.message/toggle_star', {
'message_ids': ids,
'starred': False,
})
def toggle_message_starred(self):
""" Toggle messages as (un)starred. Technically, the notifications related
to uid are set to (un)starred.
"""
# a user should always be able to star a message he can read
self.check_access_rule('read')
starred = not self.starred
if starred:
self.sudo().write({'starred_partner_ids': [Command.link(self.env.user.partner_id.id)]})
else:
self.sudo().write({'starred_partner_ids': [Command.unlink(self.env.user.partner_id.id)]})
self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.message/toggle_star', {
'message_ids': [self.id],
'starred': starred,
})
def _message_add_reaction(self, content):
self.ensure_one()
self.check_access_rule('write')
self.check_access_rights('write')
guest = self.env['mail.guest']._get_guest_from_context()
if self.env.user._is_public() and guest:
partner = self.env['res.partner']
else:
guest = self.env['mail.guest']
partner = self.env.user.partner_id
reaction = self.env['mail.message.reaction'].sudo().search([('message_id', '=', self.id), ('partner_id', '=', partner.id), ('guest_id', '=', guest.id), ('content', '=', content)])
if not reaction:
reaction = self.env['mail.message.reaction'].sudo().create({
'message_id': self.id,
'content': content,
'partner_id': partner.id,
'guest_id': guest.id,
})
self.env[self.model].browse(self.res_id)._message_add_reaction_after_hook(message=self, content=reaction.content)
def _message_remove_reaction(self, content):
self.ensure_one()
self.check_access_rule('write')
self.check_access_rights('write')
guest = self.env['mail.guest']._get_guest_from_context()
if self.env.user._is_public() and guest:
partner = self.env['res.partner']
else:
guest = self.env['mail.guest']
partner = self.env.user.partner_id
reaction = self.env['mail.message.reaction'].sudo().search([('message_id', '=', self.id), ('partner_id', '=', partner.id), ('guest_id', '=', guest.id), ('content', '=', content)])
reaction.unlink()
self.env[self.model].browse(self.res_id)._message_remove_reaction_after_hook(message=self, content=content)
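# Illustrative usage (assumption): toggling a reaction from server-side code;
# both calls resolve the reacting partner or guest from the environment as
# implemented above and require write access on the message.
#
#     message._message_add_reaction('👍')
#     message._message_remove_reaction('👍')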
# ------------------------------------------------------
# MESSAGE READ / FETCH / FAILURE API
# ------------------------------------------------------
def _message_format(self, fnames, format_reply=True):
"""Reads values from messages and formats them for the web client."""
self.check_access_rule('read')
vals_list = self._read_format(fnames)
thread_ids_by_model_name = defaultdict(set)
for message in self:
if message.model and message.res_id:
thread_ids_by_model_name[message.model].add(message.res_id)
for vals in vals_list:
message_sudo = self.browse(vals['id']).sudo().with_prefetch(self.ids)
# Author
if message_sudo.author_id:
author = (message_sudo.author_id.id, message_sudo.author_id.display_name)
else:
author = (0, message_sudo.email_from)
# Tracking values
tracking_value_ids = []
for tracking in message_sudo.tracking_value_ids:
groups = tracking.field_groups
if not groups or self.env.is_superuser() or self.user_has_groups(groups):
tracking_value_ids.append({
'id': tracking.id,
'changed_field': tracking.field_desc,
'old_value': tracking.get_old_display_value()[0],
'new_value': tracking.get_new_display_value()[0],
'field_type': tracking.field_type,
'currency_id': tracking.currency_id.id,
})
if message_sudo.model and message_sudo.res_id:
record_name = self.env[message_sudo.model] \
.browse(message_sudo.res_id) \
.sudo() \
.with_prefetch(thread_ids_by_model_name[message_sudo.model]) \
.display_name
else:
record_name = False
if message_sudo.author_guest_id:
vals['guestAuthor'] = [('insert', {
'id': message_sudo.author_guest_id.id,
'name': message_sudo.author_guest_id.name,
})]
else:
vals['author_id'] = author
reactions_per_content = defaultdict(self.env['mail.message.reaction'].sudo().browse)
for reaction in message_sudo.reaction_ids:
reactions_per_content[reaction.content] |= reaction
reaction_groups = [('insert-and-replace', [{
'messageId': message_sudo.id,
'content': content,
'count': len(reactions),
'partners': [('insert-and-replace', [{'id': partner.id, 'name': partner.name} for partner in reactions.partner_id])],
'guests': [('insert-and-replace', [{'id': guest.id, 'name': guest.name} for guest in reactions.guest_id])],
} for content, reactions in reactions_per_content.items()])]
if format_reply and message_sudo.model == 'mail.channel' and message_sudo.parent_id:
vals['parentMessage'] = message_sudo.parent_id.message_format(format_reply=False)[0]
vals.update({
'notifications': message_sudo.notification_ids._filtered_for_web_client()._notification_format(),
'attachment_ids': message_sudo.attachment_ids._attachment_format(),
'tracking_value_ids': tracking_value_ids,
'messageReactionGroups': reaction_groups,
'record_name': record_name,
})
return vals_list
@api.model
def _message_fetch(self, domain, max_id=None, min_id=None, limit=30):
""" Get a limited amount of formatted messages with provided domain.
:param domain: the domain to filter messages;
:param min_id: messages must be more recent than this id
:param max_id: message must be less recent than this id
:param limit: the maximum amount of messages to get;
:returns list(dict).
"""
if max_id:
domain = expression.AND([domain, [('id', '<', max_id)]])
if min_id:
domain = expression.AND([domain, [('id', '>', min_id)]])
return self.search(domain, limit=limit).message_format()
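# Illustrative usage (assumption): fetching an older page of messages for a
# document, e.g. to implement a "load more" request.
#
#     env['mail.message']._message_fetch(
#         domain=[('model', '=', 'res.partner'), ('res_id', '=', partner.id)],
#         max_id=last_seen_message_id,
#         limit=30,
#     )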
def message_format(self, format_reply=True):
""" Get the message values in the format for web client. Since message values can be broadcasted,
computed fields MUST NOT BE READ and broadcasted.
:returns list(dict).
Example :
{
'body': HTML content of the message
'model': u'res.partner',
'record_name': u'Agrolait',
'attachment_ids': [
{
'file_type_icon': u'webimage',
'id': 45,
'name': u'sample.png',
'filename': u'sample.png'
}
],
'needaction_partner_ids': [], # list of partner ids
'res_id': 7,
'tracking_value_ids': [
{
'old_value': "",
'changed_field': "Customer",
'id': 2965,
'new_value': "Axelor"
}
],
'author_id': (3, u'Administrator'),
'email_from': '[email protected]' # email address or False
'subtype_id': (1, u'Discussions'),
'date': '2015-06-30 08:22:33',
'partner_ids': [[7, "Sacha Du Bourg-Palette"]], # list of partner name_get
'message_type': u'comment',
'id': 59,
'subject': False
'is_note': True # only if the message is a note (subtype == note)
'is_discussion': False # only if the message is a discussion (subtype == discussion)
'is_notification': False # only if the message is a note but is a notification aka not linked to a document like assignation
'parentMessage': {...}, # formatted message that this message is a reply to. Only present if format_reply is True
}
"""
vals_list = self._message_format(self._get_message_format_fields(), format_reply=format_reply)
com_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment')
note_id = self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note')
for vals in vals_list:
message_sudo = self.browse(vals['id']).sudo().with_prefetch(self.ids)
notifs = message_sudo.notification_ids.filtered(lambda n: n.res_partner_id)
vals.update({
'needaction_partner_ids': notifs.filtered(lambda n: not n.is_read).res_partner_id.ids,
'history_partner_ids': notifs.filtered(lambda n: n.is_read).res_partner_id.ids,
'is_note': message_sudo.subtype_id.id == note_id,
'is_discussion': message_sudo.subtype_id.id == com_id,
'subtype_description': message_sudo.subtype_id.description,
'is_notification': vals['message_type'] == 'user_notification',
'recipients': [{'id': p.id, 'name': p.name} for p in message_sudo.partner_ids],
})
if vals['model'] and self.env[vals['model']]._original_module:
vals['module_icon'] = modules.module.get_module_icon(self.env[vals['model']]._original_module)
return vals_list
def _get_message_format_fields(self):
return [
'id', 'body', 'date', 'author_id', 'email_from', # base message fields
'message_type', 'subtype_id', 'subject', # message specific
'model', 'res_id', 'record_name', # document related
'partner_ids', # recipients
'starred_partner_ids', # list of partner ids for whom the message is starred
]
def _message_notification_format(self):
"""Returns the current messages and their corresponding notifications in
the format expected by the web client.
Notifications hold the information about each recipient of a message: if
the message was successfully sent or if an exception or bounce occurred.
"""
return [{
'id': message.id,
'res_id': message.res_id,
'model': message.model,
'res_model_name': message.env['ir.model']._get(message.model).display_name,
'date': message.date,
'message_type': message.message_type,
'notifications': message.notification_ids._filtered_for_web_client()._notification_format(),
} for message in self]
def _notify_message_notification_update(self):
"""Send bus notifications to update status of notifications in the web
client. Purpose is to send the updated status per author."""
messages = self.env['mail.message']
for message in self:
# Check if user has access to the record before displaying a notification about it.
# In case the user switches from one company to another, it might happen that he doesn't
# have access to the record related to the notification. In this case, we skip it.
# YTI FIXME: check allowed_company_ids if necessary
if message.model and message.res_id:
record = self.env[message.model].browse(message.res_id)
try:
record.check_access_rights('read')
record.check_access_rule('read')
except AccessError:
continue
else:
messages |= message
messages_per_partner = defaultdict(lambda: self.env['mail.message'])
for message in messages:
if not self.env.user._is_public():
messages_per_partner[self.env.user.partner_id] |= message
if message.author_id and not any(user._is_public() for user in message.author_id.with_context(active_test=False).user_ids):
messages_per_partner[message.author_id] |= message
updates = [
(partner, 'mail.message/notification_update', {'elements': messages._message_notification_format()})
for partner, messages in messages_per_partner.items()
]
self.env['bus.bus']._sendmany(updates)
# ------------------------------------------------------
# TOOLS
# ------------------------------------------------------
def _cleanup_side_records(self):
""" Clean related data: notifications, stars, ... to avoid lingering
notifications / unreachable counters with void messages notably. """
self.write({
'starred_partner_ids': [(5, 0, 0)],
'notification_ids': [(5, 0, 0)],
})
def _filter_empty(self):
""" Return subset of "void" messages """
return self.filtered(
lambda msg:
(not msg.body or tools.is_html_empty(msg.body)) and
(not msg.subtype_id or not msg.subtype_id.description) and
not msg.attachment_ids and
not msg.tracking_value_ids
)
@api.model
def _get_record_name(self, values):
""" Return the related document name, using name_get. It is done using
SUPERUSER_ID, to be sure to have the record name correctly stored. """
model = values.get('model', self.env.context.get('default_model'))
res_id = values.get('res_id', self.env.context.get('default_res_id'))
if not model or not res_id or model not in self.env:
return False
return self.env[model].sudo().browse(res_id).display_name
@api.model
def _get_reply_to(self, values):
""" Return a specific reply_to for the document """
model = values.get('model', self._context.get('default_model'))
res_id = values.get('res_id', self._context.get('default_res_id')) or False
email_from = values.get('email_from')
message_type = values.get('message_type')
records = None
if self.is_thread_message({'model': model, 'res_id': res_id, 'message_type': message_type}):
records = self.env[model].browse([res_id])
else:
records = self.env[model] if model else self.env['mail.thread']
return records._notify_get_reply_to(default=email_from)[res_id]
@api.model
def _get_message_id(self, values):
if values.get('reply_to_force_new', False) is True:
message_id = tools.generate_tracking_message_id('reply_to')
elif self.is_thread_message(values):
message_id = tools.generate_tracking_message_id('%(res_id)s-%(model)s' % values)
else:
message_id = tools.generate_tracking_message_id('private')
return message_id
def is_thread_message(self, vals=None):
if vals:
res_id = vals.get('res_id')
model = vals.get('model')
message_type = vals.get('message_type')
else:
self.ensure_one()
res_id = self.res_id
model = self.model
message_type = self.message_type
return res_id and model and message_type != 'user_notification'
def _invalidate_documents(self, model=None, res_id=None):
""" Invalidate the cache of the documents followed by ``self``. """
for record in self:
model = model or record.model
res_id = res_id or record.res_id
if model and issubclass(self.pool[model], self.pool['mail.thread']):
self.env[model].invalidate_cache(fnames=[
'message_ids',
'message_unread',
'message_unread_counter',
'message_needaction',
'message_needaction_counter',
], ids=[res_id])
def _get_search_domain_share(self):
return ['&', '&', ('is_internal', '=', False), ('subtype_id', '!=', False), ('subtype_id.internal', '=', False)]
| 50.299191 | 55,983 |
11,736 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import _, api, fields, models, tools
from odoo.addons.bus.models.bus_presence import AWAY_TIMER
from odoo.addons.bus.models.bus_presence import DISCONNECTION_TIMER
from odoo.osv import expression
class Partner(models.Model):
""" Update partner to add a field about notification preferences. Add a generic opt-out field that can be used
to restrict usage of automatic email templates. """
_name = "res.partner"
_inherit = ['res.partner', 'mail.activity.mixin', 'mail.thread.blacklist']
_mail_flat_thread = False
# override to add and order tracking
email = fields.Char(tracking=1)
phone = fields.Char(tracking=2)
parent_id = fields.Many2one(tracking=3)
user_id = fields.Many2one(tracking=4)
vat = fields.Char(tracking=5)
# channels
channel_ids = fields.Many2many('mail.channel', 'mail_channel_partner', 'partner_id', 'channel_id', string='Channels', copy=False)
def _compute_im_status(self):
super()._compute_im_status()
odoobot_id = self.env['ir.model.data']._xmlid_to_res_id('base.partner_root')
odoobot = self.env['res.partner'].browse(odoobot_id)
if odoobot in self:
odoobot.im_status = 'bot'
# pseudo computes
def _get_needaction_count(self):
""" compute the number of needaction of the current partner """
self.ensure_one()
self.env['mail.notification'].flush(['is_read', 'res_partner_id'])
self.env.cr.execute("""
SELECT count(*) as needaction_count
FROM mail_notification R
WHERE R.res_partner_id = %s AND (R.is_read = false OR R.is_read IS NULL)""", (self.id,))
return self.env.cr.dictfetchall()[0].get('needaction_count')
def _get_starred_count(self):
""" compute the number of starred of the current partner """
self.ensure_one()
self.env.cr.execute("""
SELECT count(*) as starred_count
FROM mail_message_res_partner_starred_rel R
WHERE R.res_partner_id = %s """, (self.id,))
return self.env.cr.dictfetchall()[0].get('starred_count')
# ------------------------------------------------------------
# MESSAGING
# ------------------------------------------------------------
def _message_get_suggested_recipients(self):
recipients = super(Partner, self)._message_get_suggested_recipients()
for partner in self:
partner._message_add_suggested_recipient(recipients, partner=partner, reason=_('Partner Profile'))
return recipients
def _message_get_default_recipients(self):
return {
r.id:
{'partner_ids': [r.id],
'email_to': False,
'email_cc': False
}
for r in self
}
# ------------------------------------------------------------
# ORM
# ------------------------------------------------------------
@api.model
@api.returns('self', lambda value: value.id)
def find_or_create(self, email, assert_valid_email=False):
""" Override to use the email_normalized field. """
if not email:
raise ValueError(_('An email is required for find_or_create to work'))
parsed_name, parsed_email = self._parse_partner_name(email)
if not parsed_email and assert_valid_email:
raise ValueError(_('%(email)s is not recognized as a valid email. This is required to create a new customer.', email=email))
if parsed_email:
email_normalized = tools.email_normalize(parsed_email)
if email_normalized:
partners = self.search([('email_normalized', '=', email_normalized)], limit=1)
if partners:
return partners
# We don't want to call `super()` to avoid searching twice on the email
# Especially when the search `email =ilike` cannot be as efficient as
# a search on email_normalized with a btree index
# If you want to override `find_or_create()` your module should depend on `mail`
create_values = {self._rec_name: parsed_name or parsed_email}
if parsed_email: # otherwise keep default_email in context
create_values['email'] = parsed_email
return self.create(create_values)
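# Illustrative usage (assumption): thanks to the email_normalized lookup above,
# both calls below resolve to the same partner.
#
#     p1 = env['res.partner'].find_or_create('"Jane Doe" <[email protected]>')
#     p2 = env['res.partner'].find_or_create('[email protected]')
#     assert p1 == p2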
# ------------------------------------------------------------
# DISCUSS
# ------------------------------------------------------------
def mail_partner_format(self):
partners_format = dict()
for partner in self:
internal_users = partner.user_ids - partner.user_ids.filtered('share')
main_user = internal_users[0] if len(internal_users) > 0 else partner.user_ids[0] if len(partner.user_ids) > 0 else self.env['res.users']
partners_format[partner] = {
"id": partner.id,
"display_name": partner.display_name,
"name": partner.name,
"email": partner.email,
"active": partner.active,
"im_status": partner.im_status,
"user_id": main_user.id,
"is_internal_user": not partner.partner_share,
}
if not self.env.user._is_internal():
partners_format[partner].pop('email')
return partners_format
def _message_fetch_failed(self):
"""Returns first 100 messages, sent by the current partner, that have errors, in
the format expected by the web client."""
self.ensure_one()
messages = self.env['mail.message'].search([
('has_error', '=', True),
('author_id', '=', self.id),
('res_id', '!=', 0),
('model', '!=', False),
('message_type', '!=', 'user_notification')
], limit=100)
return messages._message_notification_format()
def _get_channels_as_member(self):
"""Returns the channels of the partner."""
self.ensure_one()
channels = self.env['mail.channel']
# get the channels and groups
channels |= self.env['mail.channel'].search([
('channel_type', 'in', ('channel', 'group')),
('channel_partner_ids', 'in', [self.id]),
])
# get the pinned direct messages
channels |= self.env['mail.channel'].search([
('channel_type', '=', 'chat'),
('channel_last_seen_partner_ids', 'in', self.env['mail.channel.partner'].sudo()._search([
('partner_id', '=', self.id),
('is_pinned', '=', True),
])),
])
return channels
@api.model
def search_for_channel_invite(self, search_term, channel_id=None, limit=30):
""" Returns partners matching search_term that can be invited to a channel.
If the channel_id is specified, only partners that can actually be invited to the channel
are returned (not already members, and in accordance with the channel configuration).
"""
domain = expression.AND([
expression.OR([
[('name', 'ilike', search_term)],
[('email', 'ilike', search_term)],
]),
[('active', '=', True)],
[('type', '!=', 'private')],
[('user_ids', '!=', False)],
[('user_ids.active', '=', True)],
[('user_ids.share', '=', False)],
])
if channel_id:
channel = self.env['mail.channel'].search([('id', '=', int(channel_id))])
domain = expression.AND([domain, [('channel_ids', 'not in', channel.id)]])
if channel.public == 'groups':
domain = expression.AND([domain, [('user_ids.groups_id', 'in', channel.group_public_id.id)]])
query = self.env['res.partner']._search(domain, order='name, id')
query.order = 'LOWER("res_partner"."name"), "res_partner"."id"' # bypass lack of support for case insensitive order in search()
query.limit = int(limit)
return {
'count': self.env['res.partner'].search_count(domain),
'partners': list(self.env['res.partner'].browse(query).mail_partner_format().values()),
}
@api.model
def get_mention_suggestions(self, search, limit=8, channel_id=None):
""" Return 'limit'-first partners' such that the name or email matches a 'search' string.
Prioritize partners that are also (internal) users, and then extend the research to all partners.
If channel_id is given, only members of this channel are returned.
The return format is a list of partner data (as per returned by `mail_partner_format()`).
"""
search_dom = expression.OR([[('name', 'ilike', search)], [('email', 'ilike', search)]])
search_dom = expression.AND([[('active', '=', True), ('type', '!=', 'private')], search_dom])
if channel_id:
search_dom = expression.AND([[('channel_ids', 'in', channel_id)], search_dom])
domain_is_user = expression.AND([[('user_ids.id', '!=', False), ('user_ids.active', '=', True)], search_dom])
priority_conditions = [
expression.AND([domain_is_user, [('partner_share', '=', False)]]), # Search partners that are internal users
domain_is_user, # Search partners that are users
search_dom, # Search partners that are not users
]
partners = self.env['res.partner']
for domain in priority_conditions:
remaining_limit = limit - len(partners)
if remaining_limit <= 0:
break
partners |= self.search(expression.AND([[('id', 'not in', partners.ids)], domain]), limit=remaining_limit)
return list(partners.mail_partner_format().values())
@api.model
def im_search(self, name, limit=20):
""" Search partner with a name and return its id, name and im_status.
Note : the user must be logged
:param name : the partner name to search
:param limit : the limit of result to return
"""
# This method is supposed to be used only in the context of channel creation or
# extension via an invite. As both of these actions require the 'create' access
# right, we check this specific ACL.
if self.env['mail.channel'].check_access_rights('create', raise_exception=False):
name = '%' + name + '%'
excluded_partner_ids = [self.env.user.partner_id.id]
self.env.cr.execute("""
SELECT
U.id as user_id,
P.id as id,
P.name as name,
P.email as email,
CASE WHEN B.last_poll IS NULL THEN 'offline'
WHEN age(now() AT TIME ZONE 'UTC', B.last_poll) > interval %s THEN 'offline'
WHEN age(now() AT TIME ZONE 'UTC', B.last_presence) > interval %s THEN 'away'
ELSE 'online'
END as im_status
FROM res_users U
JOIN res_partner P ON P.id = U.partner_id
LEFT JOIN bus_presence B ON B.user_id = U.id
WHERE P.name ILIKE %s
AND P.id NOT IN %s
AND U.active = 't'
AND U.share IS NOT TRUE
ORDER BY P.name ASC, P.id ASC
LIMIT %s
""", ("%s seconds" % DISCONNECTION_TIMER, "%s seconds" % AWAY_TIMER, name, tuple(excluded_partner_ids), limit))
return self.env.cr.dictfetchall()
else:
return {}
| 46.944 | 11,736 |
5,948 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, tools, _
from odoo.exceptions import AccessError, UserError
class MailBlackListMixin(models.AbstractModel):
""" Mixin that is inherited by all model with opt out. This mixin stores a normalized
email based on primary_email field.
A normalized email is considered as :
- having a left part + @ + a right part (the domain can be without '.something')
- being lower case
- having no name before the address. Typically, having no 'Name <>'
Ex:
- Formatted Email : 'Name <[email protected]>'
- Normalized Email : '[email protected]'
The primary email field can be specified on the parent model, if it differs from the default one ('email')
The email_normalized field can than be used on that model to search quickly on emails (by simple comparison
and not using time consuming regex anymore).
Using this email_normalized field, blacklist status is computed.
Mail Thread capabilities are required for this mixin. """
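# Usage sketch (hypothetical model, not defined in this file): a model opts in with
#   class Customer(models.Model):
#       _name = 'x.customer'
#       _inherit = ['mail.thread.blacklist']
#       _primary_email = 'email_from'
#       email_from = fields.Char()
# so that email_normalized and is_blacklisted are computed from 'email_from'.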
_name = 'mail.thread.blacklist'
_inherit = ['mail.thread']
_description = 'Mail Blacklist mixin'
_primary_email = 'email'
email_normalized = fields.Char(
string='Normalized Email', compute="_compute_email_normalized", compute_sudo=True,
store=True, invisible=True,
help="This field is used to search on email address as the primary email field can contain more than strictly an email address.")
# Note: is_blacklisted should only be used for display. As the compute does not depend on the blacklist,
# once read, it won't be re-computed if the blacklist is modified in the same request.
is_blacklisted = fields.Boolean(
string='Blacklist', compute="_compute_is_blacklisted", compute_sudo=True, store=False,
search="_search_is_blacklisted", groups="base.group_user",
help="If the email address is on the blacklist, the contact won't receive mass mailing anymore, from any list")
# messaging
message_bounce = fields.Integer('Bounce', help="Counter of the number of bounced emails for this contact", default=0)
@api.depends(lambda self: [self._primary_email])
def _compute_email_normalized(self):
self._assert_primary_email()
for record in self:
record.email_normalized = tools.email_normalize(record[self._primary_email])
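# Illustrative: with _primary_email = 'email', a stored value such as
# 'Jane Doe <[email protected]>' yields email_normalized == '[email protected]'.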
@api.model
def _search_is_blacklisted(self, operator, value):
# Assumes operator is '=' or '!=' and value is True or False
self.flush(['email_normalized'])
self.env['mail.blacklist'].flush(['email', 'active'])
self._assert_primary_email()
if operator != '=':
if operator == '!=' and isinstance(value, bool):
value = not value
else:
raise NotImplementedError()
if value:
query = """
SELECT m.id
FROM mail_blacklist bl
JOIN %s m
ON m.email_normalized = bl.email AND bl.active
"""
else:
query = """
SELECT m.id
FROM %s m
LEFT JOIN mail_blacklist bl
ON m.email_normalized = bl.email AND bl.active
WHERE bl.id IS NULL
"""
self._cr.execute(query % self._table)
res = self._cr.fetchall()
if not res:
return [(0, '=', 1)]
return [('id', 'in', [r[0] for r in res])]
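# Illustrative usage (hypothetical recordset): records.search([('is_blacklisted', '=', True)])
# goes through this method and resolves to the [('id', 'in', [...])] domain built above.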
@api.depends('email_normalized')
def _compute_is_blacklisted(self):
# TODO : Should remove the sudo as compute_sudo is defined on the field.
# But if the user doesn't have access to mail.blacklist, it doesn't work without sudo().
blacklist = set(self.env['mail.blacklist'].sudo().search([
('email', 'in', self.mapped('email_normalized'))]).mapped('email'))
for record in self:
record.is_blacklisted = record.email_normalized in blacklist
def _assert_primary_email(self):
if not hasattr(self, "_primary_email") or not isinstance(self._primary_email, str):
raise UserError(_('Invalid primary email field on model %s', self._name))
if self._primary_email not in self._fields or self._fields[self._primary_email].type != 'char':
raise UserError(_('Invalid primary email field on model %s', self._name))
def _message_receive_bounce(self, email, partner):
""" Override of mail.thread generic method. Purpose is to increment the
bounce counter of the record. """
super(MailBlackListMixin, self)._message_receive_bounce(email, partner)
for record in self:
record.message_bounce = record.message_bounce + 1
def _message_reset_bounce(self, email):
""" Override of mail.thread generic method. Purpose is to reset the
bounce counter of the record. """
super(MailBlackListMixin, self)._message_reset_bounce(email)
self.write({'message_bounce': 0})
def mail_action_blacklist_remove(self):
# wizard access rights currently not working as expected and allows users without access to
# open this wizard, therefore we check to make sure they have access before the wizard opens.
can_access = self.env['mail.blacklist'].check_access_rights('write', raise_exception=False)
if can_access:
return {
'name': _('Are you sure you want to unblacklist this Email Address?'),
'type': 'ir.actions.act_window',
'view_mode': 'form',
'res_model': 'mail.blacklist.remove',
'target': 'new',
}
else:
raise AccessError(_("You do not have the access right to unblacklist emails. Please contact your administrator."))
| 47.206349 | 5,948 |
66,253 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import logging
from collections import defaultdict
from hashlib import sha512
from secrets import choice
from odoo import _, api, fields, models, tools, Command
from odoo.addons.base.models.avatar_mixin import get_hsl_from_seed
from odoo.exceptions import UserError, ValidationError
from odoo.osv import expression
from odoo.tools import html_escape
from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT
_logger = logging.getLogger(__name__)
channel_avatar = '''<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 530.06 530.06">
<circle cx="265.03" cy="265.03" r="265.03" fill="#875a7b"/>
<path d="M416.74,217.29l5-28a8.4,8.4,0,0,0-8.27-9.88H361.09l10.24-57.34a8.4,8.4,0,0,0-8.27-9.88H334.61a8.4,8.4,0,0,0-8.27,6.93L315.57,179.4H246.5l10.24-57.34a8.4,8.4,0,0,0-8.27-9.88H220a8.4,8.4,0,0,0-8.27,6.93L201,179.4H145.6a8.42,8.42,0,0,0-8.28,6.93l-5,28a8.4,8.4,0,0,0,8.27,9.88H193l-16,89.62H121.59a8.4,8.4,0,0,0-8.27,6.93l-5,28a8.4,8.4,0,0,0,8.27,9.88H169L158.73,416a8.4,8.4,0,0,0,8.27,9.88h28.45a8.42,8.42,0,0,0,8.28-6.93l10.76-60.29h69.07L273.32,416a8.4,8.4,0,0,0,8.27,9.88H310a8.4,8.4,0,0,0,8.27-6.93l10.77-60.29h55.38a8.41,8.41,0,0,0,8.28-6.93l5-28a8.4,8.4,0,0,0-8.27-9.88H337.08l16-89.62h55.38A8.4,8.4,0,0,0,416.74,217.29ZM291.56,313.84H222.5l16-89.62h69.07Z" fill="#ffffff"/>
</svg>'''
group_avatar = '''<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 530.06 530.06">
<circle cx="265.03" cy="265.03" r="265.03" fill="#875a7b"/>
<path d="m184.356059,265.030004c-23.740561,0.73266 -43.157922,10.11172 -58.252302,28.136961l-29.455881,0c-12.0169,0 -22.128621,-2.96757 -30.335161,-8.90271s-12.309921,-14.618031 -12.309921,-26.048671c0,-51.730902 9.08582,-77.596463 27.257681,-77.596463c0.87928,0 4.06667,1.53874 9.56217,4.61622s12.639651,6.19167 21.432451,9.34235s17.512401,4.72613 26.158581,4.72613c9.8187,0 19.563981,-1.68536 29.236061,-5.05586c-0.73266,5.4223 -1.0991,10.25834 -1.0991,14.508121c0,20.370061 5.93514,39.127962 17.805421,56.273922zm235.42723,140.025346c0,17.585601 -5.34888,31.470971 -16.046861,41.655892s-24.912861,15.277491 -42.645082,15.277491l-192.122688,0c-17.732221,0 -31.947101,-5.09257 -42.645082,-15.277491s-16.046861,-24.070291 -16.046861,-41.655892c0,-7.7669 0.25653,-15.350691 0.76937,-22.751371s1.53874,-15.387401 3.07748,-23.960381s3.48041,-16.523211 5.82523,-23.850471s5.4955,-14.471411 9.45226,-21.432451s8.49978,-12.89618 13.628841,-17.805421c5.12906,-4.90924 11.393931,-8.82951 18.794611,-11.76037s15.570511,-4.3964 24.509931,-4.3964c1.46554,0 4.61622,1.57545 9.45226,4.72613s10.18492,6.6678 16.046861,10.55136c5.86194,3.88356 13.702041,7.40068 23.520741,10.55136s19.710601,4.72613 29.675701,4.72613s19.857001,-1.57545 29.675701,-4.72613s17.658801,-6.6678 23.520741,-10.55136c5.86194,-3.88356 11.21082,-7.40068 16.046861,-10.55136s7.98672,-4.72613 9.45226,-4.72613c8.93942,0 17.109251,1.46554 24.509931,4.3964s13.665551,6.85113 18.794611,11.76037c5.12906,4.90924 9.67208,10.844381 13.628841,17.805421s7.10744,14.105191 9.45226,21.432451s4.28649,15.277491 5.82523,23.850471s2.56464,16.559701 3.07748,23.960381s0.76937,14.984471 0.76937,22.751371zm-225.095689,-280.710152c0,15.534021 -5.4955,28.796421 -16.486501,39.787422s-24.253401,16.486501 -39.787422,16.486501s-28.796421,-5.4955 -39.787422,-16.486501s-16.486501,-24.253401 -16.486501,-39.787422s5.4955,-28.796421 16.486501,-39.787422s24.253401,-16.486501 39.787422,-16.486501s28.796421,5.4955 39.787422,16.486501s16.486501,24.253401 16.486501,39.787422zm154.753287,84.410884c0,23.300921 -8.24325,43.194632 -24.729751,59.681133s-36.380212,24.729751 -59.681133,24.729751s-43.194632,-8.24325 -59.681133,-24.729751s-24.729751,-36.380212 -24.729751,-59.681133s8.24325,-43.194632 24.729751,-59.681133s36.380212,-24.729751 59.681133,-24.729751s43.194632,8.24325 59.681133,24.729751s24.729751,36.380212 24.729751,59.681133zm126.616325,49.459502c0,11.43064 -4.10338,20.113531 -12.309921,26.048671s-18.318261,8.90271 -30.335161,8.90271l-29.455881,0c-15.094381,-18.025241 -34.511741,-27.404301 -58.252302,-28.136961c11.87028,-17.145961 17.805421,-35.903862 17.805421,-56.273922c0,-4.24978 -0.36644,-9.08582 -1.0991,-14.508121c9.67208,3.3705 19.417361,5.05586 29.236061,5.05586c8.64618,0 17.365781,-1.57545 26.158581,-4.72613s15.936951,-6.26487 21.432451,-9.34235s8.68289,-4.61622 9.56217,-4.61622c18.171861,0 27.257681,25.865561 27.257681,77.596463zm-28.136961,-133.870386c0,15.534021 -5.4955,28.796421 -16.486501,39.787422s-24.253401,16.486501 -39.787422,16.486501s-28.796421,-5.4955 -39.787422,-16.486501s-16.486501,-24.253401 -16.486501,-39.787422s5.4955,-28.796421 16.486501,-39.787422s24.253401,-16.486501 39.787422,-16.486501s28.796421,5.4955 39.787422,16.486501s16.486501,24.253401 16.486501,39.787422z" fill="#ffffff"/>
</svg>'''
class Channel(models.Model):
""" A mail.channel is a discussion group that may behave like a listener
on documents. """
_description = 'Discussion Channel'
_name = 'mail.channel'
_mail_flat_thread = False
_mail_post_access = 'read'
_inherit = ['mail.thread', 'mail.alias.mixin']
MAX_BOUNCE_LIMIT = 10
@api.model
def default_get(self, fields):
res = super(Channel, self).default_get(fields)
if not res.get('alias_contact') and (not fields or 'alias_contact' in fields):
res['alias_contact'] = 'everyone' if res.get('public', 'private') == 'public' else 'followers'
return res
@api.model
def _generate_random_token(self):
# Built to be shared on invitation link. It uses non-ambiguous characters and it is of a
# reasonable length: enough to avoid brute force, but short enough to be shareable easily.
# This token should not contain "mail.guest"._cookie_separator value.
return ''.join(choice('abcdefghijkmnopqrstuvwxyzABCDEFGHIJKLMNPQRSTUVWXYZ23456789') for _i in range(10))
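# Illustrative: a generated token is a 10-character string such as 'gK7mP2xQ9z',
# drawn from the non-ambiguous alphabet above (sample value only).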
# description
name = fields.Char('Name', required=True, translate=True)
active = fields.Boolean(default=True, help="Set active to false to hide the channel without removing it.")
channel_type = fields.Selection([
('chat', 'Chat'),
('channel', 'Channel'),
('group', 'Group')],
string='Channel Type', default='channel', help="Chat is private and unique between 2 persons. Group is private among invited persons. Channel can be freely joined (depending on its configuration).")
is_chat = fields.Boolean(string='Is a chat', compute='_compute_is_chat')
default_display_mode = fields.Selection(string="Default Display Mode", selection=[('video_full_screen', "Full screen video")], help="Determines how the channel will be displayed by default when opening it from its invitation link. No value means display text (no voice/video).")
description = fields.Text('Description')
image_128 = fields.Image("Image", max_width=128, max_height=128)
avatar_128 = fields.Image("Avatar", max_width=128, max_height=128, compute='_compute_avatar_128')
channel_partner_ids = fields.Many2many(
'res.partner', string='Members',
compute='_compute_channel_partner_ids', inverse='_inverse_channel_partner_ids',
compute_sudo=True, search='_search_channel_partner_ids',
groups='base.group_user')
channel_last_seen_partner_ids = fields.One2many(
'mail.channel.partner', 'channel_id', string='Last Seen',
groups='base.group_user')
rtc_session_ids = fields.One2many('mail.channel.rtc.session', 'channel_id', groups="base.group_system")
is_member = fields.Boolean('Is Member', compute='_compute_is_member', compute_sudo=True)
member_count = fields.Integer(string="Member Count", compute='_compute_member_count', compute_sudo=True, help="Excluding guests from count.")
group_ids = fields.Many2many(
'res.groups', string='Auto Subscription',
help="Members of those groups will automatically added as followers. "
"Note that they will be able to manage their subscription manually "
"if necessary.")
# access
uuid = fields.Char('UUID', size=50, default=_generate_random_token, copy=False)
public = fields.Selection([
('public', 'Everyone'),
('private', 'Invited people only'),
('groups', 'Selected group of users')], string='Privacy',
required=True, default='groups',
help='This group is visible to non-members. Invisible groups can add members through the invite button.')
group_public_id = fields.Many2one('res.groups', string='Authorized Group',
default=lambda self: self.env.ref('base.group_user'))
_sql_constraints = [
('uuid_unique', 'UNIQUE(uuid)', 'The channel UUID must be unique'),
]
# CHAT CONSTRAINT
@api.constrains('channel_last_seen_partner_ids', 'channel_partner_ids')
def _constraint_partners_chat(self):
for ch in self.sudo().filtered(lambda ch: ch.channel_type == 'chat'):
if len(ch.channel_last_seen_partner_ids) > 2 or len(ch.channel_partner_ids) > 2:
raise ValidationError(_("A channel of type 'chat' cannot have more than two users."))
# COMPUTE / INVERSE
@api.depends('channel_type')
def _compute_is_chat(self):
for record in self:
record.is_chat = record.channel_type == 'chat'
@api.depends('channel_type', 'image_128', 'uuid')
def _compute_avatar_128(self):
for record in self:
record.avatar_128 = record.image_128 or record._generate_avatar()
def _generate_avatar(self):
if self.channel_type not in ('channel', 'group'):
return False
avatar = group_avatar if self.channel_type == 'group' else channel_avatar
bgcolor = get_hsl_from_seed(self.uuid)
avatar = avatar.replace('fill="#875a7b"', f'fill="{bgcolor}"')
return base64.b64encode(avatar.encode())
@api.depends('channel_last_seen_partner_ids.partner_id')
def _compute_channel_partner_ids(self):
for channel in self:
channel.channel_partner_ids = channel.channel_last_seen_partner_ids.partner_id
def _inverse_channel_partner_ids(self):
new_members = []
outdated = self.env['mail.channel.partner']
for channel in self:
current_members = channel.channel_last_seen_partner_ids
partners = channel.channel_partner_ids
partners_new = partners - current_members.partner_id
new_members += [{
'channel_id': channel.id,
'partner_id': partner.id,
} for partner in partners_new]
outdated += current_members.filtered(lambda m: m.partner_id not in partners)
if new_members:
self.env['mail.channel.partner'].create(new_members)
if outdated:
outdated.sudo().unlink()
def _search_channel_partner_ids(self, operator, operand):
return [(
'channel_last_seen_partner_ids',
'in',
self.env['mail.channel.partner'].sudo()._search([
('partner_id', operator, operand)
])
)]
@api.depends('channel_partner_ids')
def _compute_is_member(self):
for channel in self:
channel.is_member = self.env.user.partner_id in channel.channel_partner_ids
@api.depends('channel_partner_ids')
def _compute_member_count(self):
read_group_res = self.env['mail.channel.partner'].read_group(domain=[('channel_id', 'in', self.ids)], fields=['channel_id'], groupby=['channel_id'])
member_count_by_channel_id = {item['channel_id'][0]: item['channel_id_count'] for item in read_group_res}
for channel in self:
channel.member_count = member_count_by_channel_id.get(channel.id, 0)
# ONCHANGE
@api.onchange('public')
def _onchange_public(self):
if self.public != 'public' and self.alias_contact == 'everyone':
self.alias_contact = 'followers'
# ------------------------------------------------------------
# CRUD
# ------------------------------------------------------------
@api.model_create_multi
def create(self, vals_list):
defaults = self.default_get(['public'])
access_types = []
for vals in vals_list:
# find partners to add from partner_ids
partner_ids_cmd = vals.get('channel_partner_ids') or []
if any(cmd[0] not in (4, 6) for cmd in partner_ids_cmd):
raise ValidationError(_('Invalid value when creating a channel with members, only 4 or 6 are allowed.'))
partner_ids = [cmd[1] for cmd in partner_ids_cmd if cmd[0] == 4]
partner_ids += [cmd[2] for cmd in partner_ids_cmd if cmd[0] == 6]
# find partners to add from channel_last_seen_partner_ids
membership_ids_cmd = vals.get('channel_last_seen_partner_ids') or []
if any(cmd[0] != 0 for cmd in membership_ids_cmd):
raise ValidationError(_('Invalid value when creating a channel with memberships, only 0 is allowed.'))
membership_pids = [cmd[2]['partner_id'] for cmd in membership_ids_cmd if cmd[0] == 0]
# always add the current user to the new channel to have the right values for
# is_pinned + ensure they have the rights to see the channel
partner_ids_to_add = list(set(partner_ids + [self.env.user.partner_id.id]))
vals['channel_last_seen_partner_ids'] = membership_ids_cmd + [
(0, 0, {'partner_id': pid})
for pid in partner_ids_to_add if pid not in membership_pids
]
# save visibility, apply public visibility for create then set back after creation
# to avoid ACL issues
access_type = vals.pop('public', defaults['public'])
access_types.append(access_type)
vals['public'] = 'public'
if not vals.get('alias_contact') and access_type != 'public':
vals['alias_contact'] = 'followers'
# clean vals
vals.pop('channel_partner_ids', False)
# Create channel and alias
channels = super(Channel, self.with_context(mail_create_nolog=True, mail_create_nosubscribe=True)).create(vals_list)
for access_type, channel in zip(access_types, channels):
if access_type != 'public':
channel.sudo().public = access_type
channels._subscribe_users_automatically()
return channels
@api.ondelete(at_uninstall=False)
def _unlink_except_all_employee_channel(self):
# Delete mail.channel
try:
all_emp_group = self.env.ref('mail.channel_all_employees')
except ValueError:
all_emp_group = None
if all_emp_group and all_emp_group in self:
raise UserError(_('You cannot delete those groups, as the Whole Company group is required by other modules.'))
def write(self, vals):
result = super(Channel, self).write(vals)
if vals.get('group_ids'):
self._subscribe_users_automatically()
if 'image_128' in vals:
notifications = []
for channel in self:
notifications.append([channel, 'mail.channel/insert', {
'id': channel.id,
'avatarCacheKey': channel._get_avatar_cache_key(),
}])
self.env['bus.bus']._sendmany(notifications)
return result
def init(self):
self._cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('mail_channel_partner_seen_message_id_idx',))
if not self._cr.fetchone():
self._cr.execute('CREATE INDEX mail_channel_partner_seen_message_id_idx ON mail_channel_partner (channel_id,partner_id,seen_message_id)')
# ------------------------------------------------------------
# MEMBERS MANAGEMENT
# ------------------------------------------------------------
def _subscribe_users_automatically(self):
new_members = self._subscribe_users_automatically_get_members()
if new_members:
to_create = [
{'channel_id': channel_id, 'partner_id': partner_id}
for channel_id in new_members
for partner_id in new_members[channel_id]
]
self.env['mail.channel.partner'].sudo().create(to_create)
def _subscribe_users_automatically_get_members(self):
""" Return new members per channel ID """
return dict(
(channel.id, (channel.group_ids.users.partner_id - channel.channel_partner_ids).ids)
for channel in self
)
def action_unfollow(self):
return self._action_unfollow(self.env.user.partner_id)
def _action_unfollow(self, partner):
self.message_unsubscribe(partner.ids)
if partner not in self.with_context(active_test=False).channel_partner_ids:
return True
channel_info = self.channel_info()[0] # must be computed before leaving the channel (access rights)
result = self.write({'channel_partner_ids': [Command.unlink(partner.id)]})
# side effect of unsubscribe that wasn't taken into account because
# channel_info is called before actually unpinning the channel
channel_info['is_pinned'] = False
self.env['bus.bus']._sendone(partner, 'mail.channel/leave', channel_info)
notification = _('<div class="o_mail_notification">left the channel</div>')
# post 'channel left' message as root since the partner just unsubscribed from the channel
self.sudo().message_post(body=notification, subtype_xmlid="mail.mt_comment", author_id=partner.id)
self.env['bus.bus']._sendone(self, 'mail.channel/insert', {
'id': self.id,
'memberCount': self.member_count,
'members': [('insert-and-unlink', {'id': partner.id})],
})
return result
def add_members(self, partner_ids=None, guest_ids=None, invite_to_rtc_call=False):
""" Adds the given partner_ids and guest_ids as member of self channels. """
self.check_access_rights('write')
self.check_access_rule('write')
partners = self.env['res.partner'].browse(partner_ids or []).exists()
guests = self.env['mail.guest'].browse(guest_ids or []).exists()
for channel in self:
members_to_create = []
if channel.public == 'groups':
invalid_partners = partners.filtered(lambda partner: channel.group_public_id not in partner.user_ids.groups_id)
if invalid_partners:
raise UserError(_(
'Channel "%(channel_name)s" only accepts members of group "%(group_name)s". Forbidden for: %(partner_names)s',
channel_name=channel.name,
group_name=channel.group_public_id.name,
partner_names=', '.join(partner.name for partner in invalid_partners)
))
if guests:
raise UserError(_(
'Channel "%(channel_name)s" only accepts members of group "%(group_name)s". Forbidden for: %(guest_names)s',
channel_name=channel.name,
group_name=channel.group_public_id.name,
guest_names=', '.join(guest.name for guest in guests)
))
existing_partners = self.env['res.partner'].search([('id', 'in', partners.ids), ('channel_ids', 'in', channel.id)])
members_to_create += [{
'partner_id': partner.id,
'channel_id': channel.id,
} for partner in partners - existing_partners]
existing_guests = self.env['mail.guest'].search([('id', 'in', guests.ids), ('channel_ids', 'in', channel.id)])
members_to_create += [{
'guest_id': guest.id,
'channel_id': channel.id,
} for guest in guests - existing_guests]
new_members = self.env['mail.channel.partner'].sudo().create(members_to_create)
members_data = []
guest_members_data = []
for channel_partner in new_members.filtered(lambda channel_partner: channel_partner.partner_id):
user = channel_partner.partner_id.user_ids[0] if channel_partner.partner_id.user_ids else self.env['res.users']
# notify invited members through the bus
if user:
self.env['bus.bus']._sendone(channel_partner.partner_id, 'mail.channel/joined', {
'channel': channel_partner.channel_id.with_user(user).with_context(allowed_company_ids=user.company_ids.ids).sudo().channel_info()[0],
'invited_by_user_id': self.env.user.id,
})
# notify existing members with a new message in the channel
if channel_partner.partner_id == self.env.user.partner_id:
notification = _('<div class="o_mail_notification">joined the channel</div>')
else:
notification = _(
'<div class="o_mail_notification">invited <a href="#" data-oe-model="res.partner" data-oe-id="%(new_partner_id)d">%(new_partner_name)s</a> to the channel</div>',
new_partner_id=channel_partner.partner_id.id,
new_partner_name=channel_partner.partner_id.name,
)
channel_partner.channel_id.message_post(body=notification, message_type="notification", subtype_xmlid="mail.mt_comment", notify_by_email=False)
members_data.append({
'id': channel_partner.partner_id.id,
'im_status': channel_partner.partner_id.im_status,
'name': channel_partner.partner_id.name,
})
for channel_partner in new_members.filtered(lambda channel_partner: channel_partner.guest_id):
channel_partner.channel_id.message_post(body=_('<div class="o_mail_notification">joined the channel</div>'), message_type="notification", subtype_xmlid="mail.mt_comment", notify_by_email=False)
guest_members_data.append({
'id': channel_partner.guest_id.id,
'name': channel_partner.guest_id.name,
})
guest = channel_partner.guest_id
if guest:
self.env['bus.bus']._sendone(guest, 'mail.channel/joined', {
'channel': channel_partner.channel_id.sudo().channel_info()[0],
})
self.env['bus.bus']._sendone(channel, 'mail.channel/insert', {
'id': channel.id,
'guestMembers': [('insert', guest_members_data)],
'memberCount': channel.member_count,
'members': [('insert', members_data)],
})
if invite_to_rtc_call:
guest = self.env['mail.guest']._get_guest_from_context()
if self.env.user._is_public() and guest:
partner = self.env['res.partner']
else:
guest = self.env['mail.guest']
partner = self.env.user.partner_id
for channel in self:
current_channel_partner = self.env['mail.channel.partner'].sudo().search([('channel_id', '=', channel.id), ('partner_id', '=', partner.id), ('guest_id', '=', guest.id)])
if current_channel_partner and current_channel_partner.rtc_session_ids:
current_channel_partner._rtc_invite_members(partner_ids=partners.ids, guest_ids=guests.ids)
def _action_remove_members(self, partners):
""" Private implementation to remove members from channels. Done as sudo
to avoid ACL issues with channel partners. """
self.env['mail.channel.partner'].sudo().search([
('partner_id', 'in', partners.ids),
('channel_id', 'in', self.ids)
]).unlink()
self.invalidate_cache(fnames=['channel_partner_ids', 'channel_last_seen_partner_ids'])
def _can_invite(self, partner_id):
"""Return True if the current user can invite the partner to the channel.
* public: ok;
* private: must be member;
* group: both current user and target must have group;
:return boolean: whether inviting is ok"""
partner = self.env['res.partner'].browse(partner_id)
for channel in self.sudo():
if channel.public == 'private' and not channel.is_member:
return False
if channel.public == 'groups':
if not partner.user_ids or channel.group_public_id not in partner.user_ids.groups_id:
return False
if channel.group_public_id not in self.env.user.groups_id:
return False
return True
# ------------------------------------------------------------
# RTC
# ------------------------------------------------------------
def _rtc_cancel_invitations(self, partner_ids=None, guest_ids=None):
""" Cancels the invitations of the RTC call from all invited members (or the specified partner_ids).
:param list partner_ids: list of the partner ids from which the invitation has to be removed
:param list guest_ids: list of the guest ids from which the invitation has to be removed
if either partner_ids or guest_ids is set, only the specified ids will be invited.
"""
self.ensure_one()
channel_partner_domain = [
('channel_id', '=', self.id),
('rtc_inviting_session_id', '!=', False),
]
if partner_ids or guest_ids:
channel_partner_domain = expression.AND([channel_partner_domain, [
'|',
('partner_id', 'in', partner_ids or []),
('guest_id', 'in', guest_ids or []),
]])
invited_partners = self.env['res.partner']
invited_guests = self.env['mail.guest']
invitation_notifications = []
for member in self.env['mail.channel.partner'].search(channel_partner_domain):
member.rtc_inviting_session_id = False
if member.partner_id:
invited_partners |= member.partner_id
target = member.partner_id
else:
invited_guests |= member.guest_id
target = member.guest_id
invitation_notifications.append((target, 'mail.channel/insert', {
'id': self.id,
'rtcInvitingSession': [('unlink',)],
}))
self.env['bus.bus']._sendmany(invitation_notifications)
channel_data = {'id': self.id}
if invited_guests:
channel_data['invitedGuests'] = [('insert-and-unlink', [{'id': guest.id} for guest in invited_guests])]
if invited_partners:
channel_data['invitedPartners'] = [('insert-and-unlink', [{'id': partner.id} for partner in invited_partners])]
if invited_partners or invited_guests:
self.env['bus.bus']._sendone(self, 'mail.channel/insert', channel_data)
return channel_data
# ------------------------------------------------------------
# MAILING
# ------------------------------------------------------------
def _alias_get_creation_values(self):
values = super(Channel, self)._alias_get_creation_values()
values['alias_model_id'] = self.env['ir.model']._get('mail.channel').id
if self.id:
values['alias_force_thread_id'] = self.id
return values
def _alias_get_error_message(self, message, message_dict, alias):
if alias.alias_contact == 'followers' and self.ids:
author = self.env['res.partner'].browse(message_dict.get('author_id', False))
if not author or author not in self.channel_partner_ids:
return _('restricted to channel members')
return False
return super(Channel, self)._alias_get_error_message(message, message_dict, alias)
def _notify_compute_recipients(self, message, msg_vals):
""" Override recipients computation as channel is not a standard
mail.thread document. Indeed there are no followers on a channel.
Instead of followers it has members that should be notified.
:param message: see ``MailThread._notify_compute_recipients()``;
:param msg_vals: see ``MailThread._notify_compute_recipients()``;
:return recipients: structured data holding recipients data. See
``MailThread._notify_thread()`` for more details about its content
and use;
"""
# get values from msg_vals or from message if msg_vals doesn't exist
msg_sudo = message.sudo()
message_type = msg_vals.get('message_type', 'email') if msg_vals else msg_sudo.message_type
pids = msg_vals.get('partner_ids', []) if msg_vals else msg_sudo.partner_ids.ids
# notify only user input (comment or incoming emails)
if message_type not in ('comment', 'email'):
return []
# notify only mailing lists or if mentioning recipients
if not pids:
return []
email_from = tools.email_normalize(msg_vals.get('email_from') or msg_sudo.email_from)
author_id = msg_vals.get('author_id') or msg_sudo.author_id.id
recipients_data = []
if pids:
self.env['res.partner'].flush(fnames=['active', 'email', 'partner_share'])
self.env['res.users'].flush(fnames=['notification_type', 'partner_id'])
sql_query = """
SELECT DISTINCT ON (partner.id) partner.id,
partner.partner_share,
users.notification_type
FROM res_partner partner
LEFT JOIN res_users users on partner.id = users.partner_id
WHERE partner.active IS TRUE
AND partner.email != %s
AND partner.id = ANY(%s) AND partner.id != ANY(%s)"""
self.env.cr.execute(
sql_query,
(email_from or '', list(pids), [author_id] if author_id else [], )
)
for partner_id, partner_share, notif in self._cr.fetchall():
# ocn_client: will add partners to recipient_data. More OCN notifications; we may need to filter them.
recipients_data.append({
'id': partner_id,
'share': partner_share,
'active': True,
'notif': notif or 'email',
'type': 'user' if not partner_share and notif else 'customer',
'groups': [],
})
return recipients_data
def _notify_get_groups(self, msg_vals=None):
""" All recipients of a message on a channel are considered as partners.
This means they will receive a minimal email, without a link to access
in the backend. Mailing lists should indeed send minimal emails to avoid
the noise. """
groups = super(Channel, self)._notify_get_groups(msg_vals=msg_vals)
for (index, (group_name, group_func, group_data)) in enumerate(groups):
if group_name != 'customer':
groups[index] = (group_name, lambda partner: False, group_data)
return groups
def _notify_thread(self, message, msg_vals=False, notify_by_email=True, **kwargs):
# link message to channel
rdata = super(Channel, self)._notify_thread(message, msg_vals=msg_vals, notify_by_email=notify_by_email, **kwargs)
message_format_values = message.message_format()[0]
bus_notifications = self._channel_message_notifications(message, message_format_values)
self.env['bus.bus'].sudo()._sendmany(bus_notifications)
# Last interest is updated for a chat when posting a message.
# So a notification is needed to update UI.
if self.is_chat or self.channel_type == 'group':
notifications = []
for channel_partners in self.channel_last_seen_partner_ids.filtered('partner_id'):
notifications.append([channel_partners.partner_id, 'mail.channel/last_interest_dt_changed', {
'id': self.id,
'last_interest_dt': channel_partners.last_interest_dt,
}])
self.env['bus.bus']._sendmany(notifications)
return rdata
def _message_receive_bounce(self, email, partner):
""" Override bounce management to unsubscribe bouncing addresses """
for p in partner:
if p.message_bounce >= self.MAX_BOUNCE_LIMIT:
self._action_unfollow(p)
return super(Channel, self)._message_receive_bounce(email, partner)
def _message_compute_author(self, author_id=None, email_from=None, raise_exception=False):
return super()._message_compute_author(author_id=author_id, email_from=email_from, raise_exception=False)
def _message_compute_parent_id(self, parent_id):
# super() unravels the chain of parents to set parent_id as the first
# ancestor. We don't want that in channel.
if not parent_id:
return parent_id
return self.env['mail.message'].search(
[('id', '=', parent_id),
('model', '=', self._name),
('res_id', '=', self.id)
]).id
@api.returns('mail.message', lambda value: value.id)
def message_post(self, *, message_type='notification', **kwargs):
self.filtered(lambda channel: channel.is_chat or channel.channel_type == 'group').mapped('channel_last_seen_partner_ids').sudo().write({
'is_pinned': True,
'last_interest_dt': fields.Datetime.now(),
})
# mail_post_autofollow=False is necessary to prevent adding followers
# when using mentions in channels. Followers should not be added to
# channels, and especially not automatically (because channel membership
# should be managed with channel.partner instead).
# The current client code might be setting the key to True on sending
# message but it is only useful when targeting customers in chatter.
# This value should simply be set to False in channels no matter what.
return super(Channel, self.with_context(mail_create_nosubscribe=True, mail_post_autofollow=False)).message_post(message_type=message_type, **kwargs)
def _message_post_after_hook(self, message, msg_vals):
"""
Automatically set the message posted by the current user as seen for himself.
"""
self._set_last_seen_message(message)
return super()._message_post_after_hook(message=message, msg_vals=msg_vals)
def _check_can_update_message_content(self, message):
""" We don't call super in this override as we want to ignore the
mail.thread behavior completely """
if not message.message_type == 'comment':
raise UserError(_("Only messages type comment can have their content updated on model 'mail.channel'"))
def _message_update_content_after_hook(self, message):
self.ensure_one()
self.env['bus.bus']._sendone(self, 'mail.message/insert', {
'id': message.id,
'body': message.body,
'attachments': [('insert-and-replace', message.attachment_ids._attachment_format(commands=True))],
})
return super()._message_update_content_after_hook(message=message)
def _message_add_reaction_after_hook(self, message, content):
self.ensure_one()
guest = self.env['mail.guest']._get_guest_from_context()
if self.env.user._is_public() and guest:
guests = [('insert', {'id': guest.id})]
partners = []
else:
guests = []
partners = [('insert', {'id': self.env.user.partner_id.id})]
reactions = self.env['mail.message.reaction'].sudo().search([('message_id', '=', message.id), ('content', '=', content)])
self.env['bus.bus']._sendone(self, 'mail.message/insert', {
'id': message.id,
'messageReactionGroups': [('insert' if len(reactions) > 0 else 'insert-and-unlink', {
'messageId': message.id,
'content': content,
'count': len(reactions),
'guests': guests,
'partners': partners,
})],
})
return super()._message_add_reaction_after_hook(message=message, content=content)
def _message_remove_reaction_after_hook(self, message, content):
self.ensure_one()
guest = self.env['mail.guest']._get_guest_from_context()
if self.env.user._is_public() and guest:
guests = [('insert-and-unlink', {'id': guest.id})]
partners = []
else:
guests = []
partners = [('insert-and-unlink', {'id': self.env.user.partner_id.id})]
reactions = self.env['mail.message.reaction'].sudo().search([('message_id', '=', message.id), ('content', '=', content)])
self.env['bus.bus']._sendone(self, 'mail.message/insert', {
'id': message.id,
'messageReactionGroups': [('insert' if len(reactions) > 0 else 'insert-and-unlink', {
'messageId': message.id,
'content': content,
'count': len(reactions),
'guests': guests,
'partners': partners,
})],
})
return super()._message_remove_reaction_after_hook(message=message, content=content)
def _message_subscribe(self, partner_ids=None, subtype_ids=None, customer_ids=None):
""" Do not allow follower subscription on channels. Only members are
considered. """
raise UserError(_('Adding followers on channels is not possible. Consider adding members instead.'))
# ------------------------------------------------------------
# BROADCAST
# ------------------------------------------------------------
# Anonymous method
def _broadcast(self, partner_ids):
""" Broadcast the current channel header to the given partner ids
:param partner_ids : the partner to notify
"""
notifications = self._channel_channel_notifications(partner_ids)
self.env['bus.bus']._sendmany(notifications)
def _channel_channel_notifications(self, partner_ids):
""" Generate the bus notifications of current channel for the given partner ids
:param partner_ids : the partners to send the current channel header to
:returns list of bus notifications (tuple (bus_channel, message_content))
"""
notifications = []
for partner in self.env['res.partner'].browse(partner_ids):
user_id = partner.user_ids and partner.user_ids[0] or False
if user_id:
user_channels = self.with_user(user_id).with_context(
allowed_company_ids=user_id.company_ids.ids
)
for channel_info in user_channels.channel_info():
notifications.append((partner, 'mail.channel/legacy_insert', channel_info))
return notifications
def _channel_message_notifications(self, message, message_format=False):
""" Generate the bus notifications for the given message
:param message : the mail.message to send
:returns list of bus notifications (tuple (bus_channel, message_content))
"""
message_format = message_format or message.message_format()[0]
notifications = []
for channel in self:
payload = {
'id': channel.id,
'message': dict(message_format),
}
notifications.append((channel, 'mail.channel/new_message', payload))
# add uuid to allow anonymous to listen
if channel.public == 'public':
notifications.append((channel.uuid, 'mail.channel/new_message', payload))
return notifications
# ------------------------------------------------------------
# INSTANT MESSAGING API
# ------------------------------------------------------------
# A channel header should be broadcasted:
# - when adding user to channel (only to the new added partners)
# - when folding/minimizing a channel (only to the user making the action)
# A message should be broadcasted:
# - when a message is posted on a channel (to the channel, using _notify() method)
# ------------------------------------------------------------
def channel_info(self):
""" Get the informations header for the current channels
:returns a list of channels values
:rtype : list(dict)
"""
if not self:
return []
channel_infos = []
rtc_sessions_by_channel = self.sudo().rtc_session_ids._mail_rtc_session_format_by_channel()
channel_last_message_ids = dict((r['id'], r['message_id']) for r in self._channel_last_message_ids())
all_needed_members_domain = expression.OR([
[('channel_id.channel_type', '!=', 'channel')],
[('rtc_inviting_session_id', '!=', False)],
[('partner_id', '=', self.env.user.partner_id.id)] if self.env.user and self.env.user.partner_id else expression.FALSE_LEAF,
])
all_needed_members = self.env['mail.channel.partner'].search(expression.AND([[('channel_id', 'in', self.ids)], all_needed_members_domain]))
partner_format_by_partner = all_needed_members.partner_id.sudo().mail_partner_format()
members_by_channel = defaultdict(lambda: self.env['mail.channel.partner'])
invited_members_by_channel = defaultdict(lambda: self.env['mail.channel.partner'])
member_of_current_user_by_channel = defaultdict(lambda: self.env['mail.channel.partner'])
for member in all_needed_members:
members_by_channel[member.channel_id] |= member
if member.rtc_inviting_session_id:
invited_members_by_channel[member.channel_id] |= member
if self.env.user and self.env.user.partner_id and member.partner_id == self.env.user.partner_id:
member_of_current_user_by_channel[member.channel_id] = member
for channel in self:
info = {
'avatarCacheKey': channel._get_avatar_cache_key(),
'id': channel.id,
'name': channel.name,
'defaultDisplayMode': channel.default_display_mode,
'description': channel.description,
'uuid': channel.uuid,
'state': 'open',
'is_minimized': False,
'channel_type': channel.channel_type,
'public': channel.public,
'group_based_subscription': bool(channel.group_ids),
'create_uid': channel.create_uid.id,
}
# add last message preview (only used in mobile)
info['last_message_id'] = channel_last_message_ids.get(channel.id, False)
info['memberCount'] = channel.member_count
# find the channel partner state, if logged user
if self.env.user and self.env.user.partner_id:
info['message_needaction_counter'] = channel.message_needaction_counter
info['message_unread_counter'] = channel.message_unread_counter
partner_channel = member_of_current_user_by_channel.get(channel, self.env['mail.channel.partner'])
if partner_channel:
partner_channel = partner_channel[0]
info['state'] = partner_channel.fold_state or 'open'
info['is_minimized'] = partner_channel.is_minimized
info['seen_message_id'] = partner_channel.seen_message_id.id
info['custom_channel_name'] = partner_channel.custom_channel_name
info['is_pinned'] = partner_channel.is_pinned
info['last_interest_dt'] = partner_channel.last_interest_dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
if partner_channel.rtc_inviting_session_id:
info['rtc_inviting_session'] = {'id': partner_channel.rtc_inviting_session_id.id}
# add members info
if channel.channel_type != 'channel':
# avoid sending potentially a lot of members for big channels
# exclude chat and other small channels from this optimization because they are
# assumed to be smaller and it's important to know the member list for them
info['members'] = sorted(list(channel._channel_info_format_member(member.partner_id, partner_format_by_partner[member.partner_id]) for member in members_by_channel[channel] if member.partner_id), key=lambda p: p['id'])
info['seen_partners_info'] = sorted([{
'id': cp.id,
'partner_id': cp.partner_id.id,
'fetched_message_id': cp.fetched_message_id.id,
'seen_message_id': cp.seen_message_id.id,
} for cp in members_by_channel[channel] if cp.partner_id], key=lambda p: p['partner_id'])
info['guestMembers'] = [('insert', sorted([{
'id': member.guest_id.sudo().id,
'name': member.guest_id.sudo().name,
} for member in members_by_channel[channel] if member.guest_id], key=lambda g: g['id']))]
# add RTC sessions info
info.update({
'invitedGuests': [('insert', [{'id': member.guest_id.id, 'name': member.guest_id.name} for member in invited_members_by_channel[channel] if member.guest_id])],
'invitedPartners': [('insert', [{'id': member.partner_id.id, 'name': member.partner_id.name} for member in invited_members_by_channel[channel] if member.partner_id])],
'rtcSessions': [('insert', rtc_sessions_by_channel.get(channel, []))],
})
channel_infos.append(info)
return channel_infos
def _channel_info_format_member(self, partner, partner_info):
"""Returns member information in the context of self channel."""
self.ensure_one()
return partner_info
def _channel_fetch_message(self, last_id=False, limit=20):
""" Return message values of the current channel.
:param last_id : last message id to start the search from
:param limit : maximum number of messages to fetch
:returns list of messages values
:rtype : list(dict)
"""
self.ensure_one()
domain = ["&", ("model", "=", "mail.channel"), ("res_id", "in", self.ids)]
if last_id:
domain.append(("id", "<", last_id))
return self.env['mail.message']._message_fetch(domain=domain, limit=limit)
# User methods
@api.model
def channel_get(self, partners_to, pin=True):
""" Get the canonical private channel between some partners, create it if needed.
To reuse an old channel (conversation), it must be private and contain
only the given partners.
:param partners_to : list of res.partner ids to add to the conversation
:param pin : True if getting the channel should pin it for the current user
:returns: channel_info of the created or existing channel
:rtype: dict
"""
if self.env.user.partner_id.id not in partners_to:
partners_to.append(self.env.user.partner_id.id)
if len(partners_to) > 2:
raise UserError(_("A chat should not be created with more than 2 persons. Create a group instead."))
# determine type according to the number of partner in the channel
self.flush()
self.env.cr.execute("""
SELECT P.channel_id
FROM mail_channel C, mail_channel_partner P
WHERE P.channel_id = C.id
AND C.public LIKE 'private'
AND P.partner_id IN %s
AND C.channel_type LIKE 'chat'
AND NOT EXISTS (
SELECT *
FROM mail_channel_partner P2
WHERE P2.channel_id = C.id
AND P2.partner_id NOT IN %s
)
GROUP BY P.channel_id
HAVING ARRAY_AGG(DISTINCT P.partner_id ORDER BY P.partner_id) = %s
LIMIT 1
""", (tuple(partners_to), tuple(partners_to), sorted(list(partners_to)),))
result = self.env.cr.dictfetchall()
if result:
# get the existing channel between the given partners
channel = self.browse(result[0].get('channel_id'))
# pin up the channel for the current partner
if pin:
self.env['mail.channel.partner'].search([('partner_id', '=', self.env.user.partner_id.id), ('channel_id', '=', channel.id)]).write({
'is_pinned': True,
'last_interest_dt': fields.Datetime.now(),
})
channel._broadcast(self.env.user.partner_id.ids)
else:
# create a new one
channel = self.create({
'channel_last_seen_partner_ids': [
Command.create({
'partner_id': partner_id,
# only pin for the current user, so the chat does not show up for the correspondent until a message has been sent
'is_pinned': partner_id == self.env.user.partner_id.id
}) for partner_id in partners_to
],
'public': 'private',
'channel_type': 'chat',
'name': ', '.join(self.env['res.partner'].sudo().browse(partners_to).mapped('name')),
})
channel._broadcast(partners_to)
return channel.channel_info()[0]
@api.model
def channel_fold(self, uuid, state=None):
""" Update the fold_state of the given session. In order to syncronize web browser
tabs, the change will be broadcast to himself (the current user channel).
Note: the user need to be logged
:param state : the new status of the session for the current user.
"""
domain = [('partner_id', '=', self.env.user.partner_id.id), ('channel_id.uuid', '=', uuid)]
for session_state in self.env['mail.channel.partner'].search(domain):
if not state:
state = session_state.fold_state
if session_state.fold_state == 'open':
state = 'folded'
else:
state = 'open'
is_minimized = bool(state != 'closed')
vals = {}
if session_state.fold_state != state:
vals['fold_state'] = state
if session_state.is_minimized != is_minimized:
vals['is_minimized'] = is_minimized
if vals:
session_state.write(vals)
self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.channel/insert', {
'id': session_state.channel_id.id,
'serverFoldState': state,
})
@api.model
def channel_pin(self, uuid, pinned=False):
# add the person in the channel, and pin it (or unpin it)
channel = self.search([('uuid', '=', uuid)])
channel._execute_channel_pin(pinned)
def _execute_channel_pin(self, pinned=False):
""" Hook for website_livechat channel unpin and cleaning """
self.ensure_one()
channel_partners = self.env['mail.channel.partner'].search(
[('partner_id', '=', self.env.user.partner_id.id), ('channel_id', '=', self.id), ('is_pinned', '!=', pinned)])
if channel_partners:
channel_partners.write({'is_pinned': pinned})
if not pinned:
self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.channel/unpin', {'id': self.id})
else:
self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.channel/legacy_insert', self.channel_info()[0])
def _channel_seen(self, last_message_id=None):
"""
Mark channel as seen by updating seen message id of the currently logged-in partner
:param last_message_id: the id of the message to be marked as seen, last message of the
thread by default. This param SHOULD be required, the default behaviour is DEPRECATED and
kept only for compatibility reasons.
"""
self.ensure_one()
domain = ["&", ("model", "=", "mail.channel"), ("res_id", "in", self.ids)]
if last_message_id:
domain = expression.AND([domain, [('id', '<=', last_message_id)]])
last_message = self.env['mail.message'].search(domain, order="id DESC", limit=1)
if not last_message:
return
self._set_last_seen_message(last_message)
data = {
'channel_id': self.id,
'last_message_id': last_message.id,
'partner_id': self.env.user.partner_id.id,
}
target = self if self.channel_type == 'chat' else self.env.user.partner_id
self.env['bus.bus']._sendone(target, 'mail.channel.partner/seen', data)
return last_message.id
def _set_last_seen_message(self, last_message):
"""
Set last seen message of `self` channels for the current user.
:param last_message: the message to set as last seen message
"""
channel_partner_domain = expression.AND([
[('channel_id', 'in', self.ids)],
[('partner_id', '=', self.env.user.partner_id.id)],
expression.OR([
[('seen_message_id', '=', False)],
[('seen_message_id', '<', last_message.id)]
])
])
channel_partner_domain = expression.AND([channel_partner_domain, [('partner_id', '=', self.env.user.partner_id.id)]])
channel_partner = self.env['mail.channel.partner'].search(channel_partner_domain)
channel_partner.write({
'fetched_message_id': last_message.id,
'seen_message_id': last_message.id,
})
def channel_fetched(self):
""" Broadcast the channel_fetched notification to channel members
"""
for channel in self:
if not channel.message_ids.ids:
return
if channel.channel_type != 'chat':
return
last_message_id = channel.message_ids.ids[0] # zero is the index of the last message
channel_partner = self.env['mail.channel.partner'].search([('channel_id', '=', channel.id), ('partner_id', '=', self.env.user.partner_id.id)], limit=1)
if channel_partner.fetched_message_id.id == last_message_id:
# last message fetched by user is already up-to-date
return
channel_partner.write({
'fetched_message_id': last_message_id,
})
self.env['bus.bus']._sendone(channel, 'mail.channel.partner/fetched', {
'channel_id': channel.id,
'id': channel_partner.id,
'last_message_id': last_message_id,
'partner_id': self.env.user.partner_id.id,
})
def channel_set_custom_name(self, name):
self.ensure_one()
channel_partner = self.env['mail.channel.partner'].search([('partner_id', '=', self.env.user.partner_id.id), ('channel_id', '=', self.id)])
channel_partner.write({'custom_channel_name': name})
self.env['bus.bus']._sendone(channel_partner.partner_id, 'mail.channel/insert', {
'id': self.id,
'custom_channel_name': name,
})
def channel_rename(self, name):
self.ensure_one()
self.write({'name': name})
self.env['bus.bus']._sendone(self, 'mail.channel/insert', {
'id': self.id,
'name': name,
})
def channel_change_description(self, description):
self.ensure_one()
self.write({'description': description})
self.env['bus.bus']._sendone(self, 'mail.channel/insert', {
'id': self.id,
'description': description
})
def notify_typing(self, is_typing):
""" Broadcast the typing notification to channel members
:param is_typing: (boolean) tells whether the current user is typing or not
"""
notifications = []
for channel in self:
data = dict({
'channel_id': channel.id,
'is_typing': is_typing,
}, **channel._notify_typing_partner_data())
notifications.append([channel, 'mail.channel.partner/typing_status', data]) # notify backend users
notifications.append([channel.uuid, 'mail.channel.partner/typing_status', data]) # notify frontend users
self.env['bus.bus']._sendmany(notifications)
def _notify_typing_partner_data(self):
"""Returns typing partner data for self channel."""
self.ensure_one()
return {
'partner_id': self.env.user.partner_id.id,
'partner_name': self.env.user.partner_id.name,
}
@api.model
def channel_search_to_join(self, name=None, domain=None):
""" Return the channel info of the channel the current partner can join
:param name : the name of the researched channels
:param domain : the base domain of the research
:returns dict : channel dict
"""
if not domain:
domain = []
domain = expression.AND([
[('channel_type', '=', 'channel')],
[('channel_partner_ids', 'not in', [self.env.user.partner_id.id])],
[('public', '!=', 'private')],
domain
])
if name:
domain = expression.AND([domain, [('name', 'ilike', '%'+name+'%')]])
return self.search(domain).read(['name', 'public', 'uuid', 'channel_type'])
def channel_join(self):
""" Shortcut to add the current user as member of self channels.
Prefer calling add_members() directly when possible.
"""
self.add_members(self.env.user.partner_id.ids)
@api.model
def channel_create(self, name, privacy='groups'):
""" Create a channel and add the current partner, broadcast it (to make the user directly
listen to it when polling)
:param name : the name of the channel to create
:param privacy : privacy of the channel. Should be 'public' or 'private'.
:return dict : channel header
"""
# create the channel
new_channel = self.create({
'name': name,
'public': privacy,
})
notification = _('<div class="o_mail_notification">created <a href="#" class="o_channel_redirect" data-oe-id="%s">#%s</a></div>', new_channel.id, new_channel.name)
new_channel.message_post(body=notification, message_type="notification", subtype_xmlid="mail.mt_comment")
channel_info = new_channel.channel_info()[0]
self.env['bus.bus']._sendone(self.env.user.partner_id, 'mail.channel/legacy_insert', channel_info)
return channel_info
@api.model
def create_group(self, partners_to, default_display_mode=False):
""" Create a group channel.
:param partners_to : list of res.partner ids to add to the conversation
:returns: channel_info of the created channel
:rtype: dict
"""
channel = self.create({
'channel_last_seen_partner_ids': [Command.create({'partner_id': partner_id}) for partner_id in partners_to],
'channel_type': 'group',
'default_display_mode': default_display_mode,
'name': '', # default name is computed client side from the list of members
'public': 'private',
})
channel._broadcast(partners_to)
return channel.channel_info()[0]
@api.model
def get_mention_suggestions(self, search, limit=8):
""" Return 'limit'-first channels' id, name and public fields such that the name matches a
'search' string. Exclude channels of type chat (DM), and private channels the current
user isn't registered to. """
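        # Illustrative result shape (data assumed, not part of the original code): searching "gen" could return
        #   [{'id': 5, 'name': 'general', 'public': 'public', 'channel_type': 'channel'}]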
domain = expression.AND([
[('name', 'ilike', search)],
[('channel_type', '=', 'channel')],
expression.OR([
[('public', '!=', 'private')],
[('channel_partner_ids', 'in', [self.env.user.partner_id.id])]
])
])
return self.search_read(domain, ['id', 'name', 'public', 'channel_type'], limit=limit)
@api.model
def channel_fetch_listeners(self, uuid):
""" Return the id, name and email of partners listening to the given channel """
self._cr.execute("""
SELECT P.id, P.name, P.email
FROM mail_channel_partner CP
INNER JOIN res_partner P ON CP.partner_id = P.id
INNER JOIN mail_channel C ON CP.channel_id = C.id
WHERE C.uuid = %s""", (uuid,))
return self._cr.dictfetchall()
def channel_fetch_preview(self):
""" Return the last message of the given channels """
if not self:
return []
channels_last_message_ids = self._channel_last_message_ids()
channels_preview = dict((r['message_id'], r) for r in channels_last_message_ids)
last_messages = self.env['mail.message'].browse(channels_preview).message_format()
for message in last_messages:
channel = channels_preview[message['id']]
del(channel['message_id'])
channel['last_message'] = message
return list(channels_preview.values())
def _channel_last_message_ids(self):
""" Return the last message of the given channels."""
if not self:
return []
self.flush()
self.env.cr.execute("""
SELECT res_id AS id, MAX(id) AS message_id
FROM mail_message
WHERE model = 'mail.channel' AND res_id IN %s
GROUP BY res_id
""", (tuple(self.ids),))
return self.env.cr.dictfetchall()
def load_more_members(self, known_member_ids):
self.ensure_one()
partners = self.env['res.partner'].with_context(active_test=False).search_read(
domain=[('id', 'not in', known_member_ids), ('channel_ids', 'in', self.id)],
fields=['id', 'name', 'im_status'],
limit=30
)
return [('insert', partners)]
def _get_avatar_cache_key(self):
if not self.avatar_128:
return 'no-avatar'
return sha512(self.avatar_128).hexdigest()
# ------------------------------------------------------------
# COMMANDS
# ------------------------------------------------------------
def _send_transient_message(self, partner_to, content):
""" Notifies partner_to that a message (not stored in DB) has been
written in this channel.
`content` is HTML, dynamic parts should be escaped by the caller.
"""
self.env['bus.bus']._sendone(partner_to, 'mail.channel/transient_message', {
'body': "<span class='o_mail_notification'>" + content + "</span>",
'model': self._name,
'res_id': self.id,
})
def execute_command_help(self, **kwargs):
partner = self.env.user.partner_id
if self.channel_type == 'channel':
msg = _("You are in channel <b>#%s</b>.", html_escape(self.name))
if self.public == 'private':
msg += _(" This channel is private. People must be invited to join it.")
else:
all_channel_partners = self.env['mail.channel.partner'].with_context(active_test=False)
channel_partners = all_channel_partners.search([('partner_id', '!=', partner.id), ('channel_id', '=', self.id)])
msg = _("You are in a private conversation with <b>@%s</b>.", _(" @").join(html_escape(member.partner_id.name or member.guest_id.name) for member in channel_partners) if channel_partners else _('Anonymous'))
msg += self._execute_command_help_message_extra()
self._send_transient_message(partner, msg)
def _execute_command_help_message_extra(self):
msg = _("""<br><br>
Type <b>@username</b> to mention someone, and grab his attention.<br>
Type <b>#channel</b> to mention a channel.<br>
Type <b>/command</b> to execute a command.<br>""")
return msg
def execute_command_leave(self, **kwargs):
if self.channel_type in ('channel', 'group'):
self.action_unfollow()
else:
self.channel_pin(self.uuid, False)
def execute_command_who(self, **kwargs):
channel_members = self.env['mail.channel.partner'].with_context(active_test=False).search([('partner_id', '!=', self.env.user.partner_id.id), ('channel_id', '=', self.id)])
members = [
f'<strong><a href="#" data-oe-id={str(m.partner_id.id)} data-oe-model="res.partner">@{html_escape(m.partner_id.name)}</a></strong>' if m.partner_id else f'<strong>@{html_escape(m.guest_id.name)}</strong>'
for m in channel_members[:30]
]
if len(members) == 0:
msg = _("You are alone in this channel.")
else:
dots = "..." if len(members) != len(channel_members) else ""
msg = _("Users in this channel: %(members)s %(dots)s and you.", members=", ".join(members), dots=dots)
self._send_transient_message(self.env.user.partner_id, msg)
| 53.215261 | 66,253 |
| 1,868 | py | PYTHON | 15.0 |
#!/usr/bin/env python2
# Part of Odoo. See LICENSE file for full copyright and licensing details.
#
# odoo-mailgate
#
# This program will read an email from stdin and forward it to odoo. Configure
# a pipe alias in your mail server to use it, postfix uses a syntax that looks
# like:
#
# email@address: "|/home/odoo/src/odoo-mail.py"
#
# while exim uses a syntax that looks like:
#
# *: |/home/odoo/src/odoo-mail.py
#
# Note python2 was chosen on purpose for backward compatibility with old mail
# servers.
#
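# A quick way to exercise the script locally (illustrative values, adapt them to your
# own database, user id, password, host and port):
#
#   cat message.eml | ./odoo-mail.py -d mydb -u 2 -p secret --host localhost --port 8069
#
# where message.eml is any raw email message saved to disk.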
import optparse
import sys
import traceback
import xmlrpclib
def main():
op = optparse.OptionParser(usage='usage: %prog [options]', version='%prog v1.2')
op.add_option("-d", "--database", dest="database", help="Odoo database name (default: %default)", default='odoo')
op.add_option("-u", "--userid", dest="userid", help="Odoo user id to connect with (default: %default)", default=1, type=int)
op.add_option("-p", "--password", dest="password", help="Odoo user password (default: %default)", default='admin')
op.add_option("--host", dest="host", help="Odoo host (default: %default)", default='localhost')
op.add_option("--port", dest="port", help="Odoo port (default: %default)", default=8069, type=int)
(o, args) = op.parse_args()
try:
msg = sys.stdin.read()
models = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/2/object' % (o.host, o.port), allow_none=True)
models.execute_kw(o.database, o.userid, o.password, 'mail.thread', 'message_process', [False, xmlrpclib.Binary(msg)], {})
except xmlrpclib.Fault as e:
# reformat xmlrpc faults to print a readable traceback
err = "xmlrpclib.Fault: %s\n%s" % (e.faultCode, e.faultString)
sys.exit(err)
except Exception as e:
traceback.print_exc(None, sys.stderr)
sys.exit(2)
if __name__ == '__main__':
main()
| 40.608696 | 1,868 |
| 7,723 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from werkzeug.urls import url_encode
from odoo import http
from odoo.exceptions import AccessError
from odoo.http import request
from odoo.tools import consteq
_logger = logging.getLogger(__name__)
class MailController(http.Controller):
_cp_path = '/mail'
@classmethod
def _redirect_to_messaging(cls):
url = '/web#%s' % url_encode({'action': 'mail.action_discuss'})
return request.redirect(url)
@classmethod
def _check_token(cls, token):
base_link = request.httprequest.path
params = dict(request.params)
params.pop('token', '')
valid_token = request.env['mail.thread']._notify_encode_link(base_link, params)
return consteq(valid_token, str(token))
@classmethod
def _check_token_and_record_or_redirect(cls, model, res_id, token):
comparison = cls._check_token(token)
if not comparison:
_logger.warning('Invalid token in route %s', request.httprequest.url)
return comparison, None, cls._redirect_to_messaging()
try:
record = request.env[model].browse(res_id).exists()
except Exception:
record = None
redirect = cls._redirect_to_messaging()
else:
redirect = cls._redirect_to_record(model, res_id)
return comparison, record, redirect
@classmethod
def _redirect_to_record(cls, model, res_id, access_token=None, **kwargs):
# access_token and kwargs are used in the portal controller override for the Send by email or Share Link
# to give access to the record to a recipient that has normally no access.
uid = request.session.uid
user = request.env['res.users'].sudo().browse(uid)
cids = False
# no model / res_id, meaning no possible record -> redirect to login
if not model or not res_id or model not in request.env:
return cls._redirect_to_messaging()
# find the access action using sudo to have the details about the access link
RecordModel = request.env[model]
record_sudo = RecordModel.sudo().browse(res_id).exists()
if not record_sudo:
# record does not seem to exist -> redirect to login
return cls._redirect_to_messaging()
# the record has a window redirection: check access rights
if uid is not None:
if not RecordModel.with_user(uid).check_access_rights('read', raise_exception=False):
return cls._redirect_to_messaging()
try:
# We need here to extend the "allowed_company_ids" to allow a redirection
# to any record that the user can access, regardless of currently visible
# records based on the "currently allowed companies".
cids = request.httprequest.cookies.get('cids', str(user.company_id.id))
cids = [int(cid) for cid in cids.split(',')]
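                # e.g. a 'cids' cookie of "1,3" yields allowed_company_ids [1, 3] below (illustrative values)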
try:
record_sudo.with_user(uid).with_context(allowed_company_ids=cids).check_access_rule('read')
except AccessError:
# In case the allowed_company_ids from the cookies (i.e. the last user configuration
                    # on their browser) is not sufficient to avoid an ir.rule access error, try the following
# heuristic:
# - Guess the supposed necessary company to access the record via the method
# _get_mail_redirect_suggested_company
# - If no company, then redirect to the messaging
# - Merge the suggested company with the companies on the cookie
# - Make a new access test if it succeeds, redirect to the record. Otherwise,
# redirect to the messaging.
suggested_company = record_sudo._get_mail_redirect_suggested_company()
if not suggested_company:
raise AccessError('')
cids = cids + [suggested_company.id]
record_sudo.with_user(uid).with_context(allowed_company_ids=cids).check_access_rule('read')
except AccessError:
return cls._redirect_to_messaging()
else:
record_action = record_sudo.get_access_action(access_uid=uid)
else:
record_action = record_sudo.get_access_action()
if record_action['type'] == 'ir.actions.act_url' and record_action.get('target_type') != 'public':
url_params = {
'model': model,
'id': res_id,
'active_id': res_id,
'action': record_action.get('id'),
}
view_id = record_sudo.get_formview_id()
if view_id:
url_params['view_id'] = view_id
url = '/web/login?redirect=#%s' % url_encode(url_params)
return request.redirect(url)
record_action.pop('target_type', None)
        # the record has a URL redirection: use it directly
if record_action['type'] == 'ir.actions.act_url':
return request.redirect(record_action['url'])
# other choice: act_window (no support of anything else currently)
elif not record_action['type'] == 'ir.actions.act_window':
return cls._redirect_to_messaging()
url_params = {
'model': model,
'id': res_id,
'active_id': res_id,
'action': record_action.get('id'),
}
view_id = record_sudo.get_formview_id()
if view_id:
url_params['view_id'] = view_id
if cids:
url_params['cids'] = ','.join([str(cid) for cid in cids])
url = '/web?#%s' % url_encode(url_params)
return request.redirect(url)
@http.route('/mail/view', type='http', auth='public')
def mail_action_view(self, model=None, res_id=None, access_token=None, **kwargs):
""" Generic access point from notification emails. The heuristic to
choose where to redirect the user is the following :
- find a public URL
- if none found
- users with a read access are redirected to the document
- users without read access are redirected to the Messaging
- not logged users are redirected to the login page
models that have an access_token may apply variations on this.
"""
# ==============================================================================================
# This block of code disappeared on saas-11.3 to be reintroduced by TBE.
# This is needed because after a migration from an older version to saas-11.3, the link
        # received by mail with a message_id no longer works.
# So this block of code is needed to guarantee the backward compatibility of those links.
if kwargs.get('message_id'):
try:
message = request.env['mail.message'].sudo().browse(int(kwargs['message_id'])).exists()
except:
message = request.env['mail.message']
if message:
model, res_id = message.model, message.res_id
# ==============================================================================================
if res_id and isinstance(res_id, str):
try:
res_id = int(res_id)
except ValueError:
res_id = False
return self._redirect_to_record(model, res_id, access_token, **kwargs)
| 46.245509 | 7,723 |
| 1,628 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
import ipaddress
from odoo import _, SUPERUSER_ID
from odoo.http import request
from odoo.addons.web.controllers import main as web
def _admin_password_warn(uid):
""" Admin still has `admin` password, flash a message via chatter.
Uses a private mail.channel from the system (/ odoobot) to the user, as
    using a more generic mail.thread could send an email, which is undesirable.
"""
if request.params['password'] != 'admin':
return
if ipaddress.ip_address(request.httprequest.remote_addr).is_private:
return
env = request.env(user=SUPERUSER_ID, su=True)
admin = env.ref('base.partner_admin')
if uid not in admin.user_ids.ids:
return
has_demo = bool(env['ir.module.module'].search_count([('demo', '=', True)]))
if has_demo:
return
user = request.env(user=uid)['res.users']
MailChannel = env(context=user.context_get())['mail.channel']
MailChannel.browse(MailChannel.channel_get([admin.id])['id'])\
.message_post(
body=_("Your password is the default (admin)! If this system is exposed to untrusted users it is important to change it immediately for security reasons. I will keep nagging you about it!"),
message_type='comment',
subtype_xmlid='mail.mt_comment'
)
class Home(web.Home):
def _login_redirect(self, uid, redirect=None):
if request.params.get('login_success'):
_admin_password_warn(uid)
return super()._login_redirect(uid, redirect)
| 38.761905 | 1,628 |
| 3,377 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import SUPERUSER_ID, tools
from odoo.http import request, route
from odoo.addons.bus.controllers.main import BusController
class MailChatController(BusController):
def _default_request_uid(self):
""" For Anonymous people, they receive the access right of SUPERUSER_ID since they have NO access (auth=none)
!!! Each time a method from this controller is call, there is a check if the user (who can be anonymous and Sudo access)
can access to the resource.
"""
        return request.session.uid or SUPERUSER_ID
# --------------------------
# Extends BUS Controller Poll
# --------------------------
def _poll(self, dbname, channels, last, options):
channels = list(channels) # do not alter original list
guest_sudo = request.env['mail.guest']._get_guest_from_request(request).sudo()
mail_channels = request.env['mail.channel']
if request.session.uid:
partner = request.env.user.partner_id
mail_channels = partner.channel_ids
channels.append(partner)
elif guest_sudo:
if 'bus_inactivity' in options:
guest_sudo.env['bus.presence'].update(inactivity_period=options.get('bus_inactivity'), identity_field='guest_id', identity_value=guest_sudo.id)
mail_channels = guest_sudo.channel_ids
channels.append(guest_sudo)
for mail_channel in mail_channels:
channels.append(mail_channel)
return super()._poll(dbname, channels, last, options)
# --------------------------
# Anonymous routes (Common Methods)
# --------------------------
@route('/mail/chat_post', type="json", auth="public", cors="*")
def mail_chat_post(self, uuid, message_content, **kwargs):
mail_channel = request.env["mail.channel"].sudo().search([('uuid', '=', uuid)], limit=1)
if not mail_channel:
return False
# find the author from the user session
if request.session.uid:
author = request.env['res.users'].sudo().browse(request.session.uid).partner_id
author_id = author.id
email_from = author.email_formatted
else: # If Public User, use catchall email from company
author_id = False
email_from = mail_channel.anonymous_name or mail_channel.create_uid.company_id.catchall_formatted
        # post a message without adding followers to the channel. email_from=False avoids getting the author from the email data
body = tools.plaintext2html(message_content)
message = mail_channel.with_context(mail_create_nosubscribe=True).message_post(
author_id=author_id,
email_from=email_from,
body=body,
message_type='comment',
subtype_xmlid='mail.mt_comment'
)
return message.id if message else False
@route(['/mail/chat_history'], type="json", auth="public", cors="*")
def mail_chat_history(self, uuid, last_id=False, limit=20):
channel = request.env["mail.channel"].sudo().search([('uuid', '=', uuid)], limit=1)
if not channel:
return []
else:
return channel._channel_fetch_message(last_id, limit)
| 46.902778 | 3,377 |
| 35,481 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
from collections import defaultdict
from datetime import datetime, timedelta
from psycopg2 import IntegrityError
from psycopg2.errorcodes import UNIQUE_VIOLATION
from odoo import http
from odoo.exceptions import AccessError, UserError
from odoo.http import request
from odoo.tools import consteq, file_open
from odoo.tools.misc import get_lang
from odoo.tools.translate import _
from werkzeug.exceptions import NotFound
class DiscussController(http.Controller):
# --------------------------------------------------------------------------
# Public Pages
# --------------------------------------------------------------------------
@http.route([
'/chat/<string:create_token>',
'/chat/<string:create_token>/<string:channel_name>',
], methods=['GET'], type='http', auth='public')
def discuss_channel_chat_from_token(self, create_token, channel_name=None, **kwargs):
return self._response_discuss_channel_from_token(create_token=create_token, channel_name=channel_name)
@http.route([
'/meet/<string:create_token>',
'/meet/<string:create_token>/<string:channel_name>',
], methods=['GET'], type='http', auth='public')
def discuss_channel_meet_from_token(self, create_token, channel_name=None, **kwargs):
return self._response_discuss_channel_from_token(create_token=create_token, channel_name=channel_name, default_display_mode='video_full_screen')
@http.route('/chat/<int:channel_id>/<string:invitation_token>', methods=['GET'], type='http', auth='public')
def discuss_channel_invitation(self, channel_id, invitation_token, **kwargs):
channel_sudo = request.env['mail.channel'].browse(channel_id).sudo().exists()
if not channel_sudo or not channel_sudo.uuid or not consteq(channel_sudo.uuid, invitation_token):
raise NotFound()
return self._response_discuss_channel_invitation(channel_sudo=channel_sudo)
@http.route('/discuss/channel/<int:channel_id>', methods=['GET'], type='http', auth='public')
def discuss_channel(self, channel_id, **kwargs):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
return self._response_discuss_public_channel_template(channel_sudo=channel_partner_sudo.channel_id)
def _response_discuss_channel_from_token(self, create_token, channel_name=None, default_display_mode=False):
if not request.env['ir.config_parameter'].sudo().get_param('mail.chat_from_token'):
raise NotFound()
channel_sudo = request.env['mail.channel'].sudo().search([('uuid', '=', create_token)])
if not channel_sudo:
try:
channel_sudo = channel_sudo.create({
'default_display_mode': default_display_mode,
'name': channel_name or create_token,
'public': 'public',
'uuid': create_token,
})
except IntegrityError as e:
if e.pgcode != UNIQUE_VIOLATION:
raise
# concurrent insert attempt: another request created the channel.
# commit the current transaction and get the channel.
request.env.cr.commit()
channel_sudo = channel_sudo.search([('uuid', '=', create_token)])
return self._response_discuss_channel_invitation(channel_sudo=channel_sudo, is_channel_token_secret=False)
def _response_discuss_channel_invitation(self, channel_sudo, is_channel_token_secret=True):
if channel_sudo.channel_type == 'chat':
raise NotFound()
discuss_public_view_data = {
'isChannelTokenSecret': is_channel_token_secret,
}
add_guest_cookie = False
channel_partner_sudo = channel_sudo.env['mail.channel.partner']._get_as_sudo_from_request(request=request, channel_id=channel_sudo.id)
if channel_partner_sudo:
channel_sudo = channel_partner_sudo.channel_id # ensure guest is in context
else:
if not channel_sudo.env.user._is_public():
try:
channel_sudo.add_members([channel_sudo.env.user.partner_id.id])
except UserError:
raise NotFound()
else:
guest = channel_sudo.env['mail.guest']._get_guest_from_request(request)
if guest:
channel_sudo = channel_sudo.with_context(guest=guest)
try:
channel_sudo.add_members(guest_ids=[guest.id])
except UserError:
raise NotFound()
else:
if channel_sudo.public == 'groups':
raise NotFound()
guest = channel_sudo.env['mail.guest'].create({
'country_id': channel_sudo.env['res.country'].search([('code', '=', request.session.get('geoip', {}).get('country_code'))], limit=1).id,
'lang': get_lang(channel_sudo.env).code,
'name': _("Guest"),
'timezone': channel_sudo.env['mail.guest']._get_timezone_from_request(request),
})
add_guest_cookie = True
discuss_public_view_data.update({
'shouldAddGuestAsMemberOnJoin': True,
'shouldDisplayWelcomeViewInitially': True,
})
channel_sudo = channel_sudo.with_context(guest=guest)
response = self._response_discuss_public_channel_template(channel_sudo=channel_sudo, discuss_public_view_data=discuss_public_view_data)
if add_guest_cookie:
# Discuss Guest ID: every route in this file will make use of it to authenticate
# the guest through `_get_as_sudo_from_request` or `_get_as_sudo_from_request_or_raise`.
expiration_date = datetime.now() + timedelta(days=365)
response.set_cookie(guest._cookie_name, f"{guest.id}{guest._cookie_separator}{guest.access_token}", httponly=True, expires=expiration_date)
return response
def _response_discuss_public_channel_template(self, channel_sudo, discuss_public_view_data=None):
discuss_public_view_data = discuss_public_view_data or {}
return request.render('mail.discuss_public_channel_template', {
'data': {
'channelData': channel_sudo.channel_info()[0],
'discussPublicViewData': dict({
'channel': [('insert', {'id': channel_sudo.id, 'model': 'mail.channel'})],
'shouldDisplayWelcomeViewInitially': channel_sudo.default_display_mode == 'video_full_screen',
}, **discuss_public_view_data),
},
'session_info': channel_sudo.env['ir.http'].session_info(),
})
# --------------------------------------------------------------------------
# Semi-Static Content (GET requests with possible cache)
# --------------------------------------------------------------------------
@http.route('/mail/channel/<int:channel_id>/partner/<int:partner_id>/avatar_128', methods=['GET'], type='http', auth='public')
def mail_channel_partner_avatar_128(self, channel_id, partner_id, **kwargs):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request(request=request, channel_id=channel_id)
if not channel_partner_sudo or not channel_partner_sudo.env['mail.channel.partner'].search([('channel_id', '=', channel_id), ('partner_id', '=', partner_id)], limit=1):
if request.env.user.share:
placeholder = channel_partner_sudo.env['res.partner'].browse(partner_id).exists()._avatar_get_placeholder()
return channel_partner_sudo.env['ir.http']._placeholder_image_get_response(placeholder)
return channel_partner_sudo.sudo(False).env['ir.http']._content_image(model='res.partner', res_id=partner_id, field='avatar_128')
return channel_partner_sudo.env['ir.http']._content_image(model='res.partner', res_id=partner_id, field='avatar_128')
@http.route('/mail/channel/<int:channel_id>/guest/<int:guest_id>/avatar_128', methods=['GET'], type='http', auth='public')
def mail_channel_guest_avatar_128(self, channel_id, guest_id, **kwargs):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request(request=request, channel_id=channel_id)
if not channel_partner_sudo or not channel_partner_sudo.env['mail.channel.partner'].search([('channel_id', '=', channel_id), ('guest_id', '=', guest_id)], limit=1):
if request.env.user.share:
placeholder = channel_partner_sudo.env['mail.guest'].browse(guest_id).exists()._avatar_get_placeholder()
return channel_partner_sudo.env['ir.http']._placeholder_image_get_response(placeholder)
return channel_partner_sudo.sudo(False).env['ir.http']._content_image(model='mail.guest', res_id=guest_id, field='avatar_128')
return channel_partner_sudo.env['ir.http']._content_image(model='mail.guest', res_id=guest_id, field='avatar_128')
@http.route('/mail/channel/<int:channel_id>/attachment/<int:attachment_id>', methods=['GET'], type='http', auth='public')
def mail_channel_attachment(self, channel_id, attachment_id, download=None, **kwargs):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
if not channel_partner_sudo.env['ir.attachment'].search([('id', '=', int(attachment_id)), ('res_id', '=', int(channel_id)), ('res_model', '=', 'mail.channel')], limit=1):
raise NotFound()
return channel_partner_sudo.env['ir.http']._get_content_common(res_id=int(attachment_id), download=download)
@http.route([
'/mail/channel/<int:channel_id>/image/<int:attachment_id>',
'/mail/channel/<int:channel_id>/image/<int:attachment_id>/<int:width>x<int:height>',
], methods=['GET'], type='http', auth='public')
def fetch_image(self, channel_id, attachment_id, width=0, height=0, **kwargs):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
if not channel_partner_sudo.env['ir.attachment'].search([('id', '=', int(attachment_id)), ('res_id', '=', int(channel_id)), ('res_model', '=', 'mail.channel')], limit=1):
raise NotFound()
return channel_partner_sudo.env['ir.http']._content_image(res_id=int(attachment_id), height=int(height), width=int(width))
# --------------------------------------------------------------------------
# Client Initialization
# --------------------------------------------------------------------------
@http.route('/mail/init_messaging', methods=['POST'], type='json', auth='public')
def mail_init_messaging(self, **kwargs):
if not request.env.user.sudo()._is_public():
return request.env.user.sudo(request.env.user.has_group('base.group_portal'))._init_messaging()
guest = request.env['mail.guest']._get_guest_from_request(request)
if guest:
return guest.sudo()._init_messaging()
raise NotFound()
@http.route('/mail/load_message_failures', methods=['POST'], type='json', auth='user')
def mail_load_message_failures(self, **kwargs):
return request.env.user.partner_id._message_fetch_failed()
# --------------------------------------------------------------------------
# Mailbox
# --------------------------------------------------------------------------
@http.route('/mail/inbox/messages', methods=['POST'], type='json', auth='user')
def discuss_inbox_messages(self, max_id=None, min_id=None, limit=30, **kwargs):
return request.env['mail.message']._message_fetch(domain=[('needaction', '=', True)], max_id=max_id, min_id=min_id, limit=limit)
@http.route('/mail/history/messages', methods=['POST'], type='json', auth='user')
def discuss_history_messages(self, max_id=None, min_id=None, limit=30, **kwargs):
return request.env['mail.message']._message_fetch(domain=[('needaction', '=', False)], max_id=max_id, min_id=min_id, limit=limit)
@http.route('/mail/starred/messages', methods=['POST'], type='json', auth='user')
def discuss_starred_messages(self, max_id=None, min_id=None, limit=30, **kwargs):
return request.env['mail.message']._message_fetch(domain=[('starred_partner_ids', 'in', [request.env.user.partner_id.id])], max_id=max_id, min_id=min_id, limit=limit)
# --------------------------------------------------------------------------
# Thread API (channel/chatter common)
# --------------------------------------------------------------------------
def _get_allowed_message_post_params(self):
return {'attachment_ids', 'body', 'message_type', 'partner_ids', 'subtype_xmlid', 'parent_id'}
@http.route('/mail/message/post', methods=['POST'], type='json', auth='public')
def mail_message_post(self, thread_model, thread_id, post_data, **kwargs):
guest = request.env['mail.guest']._get_guest_from_request(request)
guest.env['ir.attachment'].browse(post_data.get('attachment_ids', []))._check_attachments_access(post_data.get('attachment_tokens'))
if thread_model == 'mail.channel':
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(thread_id))
thread = channel_partner_sudo.channel_id
else:
thread = request.env[thread_model].browse(int(thread_id)).exists()
return thread.message_post(**{key: value for key, value in post_data.items() if key in self._get_allowed_message_post_params()}).message_format()[0]
@http.route('/mail/message/update_content', methods=['POST'], type='json', auth='public')
def mail_message_update_content(self, message_id, body, attachment_ids, attachment_tokens=None, **kwargs):
guest = request.env['mail.guest']._get_guest_from_request(request)
guest.env['ir.attachment'].browse(attachment_ids)._check_attachments_access(attachment_tokens)
message_sudo = guest.env['mail.message'].browse(message_id).sudo().exists()
if not message_sudo.is_current_user_or_guest_author and not guest.env.user._is_admin():
raise NotFound()
message_sudo._update_content(body=body, attachment_ids=attachment_ids)
return {
'id': message_sudo.id,
'body': message_sudo.body,
'attachments': [('insert-and-replace', message_sudo.attachment_ids.sorted()._attachment_format(commands=True))],
}
@http.route('/mail/attachment/upload', methods=['POST'], type='http', auth='public')
def mail_attachment_upload(self, ufile, thread_id, thread_model, is_pending=False, **kwargs):
channel_partner = request.env['mail.channel.partner']
if thread_model == 'mail.channel':
channel_partner = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(thread_id))
vals = {
'name': ufile.filename,
'raw': ufile.read(),
'res_id': int(thread_id),
'res_model': thread_model,
}
if is_pending and is_pending != 'false':
            # At this point, the message related to the uploaded file does
# not exist yet, so we use those placeholder values instead.
vals.update({
'res_id': 0,
'res_model': 'mail.compose.message',
})
if channel_partner.env.user.share:
# Only generate the access token if absolutely necessary (= not for internal user).
vals['access_token'] = channel_partner.env['ir.attachment']._generate_access_token()
try:
attachment = channel_partner.env['ir.attachment'].create(vals)
attachment._post_add_create()
attachmentData = {
'filename': ufile.filename,
'id': attachment.id,
'mimetype': attachment.mimetype,
'name': attachment.name,
'size': attachment.file_size
}
if attachment.access_token:
attachmentData['accessToken'] = attachment.access_token
except AccessError:
attachmentData = {'error': _("You are not allowed to upload an attachment here.")}
return request.make_response(
data=json.dumps(attachmentData),
headers=[('Content-Type', 'application/json')]
)
@http.route('/mail/attachment/delete', methods=['POST'], type='json', auth='public')
def mail_attachment_delete(self, attachment_id, access_token=None, **kwargs):
attachment_sudo = request.env['ir.attachment'].browse(int(attachment_id)).sudo().exists()
if not attachment_sudo:
target = request.env.user.partner_id
request.env['bus.bus']._sendone(target, 'ir.attachment/delete', {'id': attachment_id})
return
if not request.env.user.share:
# Check through standard access rights/rules for internal users.
attachment_sudo.sudo(False)._delete_and_notify()
return
# For non-internal users 2 cases are supported:
        #   - Either the attachment is linked to a message: verify the request is made by the author of the message (portal user or guest).
        #   - Or a valid access token is given: also verify the message is pending (because unfortunately in portal a token is also provided to guests for viewing others' attachments).
guest = request.env['mail.guest']._get_guest_from_request(request)
message_sudo = guest.env['mail.message'].sudo().search([('attachment_ids', 'in', attachment_sudo.ids)], limit=1)
if message_sudo:
if not message_sudo.is_current_user_or_guest_author:
raise NotFound()
else:
if not access_token or not attachment_sudo.access_token or not consteq(access_token, attachment_sudo.access_token):
raise NotFound()
if attachment_sudo.res_model != 'mail.compose.message' or attachment_sudo.res_id != 0:
raise NotFound()
attachment_sudo._delete_and_notify()
@http.route('/mail/message/add_reaction', methods=['POST'], type='json', auth='public')
def mail_message_add_reaction(self, message_id, content):
guest_sudo = request.env['mail.guest']._get_guest_from_request(request).sudo()
message_sudo = guest_sudo.env['mail.message'].browse(int(message_id)).exists()
if not message_sudo:
raise NotFound()
if request.env.user.sudo()._is_public():
if not guest_sudo or not message_sudo.model == 'mail.channel' or message_sudo.res_id not in guest_sudo.channel_ids.ids:
raise NotFound()
message_sudo._message_add_reaction(content=content)
guests = [('insert', {'id': guest_sudo.id})]
partners = []
else:
message_sudo.sudo(False)._message_add_reaction(content=content)
guests = []
partners = [('insert', {'id': request.env.user.partner_id.id})]
reactions = message_sudo.env['mail.message.reaction'].search([('message_id', '=', message_sudo.id), ('content', '=', content)])
return {
'id': message_sudo.id,
'messageReactionGroups': [('insert' if len(reactions) > 0 else 'insert-and-unlink', {
'messageId': message_sudo.id,
'content': content,
'count': len(reactions),
'guests': guests,
'partners': partners,
})],
}
@http.route('/mail/message/remove_reaction', methods=['POST'], type='json', auth='public')
def mail_message_remove_reaction(self, message_id, content):
guest_sudo = request.env['mail.guest']._get_guest_from_request(request).sudo()
message_sudo = guest_sudo.env['mail.message'].browse(int(message_id)).exists()
if not message_sudo:
raise NotFound()
if request.env.user.sudo()._is_public():
if not guest_sudo or not message_sudo.model == 'mail.channel' or message_sudo.res_id not in guest_sudo.channel_ids.ids:
raise NotFound()
message_sudo._message_remove_reaction(content=content)
guests = [('insert-and-unlink', {'id': guest_sudo.id})]
partners = []
else:
message_sudo.sudo(False)._message_remove_reaction(content=content)
guests = []
partners = [('insert-and-unlink', {'id': request.env.user.partner_id.id})]
reactions = message_sudo.env['mail.message.reaction'].search([('message_id', '=', message_sudo.id), ('content', '=', content)])
return {
'id': message_sudo.id,
'messageReactionGroups': [('insert' if len(reactions) > 0 else 'insert-and-unlink', {
'messageId': message_sudo.id,
'content': content,
'count': len(reactions),
'guests': guests,
'partners': partners,
})],
}
# --------------------------------------------------------------------------
# Channel API
# --------------------------------------------------------------------------
@http.route('/mail/channel/add_guest_as_member', methods=['POST'], type='json', auth='public')
def mail_channel_add_guest_as_member(self, channel_id, channel_uuid, **kwargs):
channel_sudo = request.env['mail.channel'].browse(int(channel_id)).sudo().exists()
if not channel_sudo or not channel_sudo.uuid or not consteq(channel_sudo.uuid, channel_uuid):
raise NotFound()
if channel_sudo.channel_type == 'chat':
raise NotFound()
guest = channel_sudo.env['mail.guest']._get_guest_from_request(request)
# Only guests should take this route.
if not guest:
raise NotFound()
channel_partner = channel_sudo.env['mail.channel.partner']._get_as_sudo_from_request(request=request, channel_id=channel_id)
        # Do not add the guest to channel members if they are already a member.
if not channel_partner:
channel_sudo = channel_sudo.with_context(guest=guest)
try:
channel_sudo.add_members(guest_ids=[guest.id])
except UserError:
raise NotFound()
@http.route('/mail/channel/messages', methods=['POST'], type='json', auth='public')
def mail_channel_messages(self, channel_id, max_id=None, min_id=None, limit=30, **kwargs):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
return channel_partner_sudo.env['mail.message']._message_fetch(domain=[
('res_id', '=', channel_id),
('model', '=', 'mail.channel'),
('message_type', '!=', 'user_notification'),
], max_id=max_id, min_id=min_id, limit=limit)
@http.route('/mail/channel/set_last_seen_message', methods=['POST'], type='json', auth='public')
def mail_channel_mark_as_seen(self, channel_id, last_message_id, **kwargs):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
return channel_partner_sudo.channel_id._channel_seen(int(last_message_id))
@http.route('/mail/channel/ping', methods=['POST'], type='json', auth='public')
def channel_ping(self, channel_id, rtc_session_id=None, check_rtc_session_ids=None):
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
if rtc_session_id:
channel_partner_sudo.channel_id.rtc_session_ids.filtered_domain([
('id', '=', int(rtc_session_id)),
('channel_partner_id', '=', channel_partner_sudo.id),
]).write({}) # update write_date
current_rtc_sessions, outdated_rtc_sessions = channel_partner_sudo._rtc_sync_sessions(check_rtc_session_ids=check_rtc_session_ids)
return {'rtcSessions': [
('insert', [rtc_session_sudo._mail_rtc_session_format(complete_info=False) for rtc_session_sudo in current_rtc_sessions]),
('insert-and-unlink', [{'id': missing_rtc_session_sudo.id} for missing_rtc_session_sudo in outdated_rtc_sessions]),
]}
# --------------------------------------------------------------------------
# Chatter API
# --------------------------------------------------------------------------
@http.route('/mail/thread/data', methods=['POST'], type='json', auth='user')
def mail_thread_data(self, thread_model, thread_id, request_list, **kwargs):
res = {}
thread = request.env[thread_model].with_context(active_test=False).search([('id', '=', thread_id)])
if 'attachments' in request_list:
res['attachments'] = thread.env['ir.attachment'].search([('res_id', '=', thread.id), ('res_model', '=', thread._name)], order='id desc')._attachment_format(commands=True)
return res
@http.route('/mail/thread/messages', methods=['POST'], type='json', auth='user')
def mail_thread_messages(self, thread_model, thread_id, max_id=None, min_id=None, limit=30, **kwargs):
return request.env['mail.message']._message_fetch(domain=[
('res_id', '=', int(thread_id)),
('model', '=', thread_model),
('message_type', '!=', 'user_notification'),
], max_id=max_id, min_id=min_id, limit=limit)
@http.route('/mail/read_followers', methods=['POST'], type='json', auth='user')
def read_followers(self, res_model, res_id):
request.env['mail.followers'].check_access_rights("read")
request.env[res_model].check_access_rights("read")
request.env[res_model].browse(res_id).check_access_rule("read")
follower_recs = request.env['mail.followers'].search([('res_model', '=', res_model), ('res_id', '=', res_id)])
followers = []
follower_id = None
for follower in follower_recs:
if follower.partner_id == request.env.user.partner_id:
follower_id = follower.id
followers.append({
'id': follower.id,
'partner_id': follower.partner_id.id,
'name': follower.name,
'display_name': follower.display_name,
'email': follower.email,
'is_active': follower.is_active,
# When editing the followers, the "pencil" icon that leads to the edition of subtypes
                # should always be displayed, and not only when "debug" mode is activated.
'is_editable': True,
'partner': follower.partner_id.mail_partner_format()[follower.partner_id],
})
return {
'followers': followers,
'subtypes': self.read_subscription_data(follower_id) if follower_id else None
}
@http.route('/mail/read_subscription_data', methods=['POST'], type='json', auth='user')
def read_subscription_data(self, follower_id):
""" Computes:
- message_subtype_data: data about document subtypes: which are
available, which are followed if any """
request.env['mail.followers'].check_access_rights("read")
follower = request.env['mail.followers'].sudo().browse(follower_id)
follower.ensure_one()
request.env[follower.res_model].check_access_rights("read")
record = request.env[follower.res_model].browse(follower.res_id)
record.check_access_rule("read")
# find current model subtypes, add them to a dictionary
subtypes = record._mail_get_message_subtypes()
followed_subtypes_ids = set(follower.subtype_ids.ids)
subtypes_list = [{
'name': subtype.name,
'res_model': subtype.res_model,
'sequence': subtype.sequence,
'default': subtype.default,
'internal': subtype.internal,
'followed': subtype.id in followed_subtypes_ids,
'parent_model': subtype.parent_id.res_model,
'id': subtype.id
} for subtype in subtypes]
return sorted(subtypes_list,
key=lambda it: (it['parent_model'] or '', it['res_model'] or '', it['internal'], it['sequence']))
@http.route('/mail/get_suggested_recipients', methods=['POST'], type='json', auth='user')
def message_get_suggested_recipients(self, model, res_ids):
records = request.env[model].browse(res_ids)
try:
records.check_access_rule('read')
records.check_access_rights('read')
except Exception:
return {}
return records._message_get_suggested_recipients()
# --------------------------------------------------------------------------
# RTC API TODO move check logic in routes.
# --------------------------------------------------------------------------
@http.route('/mail/rtc/session/notify_call_members', methods=['POST'], type="json", auth="public")
def session_call_notify(self, peer_notifications):
""" Sends content to other session of the same channel, only works if the user is the user of that session.
This is used to send peer to peer information between sessions.
:param peer_notifications: list of tuple with the following elements:
- int sender_session_id: id of the session from which the content is sent
- list target_session_ids: list of the ids of the sessions that should receive the content
- string content: the content to send to the other sessions
"""
guest = request.env['mail.guest']._get_guest_from_request(request)
notifications_by_session = defaultdict(list)
for sender_session_id, target_session_ids, content in peer_notifications:
session_sudo = guest.env['mail.channel.rtc.session'].sudo().browse(int(sender_session_id)).exists()
if not session_sudo or (session_sudo.guest_id and session_sudo.guest_id != guest) or (session_sudo.partner_id and session_sudo.partner_id != request.env.user.partner_id):
continue
notifications_by_session[session_sudo].append(([int(sid) for sid in target_session_ids], content))
for session_sudo, notifications in notifications_by_session.items():
session_sudo._notify_peers(notifications)
@http.route('/mail/rtc/session/update_and_broadcast', methods=['POST'], type="json", auth="public")
def session_update_and_broadcast(self, session_id, values):
""" Update a RTC session and broadcasts the changes to the members of its channel,
only works of the user is the user of that session.
:param int session_id: id of the session to update
:param dict values: write dict for the fields to update
"""
if request.env.user._is_public():
guest = request.env['mail.guest']._get_guest_from_request(request)
if guest:
session = guest.env['mail.channel.rtc.session'].sudo().browse(int(session_id)).exists()
if session and session.guest_id == guest:
session._update_and_broadcast(values)
return
return
session = request.env['mail.channel.rtc.session'].sudo().browse(int(session_id)).exists()
if session and session.partner_id == request.env.user.partner_id:
session._update_and_broadcast(values)
@http.route('/mail/rtc/channel/join_call', methods=['POST'], type="json", auth="public")
def channel_call_join(self, channel_id, check_rtc_session_ids=None):
""" Joins the RTC call of a channel if the user is a member of that channel
:param int channel_id: id of the channel to join
"""
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
return channel_partner_sudo._rtc_join_call(check_rtc_session_ids=check_rtc_session_ids)
@http.route('/mail/rtc/channel/leave_call', methods=['POST'], type="json", auth="public")
def channel_call_leave(self, channel_id):
""" Disconnects the current user from a rtc call and clears any invitation sent to that user on this channel
:param int channel_id: id of the channel from which to disconnect
"""
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
return channel_partner_sudo._rtc_leave_call()
@http.route('/mail/rtc/channel/cancel_call_invitation', methods=['POST'], type="json", auth="public")
def channel_call_cancel_invitation(self, channel_id, partner_ids=None, guest_ids=None):
""" Sends invitations to join the RTC call to all connected members of the thread who are not already invited.
:param list partner_ids: list of the partner ids to invite
:param list guest_ids: list of the guest ids to invite
if either partner_ids or guest_ids is set, only the specified ids will be invited.
"""
channel_partner_sudo = request.env['mail.channel.partner']._get_as_sudo_from_request_or_raise(request=request, channel_id=int(channel_id))
return channel_partner_sudo.channel_id._rtc_cancel_invitations(partner_ids=partner_ids, guest_ids=guest_ids)
@http.route('/mail/rtc/audio_worklet_processor', methods=['GET'], type='http', auth='public')
def audio_worklet_processor(self):
""" Returns a JS file that declares a WorkletProcessor class in
a WorkletGlobalScope, which means that it cannot be added to the
bundles like other assets.
"""
return request.make_response(
file_open('mail/static/src/worklets/audio_processor.js', 'rb').read(),
headers=[
('Content-Type', 'application/javascript'),
('Cache-Control', 'max-age=%s' % http.STATIC_CACHE),
]
)
# --------------------------------------------------------------------------
# Guest API
# --------------------------------------------------------------------------
@http.route('/mail/guest/update_name', methods=['POST'], type='json', auth='public')
def mail_guest_update_name(self, guest_id, name):
guest = request.env['mail.guest']._get_guest_from_request(request)
guest_to_rename_sudo = guest.env['mail.guest'].browse(guest_id).sudo().exists()
if not guest_to_rename_sudo:
raise NotFound()
if guest_to_rename_sudo != guest and not request.env.user._is_admin():
raise NotFound()
guest_to_rename_sudo._update_name(name)
| 59.531879 | 35,481 |
| 1,650 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Sales and Warehouse Management',
'version': '1.0',
'category': 'Hidden',
'summary': 'Quotation, Sales Orders, Delivery & Invoicing Control',
'description': """
Manage sales quotations and orders
==================================
This module makes the link between the sales and warehouse management applications.
Preferences
-----------
* Shipping: Choice of delivery at once or partial delivery
* Invoicing: choose how invoices will be paid
* Incoterms: International Commercial terms
""",
'depends': ['sale', 'stock_account'],
'data': [
'security/sale_stock_security.xml',
'security/ir.model.access.csv',
'views/sale_order_views.xml',
'views/stock_views.xml',
'views/res_config_settings_views.xml',
'views/sale_stock_portal_template.xml',
'views/stock_production_lot_views.xml',
'views/res_users_views.xml',
'report/report_stock_forecasted.xml',
'report/sale_order_report_templates.xml',
'report/stock_report_deliveryslip.xml',
'data/mail_templates.xml',
'data/sale_stock_data.xml',
'wizard/stock_rules_report_views.xml',
'wizard/sale_order_cancel_views.xml',
],
'demo': ['data/sale_order_demo.xml'],
'installable': True,
'auto_install': True,
'assets': {
'web.assets_backend': [
'sale_stock/static/src/js/**/*',
],
'web.assets_qweb': [
'sale_stock/static/src/xml/**/*',
],
},
'license': 'LGPL-3',
}
| 31.730769 | 1,650 |
| 1,871 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
from odoo.addons.sale_stock.tests.test_anglo_saxon_valuation_reconciliation import TestValuationReconciliation
from odoo.tests import tagged
@tagged('post_install', '-at_install')
class TestAngloSaxonAccounting(TestValuationReconciliation):
def test_cogs_should_use_price_from_the_right_company(self):
"""
Reproduce the flow of creating an invoice from a sale order with company A
and posting the invoice with both companies selected and company B as the main.
"""
company_a_data = self.company_data
company_b_data = self.company_data_2
companies_with_b_first = company_b_data['company'] + company_a_data['company']
product = self.test_product_delivery
# set different cost price for the same product in the 2 companies
company_a_standard_price = 20.0
product.with_company(company_a_data['company']).standard_price = company_a_standard_price
company_b_standard_price = 10.0
product.with_company(company_b_data['company']).standard_price = company_b_standard_price
# create sale order with company A in draft (by default, self.env.user.company_id is company A)
company_a_order = self._create_sale(product, '2021-01-01')
company_a_invoice = self._create_invoice_for_so(company_a_order, product, '2021-01-10')
# Post the invoice from company A with company B
company_a_invoice.with_context(allowed_company_ids=companies_with_b_first.ids).action_post()
# check cost used for anglo_saxon_line is from company A
anglo_saxon_lines = company_a_invoice.line_ids.filtered('is_anglo_saxon_line')
self.assertRecordValues(anglo_saxon_lines, [
{'debit': 0.0, 'credit': company_a_standard_price},
{'debit': company_a_standard_price, 'credit': 0.0},
])
| 51.972222 | 1,871 |
| 6,545 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
from odoo import fields, Command
from odoo.addons.account.tests.common import AccountTestInvoicingCommon
from odoo.tests import tagged, Form
from odoo.exceptions import UserError
@tagged('post_install', '-at_install')
class TestAccruedStockSaleOrders(AccountTestInvoicingCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
uom_unit = cls.env.ref('uom.product_uom_unit')
cls.product_order = cls.env['product.product'].create({
'name': "Product",
'list_price': 30.0,
'type': 'consu',
'uom_id': uom_unit.id,
'uom_po_id': uom_unit.id,
'invoice_policy': 'delivery',
})
cls.sale_order = cls.env['sale.order'].with_context(tracking_disable=True).create({
'partner_id': cls.partner_a.id,
'order_line': [
Command.create({
'name': cls.product_order.name,
'product_id': cls.product_order.id,
'product_uom_qty': 10.0,
'product_uom': cls.product_order.uom_id.id,
'price_unit': cls.product_order.list_price,
'tax_id': False,
})
]
})
cls.sale_order.action_confirm()
cls.account_expense = cls.company_data['default_account_expense']
cls.account_revenue = cls.company_data['default_account_revenue']
def test_sale_stock_accruals(self):
# deliver 2 on 2020-01-02
pick = self.sale_order.picking_ids
pick.move_lines.write({'quantity_done': 2})
pick.button_validate()
wiz_act = pick.button_validate()
wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
wiz.process()
pick.move_lines.write({'date': fields.Date.to_date('2020-01-02')})
# deliver 3 on 2020-01-06
pick = pick.copy()
pick.move_lines.write({'quantity_done': 3})
wiz_act = pick.button_validate()
pick.move_lines.write({'date': fields.Date.to_date('2020-01-06')})
wizard = self.env['account.accrued.orders.wizard'].with_context({
'active_model': 'sale.order',
'active_ids': self.sale_order.ids,
}).create({
'account_id': self.account_expense.id,
'date': '2020-01-01',
})
# nothing to invoice on 2020-01-01
with self.assertRaises(UserError):
wizard.create_entries()
# 2 to invoice on 2020-01-04
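        # expected accrued amount below: 2 delivered units * 30.0 unit price = 60.0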
wizard.date = fields.Date.to_date('2020-01-04')
self.assertRecordValues(self.env['account.move'].search(wizard.create_entries()['domain']).line_ids, [
# reverse move lines
{'account_id': self.account_revenue.id, 'debit': 60, 'credit': 0},
{'account_id': wizard.account_id.id, 'debit': 0, 'credit': 60},
# move lines
{'account_id': self.account_revenue.id, 'debit': 0, 'credit': 60},
{'account_id': wizard.account_id.id, 'debit': 60, 'credit': 0},
])
# 5 to invoice on 2020-01-07
wizard.date = fields.Date.to_date('2020-01-07')
self.assertRecordValues(self.env['account.move'].search(wizard.create_entries()['domain']).line_ids, [
# reverse move lines
{'account_id': self.account_revenue.id, 'debit': 150, 'credit': 0},
{'account_id': wizard.account_id.id, 'debit': 0, 'credit': 150},
# move lines
{'account_id': self.account_revenue.id, 'debit': 0, 'credit': 150},
{'account_id': wizard.account_id.id, 'debit': 150, 'credit': 0},
])
def test_sale_stock_invoiced_accrued_entries(self):
# deliver 2 on 2020-01-02
pick = self.sale_order.picking_ids
pick.move_lines.write({'quantity_done': 2})
pick.button_validate()
wiz_act = pick.button_validate()
wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
wiz.process()
pick.move_lines.write({'date': fields.Date.to_date('2020-01-02')})
# invoice on 2020-01-04
inv = self.sale_order._create_invoices()
inv.invoice_date = fields.Date.to_date('2020-01-04')
inv.action_post()
# deliver 3 on 2020-01-06
pick = pick.copy()
pick.move_lines.write({'quantity_done': 3})
wiz_act = pick.button_validate()
pick.move_lines.write({'date': fields.Date.to_date('2020-01-06')})
# invoice on 2020-01-08
inv = self.sale_order._create_invoices()
inv.invoice_date = fields.Date.to_date('2020-01-08')
inv.action_post()
wizard = self.env['account.accrued.orders.wizard'].with_context({
'active_model': 'sale.order',
'active_ids': self.sale_order.ids,
}).create({
'account_id': self.company_data['default_account_expense'].id,
'date': '2020-01-02',
})
# 2 to invoice on 2020-01-07
self.assertRecordValues(self.env['account.move'].search(wizard.create_entries()['domain']).line_ids, [
# reverse move lines
{'account_id': self.account_revenue.id, 'debit': 60, 'credit': 0},
{'account_id': wizard.account_id.id, 'debit': 0, 'credit': 60},
# move lines
{'account_id': self.account_revenue.id, 'debit': 0, 'credit': 60},
{'account_id': wizard.account_id.id, 'debit': 60, 'credit': 0},
])
# nothing to invoice on 2020-01-05
wizard.date = fields.Date.to_date('2020-01-05')
with self.assertRaises(UserError):
wizard.create_entries()
# 3 to invoice on 2020-01-07
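        # (5 units delivered - 2 already invoiced = 3 units x 30.0 = 90.0 to accrue)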
wizard.date = fields.Date.to_date('2020-01-07')
self.assertRecordValues(self.env['account.move'].search(wizard.create_entries()['domain']).line_ids, [
# reverse move lines
{'account_id': self.account_revenue.id, 'debit': 90, 'credit': 0},
{'account_id': wizard.account_id.id, 'debit': 0, 'credit': 90},
# move lines
{'account_id': self.account_revenue.id, 'debit': 0, 'credit': 90},
{'account_id': wizard.account_id.id, 'debit': 90, 'credit': 0},
])
# nothing to invoice on 2020-01-09
wizard.date = fields.Date.to_date('2020-01-09')
with self.assertRaises(UserError):
wizard.create_entries()
| 43.344371 | 6,545 |
2,084 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import HttpCase, tagged, Form
from odoo.addons.sale.tests.common import TestSaleCommon
from odoo.addons.mail.tests.common import mail_new_test_user
@tagged('post_install', '-at_install')
class TestControllersAccessRights(HttpCase, TestSaleCommon):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.portal_user = mail_new_test_user(cls.env, login='jimmy-portal', groups='base.group_portal')
    def test_SO_and_DO_portal_access(self):
        """ Ensure that it is possible to open both the SO and the DO, either by using the access
        token or by being connected as a portal user."""
so_form = Form(self.env['sale.order'])
so_form.partner_id = self.portal_user.partner_id
with so_form.order_line.new() as line:
line.product_id = self.product_a
so = so_form.save()
so.action_confirm()
picking = so.picking_ids
# Try to open SO/DO using the access token or being connected as portal user
for login in (None, self.portal_user.login):
so_url = '/my/orders/%s' % so.id
picking_url = '/my/picking/pdf/%s' % picking.id
self.authenticate(login, login)
if not login:
so._portal_ensure_token()
so_token = so.access_token
so_url = '%s?access_token=%s' % (so_url, so_token)
picking_url = '%s?access_token=%s' % (picking_url, so_token)
response = self.url_open(
url=so_url,
allow_redirects=False,
)
self.assertEqual(response.status_code, 200, 'Should be correct %s' % ('with a connected user' if login else 'using access token'))
response = self.url_open(
url=picking_url,
allow_redirects=False,
)
self.assertEqual(response.status_code, 200, 'Should be correct %s' % ('with a connected user' if login else 'using access token'))
| 41.68 | 2,084 |
75,965 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import Form, tagged
from odoo.addons.stock_account.tests.test_anglo_saxon_valuation_reconciliation_common import ValuationReconciliationTestCommon
from odoo.exceptions import UserError
@tagged('post_install', '-at_install')
class TestAngloSaxonValuation(ValuationReconciliationTestCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
cls.env.user.company_id.anglo_saxon_accounting = True
cls.product = cls.env['product.product'].create({
'name': 'product',
'type': 'product',
'categ_id': cls.stock_account_product_categ.id,
})
def _inv_adj_two_units(self):
self.env['stock.quant'].with_context(inventory_mode=True).create({
'product_id': self.product.id, # tracking serial
'inventory_quantity': 2,
'location_id': self.company_data['default_warehouse'].lot_stock_id.id,
}).action_apply_inventory()
def _so_and_confirm_two_units(self):
sale_order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 2.0,
'product_uom': self.product.uom_id.id,
'price_unit': 12,
'tax_id': False, # no love taxes amls
})],
})
sale_order.action_confirm()
return sale_order
def _fifo_in_one_eight_one_ten(self):
# Put two items in stock.
in_move_1 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': 8,
})
in_move_1._action_confirm()
in_move_1.quantity_done = 1
in_move_1._action_done()
in_move_2 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': 10,
})
in_move_2._action_confirm()
in_move_2.quantity_done = 1
in_move_2._action_done()
# -------------------------------------------------------------------------
# Standard Ordered
# -------------------------------------------------------------------------
def test_standard_ordered_invoice_pre_delivery(self):
"""Standard price set to 10. Get 2 units in stock. Sale order 2@12. Standard price set
to 14. Invoice 2 without delivering. The amount in Stock OUT and COGS should be 14*2.
"""
self.product.categ_id.property_cost_method = 'standard'
self.product.invoice_policy = 'order'
self.product.standard_price = 10.0
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# standard price to 14
self.product.standard_price = 14.0
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 28)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 28)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_standard_ordered_invoice_post_partial_delivery_1(self):
"""Standard price set to 10. Get 2 units in stock. Sale order 2@12. Deliver 1, invoice 1,
change the standard price to 14, deliver one, change the standard price to 16, invoice 1.
The amounts used in Stock OUT and COGS should be 10 then 14."""
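        # Expected, per the assertions below: the first invoice carries a COGS of 1 x 10 (cost at
        # the first delivery), the second a COGS of 1 x 14 (cost at the backorder delivery); the
        # later change to 16 does not affect units already delivered.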
self.product.categ_id.property_cost_method = 'standard'
self.product.invoice_policy = 'order'
self.product.standard_price = 10.0
        # Put two items in stock.
        self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
# Invoice 1
invoice = sale_order._create_invoices()
invoice_form = Form(invoice)
with invoice_form.invoice_line_ids.edit(0) as invoice_line:
invoice_line.quantity = 1
invoice_form.save()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 10)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 10)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 12)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 12)
# change the standard price to 14
self.product.standard_price = 14.0
# deliver the backorder
sale_order.picking_ids[0].move_lines.quantity_done = 1
sale_order.picking_ids[0].button_validate()
# change the standard price to 16
self.product.standard_price = 16.0
# invoice 1
invoice2 = sale_order._create_invoices()
invoice2.action_post()
amls = invoice2.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 14)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 14)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 12)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 12)
def test_standard_ordered_invoice_post_delivery(self):
"""Standard price set to 10. Get 2 units in stock. Sale order 2@12. Deliver 1, change the
standard price to 14, deliver one, invoice 2. The amounts used in Stock OUT and COGS should
        be 10 + 14 = 24."""
self.product.categ_id.property_cost_method = 'standard'
self.product.invoice_policy = 'order'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
# change the standard price to 14
self.product.standard_price = 14.0
# deliver the backorder
sale_order.picking_ids.filtered('backorder_id').move_lines.quantity_done = 1
sale_order.picking_ids.filtered('backorder_id').button_validate()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 24)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 24)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
# -------------------------------------------------------------------------
# Standard Delivered
# -------------------------------------------------------------------------
def test_standard_delivered_invoice_pre_delivery(self):
"""Not possible to invoice pre delivery."""
self.product.categ_id.property_cost_method = 'standard'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Invoice the sale order.
# Nothing delivered = nothing to invoice.
with self.assertRaises(UserError):
sale_order._create_invoices()
def test_standard_delivered_invoice_post_partial_delivery(self):
"""Standard price set to 10. Get 2 units in stock. Sale order 2@12. Deliver 1, invoice 1,
change the standard price to 14, deliver one, change the standard price to 16, invoice 1.
The amounts used in Stock OUT and COGS should be 10 then 14."""
self.product.categ_id.property_cost_method = 'standard'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
        # Put two items in stock.
        self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
# Invoice 1
invoice = sale_order._create_invoices()
invoice_form = Form(invoice)
with invoice_form.invoice_line_ids.edit(0) as invoice_line:
invoice_line.quantity = 1
invoice_form.save()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 10)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 10)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 12)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 12)
# change the standard price to 14
self.product.standard_price = 14.0
# deliver the backorder
sale_order.picking_ids[0].move_lines.quantity_done = 1
sale_order.picking_ids[0].button_validate()
# change the standard price to 16
self.product.standard_price = 16.0
# invoice 1
invoice2 = sale_order._create_invoices()
invoice2.action_post()
amls = invoice2.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 14)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 14)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 12)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 12)
def test_standard_delivered_invoice_post_delivery(self):
"""Standard price set to 10. Get 2 units in stock. Sale order 2@12. Deliver 1, change the
standard price to 14, deliver one, invoice 2. The amounts used in Stock OUT and COGS should
        be 10 + 14 = 24."""
self.product.categ_id.property_cost_method = 'standard'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
# change the standard price to 14
self.product.standard_price = 14.0
# deliver the backorder
sale_order.picking_ids.filtered('backorder_id').move_lines.quantity_done = 1
sale_order.picking_ids.filtered('backorder_id').button_validate()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 24)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 24)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
# -------------------------------------------------------------------------
# AVCO Ordered
# -------------------------------------------------------------------------
def test_avco_ordered_invoice_pre_delivery(self):
"""Standard price set to 10. Sale order 2@12. Invoice without delivering."""
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'order'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 20)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 20)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_avco_ordered_invoice_post_partial_delivery(self):
"""Standard price set to 10. Sale order 2@12. Invoice after delivering 1."""
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'order'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 20)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 20)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_avco_ordered_invoice_post_delivery(self):
"""Standard price set to 10. Sale order 2@12. Invoice after full delivery."""
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'order'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
        # Deliver two.
sale_order.picking_ids.move_lines.quantity_done = 2
sale_order.picking_ids.button_validate()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 20)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 20)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_avco_ordered_return_and_receipt(self):
""" Sell and deliver some products before the user encodes the products receipt """
product = self.product
product.invoice_policy = 'order'
product.type = 'product'
product.categ_id.property_cost_method = 'average'
product.categ_id.property_valuation = 'real_time'
product.list_price = 100
product.standard_price = 50
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': 5.0,
'product_uom': product.uom_id.id,
'price_unit': product.list_price})],
})
so.action_confirm()
pick = so.picking_ids
pick.move_lines.write({'quantity_done': 5})
pick.button_validate()
product.standard_price = 40
stock_return_picking_form = Form(self.env['stock.return.picking']
.with_context(active_ids=pick.ids, active_id=pick.sorted().ids[0], active_model='stock.picking'))
return_wiz = stock_return_picking_form.save()
return_wiz.product_return_moves.quantity = 1
return_wiz.product_return_moves.to_refund = False
res = return_wiz.create_returns()
return_pick = self.env['stock.picking'].browse(res['res_id'])
return_pick.move_lines.write({'quantity_done': 1})
return_pick.button_validate()
picking = self.env['stock.picking'].create({
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'picking_type_id': self.company_data['default_warehouse'].in_type_id.id,
})
# We don't set the price_unit so that the `standard_price` will be used (see _get_price_unit()):
self.env['stock.move'].create({
'name': 'test_immediate_validate_1',
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'picking_id': picking.id,
'product_id': product.id,
'product_uom': product.uom_id.id,
'quantity_done': 1,
})
picking.button_validate()
invoice = so._create_invoices()
invoice.action_post()
self.assertEqual(invoice.state, 'posted')
# -------------------------------------------------------------------------
# AVCO Delivered
# -------------------------------------------------------------------------
def test_avco_delivered_invoice_pre_delivery(self):
"""Standard price set to 10. Sale order 2@12. Invoice without delivering. """
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Invoice the sale order.
# Nothing delivered = nothing to invoice.
with self.assertRaises(UserError):
sale_order._create_invoices()
def test_avco_delivered_invoice_post_partial_delivery(self):
"""Standard price set to 10. Sale order 2@12. Invoice after delivering 1."""
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 10)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 10)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 12)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 12)
def test_avco_delivered_invoice_post_delivery(self):
"""Standard price set to 10. Sale order 2@12. Invoice after full delivery."""
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
# Put two items in stock.
self._inv_adj_two_units()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
        # Deliver two.
sale_order.picking_ids.move_lines.quantity_done = 2
sale_order.picking_ids.button_validate()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 20)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 20)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_avco_partially_owned_and_delivered_invoice_post_delivery(self):
"""
Standard price set to 10. Sale order 2@12. One of the delivered
products was owned by an external partner. Invoice after full delivery.
"""
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
self.env['stock.quant']._update_available_quantity(self.product, self.company_data['default_warehouse'].lot_stock_id, 1, owner_id=self.partner_b)
self.env['stock.quant']._update_available_quantity(self.product, self.company_data['default_warehouse'].lot_stock_id, 1)
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver both products (there should be two SML)
sale_order.picking_ids.move_line_ids.qty_done = 1
sale_order.picking_ids.button_validate()
# Invoice one by one
invoice01 = sale_order._create_invoices()
with Form(invoice01) as invoice_form:
with invoice_form.invoice_line_ids.edit(0) as line_form:
line_form.quantity = 1
invoice01.action_post()
invoice02 = sale_order._create_invoices()
invoice02.action_post()
# COGS should ignore the owned product
self.assertRecordValues(invoice01.line_ids, [
# pylint: disable=bad-whitespace
{'account_id': self.company_data['default_account_revenue'].id, 'debit': 0, 'credit': 12},
{'account_id': self.company_data['default_account_receivable'].id, 'debit': 12, 'credit': 0},
{'account_id': self.company_data['default_account_stock_out'].id, 'debit': 0, 'credit': 10},
{'account_id': self.company_data['default_account_expense'].id, 'debit': 10, 'credit': 0},
])
self.assertRecordValues(invoice02.line_ids, [
# pylint: disable=bad-whitespace
{'account_id': self.company_data['default_account_revenue'].id, 'debit': 0, 'credit': 12},
{'account_id': self.company_data['default_account_receivable'].id, 'debit': 12, 'credit': 0},
])
def test_avco_fully_owned_and_delivered_invoice_post_delivery(self):
"""
Standard price set to 10. Sale order 2@12. The products are owned by an
external partner. Invoice after full delivery.
"""
self.product.categ_id.property_cost_method = 'average'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
self.env['stock.quant']._update_available_quantity(self.product, self.company_data['default_warehouse'].lot_stock_id, 2, owner_id=self.partner_b)
sale_order = self._so_and_confirm_two_units()
sale_order.picking_ids.move_line_ids.qty_done = 2
sale_order.picking_ids.button_validate()
invoice = sale_order._create_invoices()
invoice.action_post()
# COGS should not exist because the products are owned by an external partner
amls = invoice.line_ids
self.assertRecordValues(amls, [
# pylint: disable=bad-whitespace
{'account_id': self.company_data['default_account_revenue'].id, 'debit': 0, 'credit': 24},
{'account_id': self.company_data['default_account_receivable'].id, 'debit': 24, 'credit': 0},
])
# -------------------------------------------------------------------------
# FIFO Ordered
# -------------------------------------------------------------------------
def test_fifo_ordered_invoice_pre_delivery(self):
"""Receive at 8 then at 10. Sale order 2@12. Invoice without delivering.
        As no standard price is set, the anticipated Stock OUT and COGS amounts come from the
        first FIFO lot: 2 * 8 = 16."""
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'order'
self._fifo_in_one_eight_one_ten()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertAlmostEqual(stock_out_aml.credit, 16)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertAlmostEqual(cogs_aml.debit, 16)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_fifo_ordered_invoice_post_partial_delivery(self):
"""Receive 1@8, 1@10, so 2@12, standard price 12, deliver 1, invoice 2: the COGS amount
        should be 20: 1 unit really delivered at 8 (FIFO) and the other valued at the standard price of 12."""
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'order'
self._fifo_in_one_eight_one_ten()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
        # update the standard price to 12
self.product.standard_price = 12
# Invoice 2
invoice = sale_order._create_invoices()
invoice_form = Form(invoice)
with invoice_form.invoice_line_ids.edit(0) as invoice_line:
invoice_line.quantity = 2
invoice_form.save()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 20)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 20)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_fifo_ordered_invoice_post_delivery(self):
"""Receive at 8 then at 10. Sale order 2@12. Invoice after delivering everything."""
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'order'
self._fifo_in_one_eight_one_ten()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
        # Deliver two.
sale_order.picking_ids.move_lines.quantity_done = 2
sale_order.picking_ids.button_validate()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 18)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 18)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
# -------------------------------------------------------------------------
# FIFO Delivered
# -------------------------------------------------------------------------
def test_fifo_delivered_invoice_pre_delivery(self):
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
self._fifo_in_one_eight_one_ten()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Invoice the sale order.
# Nothing delivered = nothing to invoice.
with self.assertRaises(UserError):
invoice_id = sale_order._create_invoices()
def test_fifo_delivered_invoice_post_partial_delivery(self):
"""Receive 1@8, 1@10, so 2@12, standard price 12, deliver 1, invoice 2: the price used should be 10:
one at 8 and one at 10."""
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
self._fifo_in_one_eight_one_ten()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
# Deliver one.
sale_order.picking_ids.move_lines.quantity_done = 1
wiz = sale_order.picking_ids.button_validate()
wiz = Form(self.env[wiz['res_model']].with_context(wiz['context'])).save()
wiz.process()
        # update the standard price to 12
self.product.standard_price = 12
# Invoice 2
invoice = sale_order._create_invoices()
invoice_form = Form(invoice)
with invoice_form.invoice_line_ids.edit(0) as invoice_line:
invoice_line.quantity = 2
invoice_form.save()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 20)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 20)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_fifo_delivered_invoice_post_delivery(self):
"""Receive at 8 then at 10. Sale order 2@12. Invoice after delivering everything."""
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
self._fifo_in_one_eight_one_ten()
# Create and confirm a sale order for 2@12
sale_order = self._so_and_confirm_two_units()
        # Deliver two.
sale_order.picking_ids.move_lines.quantity_done = 2
sale_order.picking_ids.button_validate()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 18)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 18)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 24)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 24)
def test_fifo_delivered_invoice_post_delivery_2(self):
"""Receive at 8 then at 10. Sale order 10@12 and deliver without receiving the 2 missing.
receive 2@12. Invoice."""
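        # Expected, per the assertions below: COGS = 8 x 10 + 2 x 12 = 104; the -4 valuation layer
        # corrects the 2 units that were initially shipped at the standard price of 10.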
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
in_move_1 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 8,
'price_unit': 10,
})
in_move_1._action_confirm()
in_move_1.quantity_done = 8
in_move_1._action_done()
        # Create and confirm a sale order for 10@12
sale_order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 10.0,
'product_uom': self.product.uom_id.id,
'price_unit': 12,
'tax_id': False, # no love taxes amls
})],
})
sale_order.action_confirm()
# Deliver 10
sale_order.picking_ids.move_lines.quantity_done = 10
sale_order.picking_ids.button_validate()
# Make the second receipt
in_move_2 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 2,
'price_unit': 12,
})
in_move_2._action_confirm()
in_move_2.quantity_done = 2
in_move_2._action_done()
self.assertEqual(self.product.stock_valuation_layer_ids[-1].value, -4) # we sent two at 10 but they should have been sent at 12
self.assertEqual(self.product.stock_valuation_layer_ids[-1].quantity, 0)
self.assertEqual(sale_order.order_line.move_ids.stock_valuation_layer_ids[-1].quantity, 0)
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Check the resulting accounting entries
amls = invoice.line_ids
self.assertEqual(len(amls), 4)
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 104)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 104)
self.assertEqual(cogs_aml.credit, 0)
receivable_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml.debit, 120)
self.assertEqual(receivable_aml.credit, 0)
income_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml.debit, 0)
self.assertEqual(income_aml.credit, 120)
def test_fifo_delivered_invoice_post_delivery_3(self):
"""Receive 5@8, receive 8@12, sale 1@20, deliver, sale 6@20, deliver. Make sure no rouding
issues appear on the second invoice."""
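        # Expected COGS on the second invoice, per the assertion below:
        # 4 remaining units @ 8 + 2 units @ 12 = 32 + 24 = 56.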
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
# +5@8
in_move_1 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 5,
'price_unit': 8,
})
in_move_1._action_confirm()
in_move_1.quantity_done = 5
in_move_1._action_done()
# +8@12
in_move_2 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 8,
'price_unit': 12,
})
in_move_2._action_confirm()
in_move_2.quantity_done = 8
in_move_2._action_done()
# sale 1@20, deliver, invoice
sale_order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 1,
'product_uom': self.product.uom_id.id,
'price_unit': 20,
'tax_id': False,
})],
})
sale_order.action_confirm()
sale_order.picking_ids.move_lines.quantity_done = 1
sale_order.picking_ids.button_validate()
invoice = sale_order._create_invoices()
invoice.action_post()
# sale 6@20, deliver, invoice
sale_order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 6,
'product_uom': self.product.uom_id.id,
'price_unit': 20,
'tax_id': False,
})],
})
sale_order.action_confirm()
sale_order.picking_ids.move_lines.quantity_done = 6
sale_order.picking_ids.button_validate()
invoice = sale_order._create_invoices()
invoice.action_post()
# check the last anglo saxon invoice line
amls = invoice.line_ids
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 56)
self.assertEqual(cogs_aml.credit, 0)
def test_fifo_delivered_invoice_post_delivery_4(self):
"""Receive 8@10. Sale order 10@12. Deliver and also invoice it without receiving the 2 missing.
Now, receive 2@12. Make sure price difference is correctly reflected in expense account."""
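        # Expected, per the assertions below: the 2 units delivered in excess are first valued at
        # the standard price (10); once the 2@12 receipt arrives, the 2 x (12 - 10) = 4 difference
        # is added to the expense account (see revalued_cogs_aml below).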
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
self.product.standard_price = 10
in_move_1 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 8,
'price_unit': 10,
})
in_move_1._action_confirm()
in_move_1.quantity_done = 8
in_move_1._action_done()
# Create and confirm a sale order for 10@12
sale_order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 10.0,
'product_uom': self.product.uom_id.id,
'price_unit': 12,
'tax_id': False, # no love taxes amls
})],
})
sale_order.action_confirm()
# Deliver 10
sale_order.picking_ids.move_lines.quantity_done = 10
sale_order.picking_ids.button_validate()
# Invoice the sale order.
invoice = sale_order._create_invoices()
invoice.action_post()
# Make the second receipt
in_move_2 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 2,
'price_unit': 12,
})
in_move_2._action_confirm()
in_move_2.quantity_done = 2
in_move_2._action_done()
# check the last anglo saxon move line
revalued_anglo_expense_amls = sale_order.picking_ids.mapped('move_lines.stock_valuation_layer_ids')[-1].stock_move_id.account_move_ids[-1].mapped('line_ids')
revalued_cogs_aml = revalued_anglo_expense_amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(revalued_cogs_aml.debit, 4, 'Price difference should have correctly reflected in expense account.')
def test_fifo_delivered_invoice_post_delivery_with_return(self):
"""Receive 2@10. SO1 2@12. Return 1 from SO1. SO2 1@12. Receive 1@20.
Re-deliver returned from SO1. Invoice after delivering everything."""
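        # Expected COGS, per the assertions below: SO1 = 10 (first unit at 10) + 20 (re-delivered
        # unit taken from the 1@20 receipt) = 30; SO2 = 10 (the returned unit goes back into stock
        # at its original cost of 10).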
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
# Receive 2@10.
in_move_1 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 2,
'price_unit': 10,
})
in_move_1._action_confirm()
in_move_1.quantity_done = 2
in_move_1._action_done()
# Create, confirm and deliver a sale order for 2@12 (SO1)
so_1 = self._so_and_confirm_two_units()
so_1.picking_ids.move_lines.quantity_done = 2
so_1.picking_ids.button_validate()
# Return 1 from SO1
stock_return_picking_form = Form(
self.env['stock.return.picking'].with_context(
active_ids=so_1.picking_ids.ids, active_id=so_1.picking_ids.ids[0], active_model='stock.picking')
)
stock_return_picking = stock_return_picking_form.save()
stock_return_picking.product_return_moves.quantity = 1.0
stock_return_picking_action = stock_return_picking.create_returns()
return_pick = self.env['stock.picking'].browse(stock_return_picking_action['res_id'])
return_pick.action_assign()
return_pick.move_lines.quantity_done = 1
return_pick._action_done()
# Create, confirm and deliver a sale order for 1@12 (SO2)
so_2 = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 1.0,
'product_uom': self.product.uom_id.id,
'price_unit': 12,
'tax_id': False, # no love taxes amls
})],
})
so_2.action_confirm()
so_2.picking_ids.move_lines.quantity_done = 1
so_2.picking_ids.button_validate()
# Receive 1@20
in_move_2 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': 20,
})
in_move_2._action_confirm()
in_move_2.quantity_done = 1
in_move_2._action_done()
# Re-deliver returned 1 from SO1
stock_redeliver_picking_form = Form(
self.env['stock.return.picking'].with_context(
active_ids=return_pick.ids, active_id=return_pick.ids[0], active_model='stock.picking')
)
stock_redeliver_picking = stock_redeliver_picking_form.save()
stock_redeliver_picking.product_return_moves.quantity = 1.0
stock_redeliver_picking_action = stock_redeliver_picking.create_returns()
redeliver_pick = self.env['stock.picking'].browse(stock_redeliver_picking_action['res_id'])
redeliver_pick.action_assign()
redeliver_pick.move_lines.quantity_done = 1
redeliver_pick._action_done()
# Invoice the sale orders
invoice_1 = so_1._create_invoices()
invoice_1.action_post()
invoice_2 = so_2._create_invoices()
invoice_2.action_post()
# Check the resulting accounting entries
amls_1 = invoice_1.line_ids
self.assertEqual(len(amls_1), 4)
stock_out_aml_1 = amls_1.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml_1.debit, 0)
self.assertEqual(stock_out_aml_1.credit, 30)
cogs_aml_1 = amls_1.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml_1.debit, 30)
self.assertEqual(cogs_aml_1.credit, 0)
receivable_aml_1 = amls_1.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml_1.debit, 24)
self.assertEqual(receivable_aml_1.credit, 0)
income_aml_1 = amls_1.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml_1.debit, 0)
self.assertEqual(income_aml_1.credit, 24)
amls_2 = invoice_2.line_ids
self.assertEqual(len(amls_2), 4)
stock_out_aml_2 = amls_2.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml_2.debit, 0)
self.assertEqual(stock_out_aml_2.credit, 10)
cogs_aml_2 = amls_2.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml_2.debit, 10)
self.assertEqual(cogs_aml_2.credit, 0)
receivable_aml_2 = amls_2.filtered(lambda aml: aml.account_id == self.company_data['default_account_receivable'])
self.assertEqual(receivable_aml_2.debit, 12)
self.assertEqual(receivable_aml_2.credit, 0)
income_aml_2 = amls_2.filtered(lambda aml: aml.account_id == self.company_data['default_account_revenue'])
self.assertEqual(income_aml_2.debit, 0)
self.assertEqual(income_aml_2.credit, 12)
def test_fifo_uom_computation(self):
self.env.company.anglo_saxon_accounting = True
self.product.categ_id.property_cost_method = 'fifo'
self.product.categ_id.property_valuation = 'real_time'
quantity = 50.0
self.product.list_price = 1.5
self.product.standard_price = 2.0
unit_12 = self.env['uom.uom'].create({
'name': 'Pack of 12 units',
'category_id': self.product.uom_id.category_id.id,
'uom_type': 'bigger',
'factor_inv': 12,
'rounding': 1,
})
        # Create, confirm and deliver a sale order for 1 x unit_12 @ 18.0 without reception, with std_price = 2.0 (SO1)
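        # 1 pack of 12 units invoiced at 18.0 of revenue; with no stock on hand, the anglo-saxon
        # COGS is expected to fall back on the standard price: 12 x 2.0 = 24.0 (see the expected
        # journal items below).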
so_1 = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 1,
'product_uom': unit_12.id,
'price_unit': 18,
'tax_id': False, # no love taxes amls
})],
})
so_1.action_confirm()
so_1.picking_ids.move_lines.quantity_done = 12
so_1.picking_ids.button_validate()
# Invoice the sale order.
invoice_1 = so_1._create_invoices()
invoice_1.action_post()
"""
Invoice 1
Correct Journal Items
Name Debit Credit
Product Sales 0.00$ 18.00$
Account Receivable 18.00$ 0.00$
Default Account Stock Out 0.00$ 24.00$
Expenses 24.00$ 0.00$
"""
aml = invoice_1.line_ids
# Product Sales
        self.assertEqual(aml[0].debit, 0.0)
        self.assertEqual(aml[0].credit, 18.0)
        # Account Receivable
        self.assertEqual(aml[1].debit, 18.0)
        self.assertEqual(aml[1].credit, 0.0)
        # Default Account Stock Out
        self.assertEqual(aml[2].debit, 0.0)
        self.assertEqual(aml[2].credit, 24.0)
        # Expenses
        self.assertEqual(aml[3].debit, 24.0)
        self.assertEqual(aml[3].credit, 0.0)
# Create stock move 1
in_move_1 = self.env['stock.move'].create({
'name': 'a',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': quantity,
'price_unit': 1.0,
})
in_move_1._action_confirm()
in_move_1.quantity_done = quantity
in_move_1._action_done()
        # Create, confirm and deliver a sale order for 1 x unit_12 @ 18.0 with reception (50 * 1.0, 50 * 0.0) (SO2)
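        # This time the 12 units are taken from the 50 @ 1.0 receipt, so the expected COGS is
        # 12 x 1.0 = 12.0 (see the expected journal items below).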
so_2 = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 1,
'product_uom': unit_12.id,
'price_unit': 18,
'tax_id': False, # no love taxes amls
})],
})
so_2.action_confirm()
so_2.picking_ids.move_lines.quantity_done = 12
so_2.picking_ids.button_validate()
# Invoice the sale order.
invoice_2 = so_2._create_invoices()
invoice_2.action_post()
"""
Invoice 2
Correct Journal Items
Name Debit Credit
Product Sales 0.00$ 18.0$
Account Receivable 18.00$ 0.0$
Default Account Stock Out 0.00$ 12.0$
Expenses 12.00$ 0.0$
"""
aml = invoice_2.line_ids
# Product Sales
        self.assertEqual(aml[0].debit, 0.0)
        self.assertEqual(aml[0].credit, 18.0)
        # Account Receivable
        self.assertEqual(aml[1].debit, 18.0)
        self.assertEqual(aml[1].credit, 0.0)
        # Default Account Stock Out
        self.assertEqual(aml[2].debit, 0.0)
        self.assertEqual(aml[2].credit, 12.0)
        # Expenses
        self.assertEqual(aml[3].debit, 12.0)
        self.assertEqual(aml[3].credit, 0.0)
def test_fifo_return_and_credit_note(self):
"""
        When posting a credit note for a returned product, the value of the anglo-saxon lines
        should be based on the returned product's value.
"""
self.product.categ_id.property_cost_method = 'fifo'
# Receive one @10, one @20 and one @60
in_moves = self.env['stock.move'].create([{
'name': 'IN move @%s' % p,
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': p,
} for p in [10, 20, 60]])
in_moves._action_confirm()
in_moves.quantity_done = 1
in_moves._action_done()
# Sell 3 units
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 3.0,
'product_uom': self.product.uom_id.id,
'price_unit': 100,
'tax_id': False,
})],
})
so.action_confirm()
# Deliver 1@10, then 1@20 and then 1@60
pickings = []
picking = so.picking_ids
while picking:
pickings.append(picking)
picking.move_lines.quantity_done = 1
action = picking.button_validate()
if isinstance(action, dict):
wizard = Form(self.env[action['res_model']].with_context(action['context'])).save()
wizard.process()
picking = picking.backorder_ids
invoice = so._create_invoices()
invoice.action_post()
# Receive one @100
in_moves = self.env['stock.move'].create({
'name': 'IN move @100',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': 100,
})
in_moves._action_confirm()
in_moves.quantity_done = 1
in_moves._action_done()
# Return the second picking (i.e. 1@20)
ctx = {'active_id': pickings[1].id, 'active_model': 'stock.picking'}
return_wizard = Form(self.env['stock.return.picking'].with_context(ctx)).save()
return_picking_id, dummy = return_wizard._create_returns()
return_picking = self.env['stock.picking'].browse(return_picking_id)
return_picking.move_lines.quantity_done = 1
return_picking.button_validate()
# Add a credit note for the returned product
ctx = {'active_model': 'account.move', 'active_ids': invoice.ids}
refund_wizard = self.env['account.move.reversal'].with_context(ctx).create({
'refund_method': 'refund',
'journal_id': invoice.journal_id.id,
})
action = refund_wizard.reverse_moves()
reverse_invoice = self.env['account.move'].browse(action['res_id'])
with Form(reverse_invoice) as reverse_invoice_form:
with reverse_invoice_form.invoice_line_ids.edit(0) as line:
line.quantity = 1
reverse_invoice.action_post()
amls = reverse_invoice.line_ids
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 20, 'Should be to the value of the returned product')
self.assertEqual(stock_out_aml.credit, 0)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 0)
        self.assertEqual(cogs_aml.credit, 20, 'Should be equal to the value of the returned product')
def test_fifo_return_and_create_invoice(self):
"""
        When creating an invoice for a returned product, the value of the anglo-saxon lines
should be based on the returned product's value
"""
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
# Receive one @10, one @20 and one @60
in_moves = self.env['stock.move'].create([{
'name': 'IN move @%s' % p,
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': p,
} for p in [10, 20, 60]])
in_moves._action_confirm()
in_moves.quantity_done = 1
in_moves._action_done()
# Sell 3 units
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 3.0,
'product_uom': self.product.uom_id.id,
'price_unit': 100,
'tax_id': False,
})],
})
so.action_confirm()
# Deliver 1@10, then 1@20 and then 1@60
pickings = []
picking = so.picking_ids
while picking:
pickings.append(picking)
picking.move_lines.quantity_done = 1
action = picking.button_validate()
if isinstance(action, dict):
wizard = Form(self.env[action['res_model']].with_context(action['context'])).save()
wizard.process()
picking = picking.backorder_ids
invoice = so._create_invoices()
invoice.action_post()
# Receive one @100
in_moves = self.env['stock.move'].create({
'name': 'IN move @100',
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': 100,
})
in_moves._action_confirm()
in_moves.quantity_done = 1
in_moves._action_done()
# Return the second picking (i.e. 1@20)
ctx = {'active_id': pickings[1].id, 'active_model': 'stock.picking'}
return_wizard = Form(self.env['stock.return.picking'].with_context(ctx)).save()
return_picking_id, dummy = return_wizard._create_returns()
return_picking = self.env['stock.picking'].browse(return_picking_id)
return_picking.move_lines.quantity_done = 1
return_picking.button_validate()
# Create a new invoice for the returned product
ctx = {'active_model': 'sale.order', 'active_ids': so.ids}
create_invoice_wizard = self.env['sale.advance.payment.inv'].with_context(ctx).create({'advance_payment_method': 'delivered'})
create_invoice_wizard.create_invoices()
reverse_invoice = so.invoice_ids[-1]
with Form(reverse_invoice) as reverse_invoice_form:
with reverse_invoice_form.invoice_line_ids.edit(0) as line:
line.quantity = 1
reverse_invoice.action_post()
amls = reverse_invoice.line_ids
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
        self.assertEqual(stock_out_aml.debit, 20, 'Should be equal to the value of the returned product')
self.assertEqual(stock_out_aml.credit, 0)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 0)
        self.assertEqual(cogs_aml.credit, 20, 'Should be equal to the value of the returned product')
def test_fifo_several_invoices_reset_repost(self):
self.product.categ_id.property_cost_method = 'fifo'
self.product.invoice_policy = 'delivery'
svl_values = [10, 15, 65]
total_value = sum(svl_values)
in_moves = self.env['stock.move'].create([{
'name': 'IN move @%s' % p,
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': p,
} for p in svl_values])
in_moves._action_confirm()
in_moves.quantity_done = 1
in_moves._action_done()
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 3.0,
'product_uom': self.product.uom_id.id,
'price_unit': 100,
'tax_id': False,
})],
})
so.action_confirm()
# Deliver one by one, so it creates an out-SVL each time.
# Then invoice the delivered quantity
invoices = self.env['account.move']
picking = so.picking_ids
while picking:
picking.move_lines.quantity_done = 1
action = picking.button_validate()
if isinstance(action, dict):
wizard = Form(self.env[action['res_model']].with_context(action['context'])).save()
wizard.process()
picking = picking.backorder_ids
invoice = so._create_invoices()
invoice.action_post()
invoices |= invoice
out_account = self.product.categ_id.property_stock_account_output_categ_id
invoice01, _invoice02, invoice03 = invoices
cogs = invoices.line_ids.filtered(lambda l: l.account_id == out_account)
self.assertEqual(cogs.mapped('credit'), svl_values)
# Reset and repost each invoice
for i, inv in enumerate(invoices):
inv.button_draft()
inv.action_post()
cogs = invoices.line_ids.filtered(lambda l: l.account_id == out_account)
self.assertEqual(cogs.mapped('credit'), svl_values, 'Incorrect values while posting again invoice %s' % (i + 1))
# Reset and repost all invoices (we only check the total value as the
# distribution changes but does not really matter)
invoices.button_draft()
invoices.action_post()
cogs = invoices.line_ids.filtered(lambda l: l.account_id == out_account)
self.assertEqual(sum(cogs.mapped('credit')), total_value)
# Reset and repost few invoices (we only check the total value as the
# distribution changes but does not really matter)
(invoice01 | invoice03).button_draft()
(invoice01 | invoice03).action_post()
cogs = invoices.line_ids.filtered(lambda l: l.account_id == out_account)
self.assertEqual(sum(cogs.mapped('credit')), total_value)
def test_fifo_reverse_and_create_new_invoice(self):
"""
FIFO automated
Receive 1@10, 1@50
Deliver 1
Post the invoice, add a credit note with option 'new draft inv'
Post the second invoice
COGS should be based on the delivered product
"""
self.product.categ_id.property_cost_method = 'fifo'
in_moves = self.env['stock.move'].create([{
'name': 'IN move @%s' % p,
'product_id': self.product.id,
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_uom': self.product.uom_id.id,
'product_uom_qty': 1,
'price_unit': p,
} for p in [10, 50]])
in_moves._action_confirm()
in_moves.quantity_done = 1
in_moves._action_done()
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 1.0,
'product_uom': self.product.uom_id.id,
'price_unit': 100,
'tax_id': False,
})],
})
so.action_confirm()
picking = so.picking_ids
picking.move_lines.quantity_done = 1.0
picking.button_validate()
invoice01 = so._create_invoices()
invoice01.action_post()
move_reversal = self.env['account.move.reversal'].with_context(active_model="account.move", active_ids=invoice01.ids).create({
'refund_method': 'modify',
'journal_id': invoice01.journal_id.id,
})
reversal = move_reversal.reverse_moves()
invoice02 = self.env['account.move'].browse(reversal['res_id'])
invoice02.action_post()
amls = invoice02.line_ids
stock_out_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_stock_out'])
self.assertEqual(stock_out_aml.debit, 0)
self.assertEqual(stock_out_aml.credit, 10)
cogs_aml = amls.filtered(lambda aml: aml.account_id == self.company_data['default_account_expense'])
self.assertEqual(cogs_aml.debit, 10)
self.assertEqual(cogs_aml.credit, 0)
| 44.976317 | 75,965 |
6,814 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.stock_account.tests.test_anglo_saxon_valuation_reconciliation_common import ValuationReconciliationTestCommon
from odoo.tests import Form, tagged
@tagged('post_install', '-at_install')
class TestValuationReconciliation(ValuationReconciliationTestCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
# Set the invoice_policy to delivery to have an accurate COGS entry.
cls.test_product_delivery.invoice_policy = 'delivery'
def _create_sale(self, product, date, quantity=1.0):
rslt = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'currency_id': self.currency_data['currency'].id,
'order_line': [
(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': quantity,
'product_uom': product.uom_po_id.id,
'price_unit': 66.0,
})],
'date_order': date,
})
rslt.action_confirm()
return rslt
def _create_invoice_for_so(self, sale_order, product, date, quantity=1.0):
rslt = self.env['account.move'].create({
'partner_id': self.partner_a.id,
'currency_id': self.currency_data['currency'].id,
'move_type': 'out_invoice',
'invoice_date': date,
'invoice_line_ids': [(0, 0, {
'name': 'test line',
'account_id': self.company_data['default_account_revenue'].id,
'price_unit': 66.0,
'quantity': quantity,
'discount': 0.0,
'product_uom_id': product.uom_id.id,
'product_id': product.id,
'sale_line_ids': [(6, 0, sale_order.order_line.ids)],
})],
})
sale_order.invoice_ids += rslt
return rslt
def _set_initial_stock_for_product(self, product):
move1 = self.env['stock.move'].create({
'name': 'Initial stock',
'location_id': self.env.ref('stock.stock_location_suppliers').id,
'location_dest_id': self.company_data['default_warehouse'].lot_stock_id.id,
'product_id': product.id,
'product_uom': product.uom_id.id,
'product_uom_qty': 11,
'price_unit': 13,
})
move1._action_confirm()
move1._action_assign()
move1.move_line_ids.qty_done = 11
move1._action_done()
def test_shipment_invoice(self):
""" Tests the case into which we send the goods to the customer before
making the invoice
"""
test_product = self.test_product_delivery
self._set_initial_stock_for_product(test_product)
        sale_order = self._create_sale(test_product, '2018-01-01')
self._process_pickings(sale_order.picking_ids)
invoice = self._create_invoice_for_so(sale_order, test_product, '2018-02-12')
invoice.action_post()
picking = self.env['stock.picking'].search([('sale_id', '=', sale_order.id)])
self.check_reconciliation(invoice, picking, operation='sale')
def test_invoice_shipment(self):
""" Tests the case into which we make the invoice first, and then send
the goods to our customer.
"""
test_product = self.test_product_delivery
        # since the invoice comes first, the COGS will use the standard price set on the product
self.test_product_delivery.standard_price = 13
self._set_initial_stock_for_product(test_product)
sale_order = self._create_sale(test_product, '2018-01-01')
invoice = self._create_invoice_for_so(sale_order, test_product, '2018-02-03')
invoice.action_post()
self._process_pickings(sale_order.picking_ids)
picking = self.env['stock.picking'].search([('sale_id', '=', sale_order.id)])
self.check_reconciliation(invoice, picking, operation='sale')
#return the goods and refund the invoice
stock_return_picking_form = Form(self.env['stock.return.picking']
.with_context(active_ids=picking.ids, active_id=picking.ids[0],
active_model='stock.picking'))
stock_return_picking = stock_return_picking_form.save()
stock_return_picking.product_return_moves.quantity = 1.0
stock_return_picking_action = stock_return_picking.create_returns()
return_pick = self.env['stock.picking'].browse(stock_return_picking_action['res_id'])
return_pick.action_assign()
return_pick.move_lines.quantity_done = 1
return_pick._action_done()
refund_invoice_wiz = self.env['account.move.reversal'].with_context(active_model='account.move', active_ids=[invoice.id]).create({
'reason': 'test_invoice_shipment_refund',
'refund_method': 'cancel',
'journal_id': invoice.journal_id.id,
})
refund_invoice = self.env['account.move'].browse(refund_invoice_wiz.reverse_moves()['res_id'])
self.assertEqual(invoice.payment_state, 'reversed', "Invoice should be in 'reversed' state.")
self.assertEqual(refund_invoice.payment_state, 'paid', "Refund should be in 'paid' state.")
self.check_reconciliation(refund_invoice, return_pick, operation='sale')
def test_multiple_shipments_invoices(self):
""" Tests the case into which we deliver part of the goods first, then 2 invoices at different rates, and finally the remaining quantities
"""
test_product = self.test_product_delivery
self._set_initial_stock_for_product(test_product)
sale_order = self._create_sale(test_product, '2018-01-01', quantity=5)
self._process_pickings(sale_order.picking_ids, quantity=2.0)
picking = self.env['stock.picking'].search([('sale_id', '=', sale_order.id)], order="id asc", limit=1)
invoice = self._create_invoice_for_so(sale_order, test_product, '2018-02-03', quantity=3)
invoice.action_post()
self.check_reconciliation(invoice, picking, full_reconcile=False, operation='sale')
invoice2 = self._create_invoice_for_so(sale_order, test_product, '2018-03-12', quantity=2)
invoice2.action_post()
self.check_reconciliation(invoice2, picking, full_reconcile=False, operation='sale')
self._process_pickings(sale_order.picking_ids.filtered(lambda x: x.state != 'done'), quantity=3.0)
picking = self.env['stock.picking'].search([('sale_id', '=', sale_order.id)], order='id desc', limit=1)
self.check_reconciliation(invoice2, picking, operation='sale')
| 46.671233 | 6,814 |
3,782 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.stock_account.tests.test_anglo_saxon_valuation_reconciliation_common import ValuationReconciliationTestCommon
from odoo.addons.sale.tests.common import TestSaleCommon
from odoo.tests import tagged
@tagged('post_install', '-at_install')
class TestSaleStockMultiCompany(TestSaleCommon, ValuationReconciliationTestCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
cls.warehouse_A = cls.company_data['default_warehouse']
cls.warehouse_A2 = cls.env['stock.warehouse'].create({
'name': 'WH B',
'code': 'WHB',
'company_id': cls.env.company.id,
'partner_id': cls.env.company.partner_id.id,
})
cls.warehouse_B = cls.company_data_2['default_warehouse']
cls.env.user.groups_id |= cls.env.ref('stock.group_stock_user')
cls.env.user.groups_id |= cls.env.ref('stock.group_stock_multi_locations')
cls.env.user.groups_id |= cls.env.ref('sales_team.group_sale_salesman')
cls.env.user.with_company(cls.company_data['company']).property_warehouse_id = cls.warehouse_A.id
cls.env.user.with_company(cls.company_data_2['company']).property_warehouse_id = cls.warehouse_B.id
def test_warehouse_definition_on_so(self):
partner = self.partner_a
product = self.test_product_order
sale_order_vals = {
'partner_id': partner.id,
'partner_invoice_id': partner.id,
'partner_shipping_id': partner.id,
'user_id': False,
'company_id': self.env.company.id,
'order_line': [(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': 10,
'product_uom': product.uom_id.id,
'price_unit': product.list_price})],
'pricelist_id': self.company_data['default_pricelist'].id,
}
sale_order = self.env['sale.order']
so_no_user = sale_order.create(sale_order_vals)
self.assertFalse(so_no_user.user_id.property_warehouse_id)
self.assertEqual(so_no_user.warehouse_id.id, self.warehouse_A.id)
sale_order_vals2 = {
'partner_id': partner.id,
'partner_invoice_id': partner.id,
'partner_shipping_id': partner.id,
'company_id': self.env.company.id,
'order_line': [(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': 10,
'product_uom': product.uom_id.id,
'price_unit': product.list_price})],
'pricelist_id': self.company_data['default_pricelist'].id,
}
so_company_A = sale_order.with_company(self.env.company).create(sale_order_vals2)
self.assertEqual(so_company_A.warehouse_id.id, self.warehouse_A.id)
sale_order_vals3 = {
'partner_id': partner.id,
'partner_invoice_id': partner.id,
'partner_shipping_id': partner.id,
'company_id': self.company_data_2['company'].id,
'order_line': [(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': 10,
'product_uom': product.uom_id.id,
'price_unit': product.list_price})],
'pricelist_id': self.company_data['default_pricelist'].id,
}
so_company_B = sale_order.with_company(self.company_data_2['company']).create(sale_order_vals3)
self.assertEqual(so_company_B.warehouse_id.id, self.warehouse_B.id)
| 43.976744 | 3,782 |
21,520 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from odoo.tools import html2plaintext
from odoo.tests.common import Form, tagged
from odoo.addons.stock.tests.test_report import TestReportsCommon
from odoo.addons.sale.tests.common import TestSaleCommon
class TestSaleStockReports(TestReportsCommon):
def test_report_forecast_1_sale_order_replenishment(self):
""" Create and confirm two sale orders: one for the next week and one
for tomorrow. Then check in the report it's the most urgent who is
linked to the qty. on stock.
"""
# make sure first picking doesn't auto-assign
self.picking_type_out.reservation_method = 'manual'
today = datetime.today()
# Put some quantity in stock.
quant_vals = {
'product_id': self.product.id,
'product_uom_id': self.product.uom_id.id,
'location_id': self.stock_location.id,
'quantity': 5,
'reserved_quantity': 0,
}
self.env['stock.quant'].create(quant_vals)
# Create a first SO for the next week.
so_form = Form(self.env['sale.order'])
so_form.partner_id = self.partner
# so_form.validity_date = today + timedelta(days=7)
with so_form.order_line.new() as so_line:
so_line.product_id = self.product
so_line.product_uom_qty = 5
so_1 = so_form.save()
so_1.action_confirm()
so_1.picking_ids.scheduled_date = today + timedelta(days=7)
# Create a second SO for tomorrow.
so_form = Form(self.env['sale.order'])
so_form.partner_id = self.partner
# so_form.validity_date = today + timedelta(days=1)
with so_form.order_line.new() as so_line:
so_line.product_id = self.product
so_line.product_uom_qty = 5
so_2 = so_form.save()
so_2.action_confirm()
so_2.picking_ids.scheduled_date = today + timedelta(days=1)
report_values, docs, lines = self.get_report_forecast(product_template_ids=self.product_template.ids)
self.assertEqual(len(lines), 2)
line_1 = lines[0]
line_2 = lines[1]
self.assertEqual(line_1['quantity'], 5)
self.assertTrue(line_1['replenishment_filled'])
self.assertEqual(line_1['document_out'].id, so_2.id)
self.assertEqual(line_2['quantity'], 5)
self.assertEqual(line_2['replenishment_filled'], False)
self.assertEqual(line_2['document_out'].id, so_1.id)
def test_report_forecast_2_report_line_corresponding_to_so_line_highlighted(self):
""" When accessing the report from a SO line, checks if the correct SO line is highlighted in the report
"""
# We create 2 identical SO
so_form = Form(self.env['sale.order'])
so_form.partner_id = self.partner
with so_form.order_line.new() as line:
line.product_id = self.product
line.product_uom_qty = 5
so1 = so_form.save()
so1.action_confirm()
so2 = so1.copy()
so2.action_confirm()
# Check for both SO if the highlight (is_matched) corresponds to the correct SO
for so in [so1, so2]:
context = {"move_to_match_ids": so.order_line.move_ids.ids}
_, _, lines = self.get_report_forecast(product_template_ids=self.product_template.ids, context=context)
for line in lines:
if line['document_out'] == so:
self.assertTrue(line['is_matched'], "The corresponding SO line should be matched in the forecast report.")
else:
self.assertFalse(line['is_matched'], "A line of the forecast report not linked to the SO shoud not be matched.")
@tagged('post_install', '-at_install')
class TestSaleStockInvoices(TestSaleCommon):
def setUp(self):
super(TestSaleStockInvoices, self).setUp()
self.product_by_lot = self.env['product.product'].create({
'name': 'Product By Lot',
'type': 'product',
'tracking': 'lot',
})
self.product_by_usn = self.env['product.product'].create({
'name': 'Product By USN',
'type': 'product',
'tracking': 'serial',
})
self.warehouse = self.env['stock.warehouse'].search([('company_id', '=', self.env.company.id)], limit=1)
self.stock_location = self.warehouse.lot_stock_id
lot = self.env['stock.production.lot'].create({
'name': 'LOT0001',
'product_id': self.product_by_lot.id,
'company_id': self.env.company.id,
})
self.usn01 = self.env['stock.production.lot'].create({
'name': 'USN0001',
'product_id': self.product_by_usn.id,
'company_id': self.env.company.id,
})
self.usn02 = self.env['stock.production.lot'].create({
'name': 'USN0002',
'product_id': self.product_by_usn.id,
'company_id': self.env.company.id,
})
self.env['stock.quant']._update_available_quantity(self.product_by_lot, self.stock_location, 10, lot_id=lot)
self.env['stock.quant']._update_available_quantity(self.product_by_usn, self.stock_location, 1, lot_id=self.usn01)
self.env['stock.quant']._update_available_quantity(self.product_by_usn, self.stock_location, 1, lot_id=self.usn02)
def test_invoice_less_than_delivered(self):
"""
Suppose the lots are printed on the invoices.
        A user invoices a tracked product with a smaller quantity than delivered.
On the invoice, the quantity of the used lot should be the invoiced one.
"""
display_lots = self.env.ref('stock_account.group_lot_on_invoice')
display_uom = self.env.ref('uom.group_uom')
self.env.user.write({'groups_id': [(4, display_lots.id), (4, display_uom.id)]})
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': self.product_by_lot.name, 'product_id': self.product_by_lot.id, 'product_uom_qty': 5}),
],
})
so.action_confirm()
picking = so.picking_ids
picking.move_lines.quantity_done = 5
picking.button_validate()
invoice = so._create_invoices()
with Form(invoice) as form:
with form.invoice_line_ids.edit(0) as line:
line.quantity = 2
invoice.action_post()
report = self.env['ir.actions.report']._get_report_from_name('account.report_invoice_with_payments')
html = report._render_qweb_html(invoice.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By Lot\n2.00\nUnits\nLOT0001', "There should be a line that specifies 2 x LOT0001")
def test_invoice_before_delivery(self):
"""
Suppose the lots are printed on the invoices.
        The user sells a tracked product whose invoicing policy is "Ordered quantities",
        so the invoice is posted before the delivery.
        Once delivered, the invoice report should display the lot used for the delivery.
"""
display_lots = self.env.ref('stock_account.group_lot_on_invoice')
display_uom = self.env.ref('uom.group_uom')
self.env.user.write({'groups_id': [(4, display_lots.id), (4, display_uom.id)]})
self.product_by_lot.invoice_policy = "order"
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': self.product_by_lot.name, 'product_id': self.product_by_lot.id, 'product_uom_qty': 4}),
],
})
so.action_confirm()
invoice = so._create_invoices()
invoice.action_post()
picking = so.picking_ids
picking.move_lines.quantity_done = 4
picking.button_validate()
report = self.env['ir.actions.report']._get_report_from_name('account.report_invoice_with_payments')
html = report._render_qweb_html(invoice.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By Lot\n4.00\nUnits\nLOT0001', "There should be a line that specifies 4 x LOT0001")
def test_backorder_and_several_invoices(self):
"""
Suppose the lots are printed on the invoices.
The user sells 2 tracked-by-usn products, he delivers 1 product and invoices it
Then, he delivers the other one and invoices it too. Each invoice should have the
correct USN
"""
report = self.env['ir.actions.report']._get_report_from_name('account.report_invoice_with_payments')
display_lots = self.env.ref('stock_account.group_lot_on_invoice')
display_uom = self.env.ref('uom.group_uom')
self.env.user.write({'groups_id': [(4, display_lots.id), (4, display_uom.id)]})
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': self.product_by_usn.name, 'product_id': self.product_by_usn.id, 'product_uom_qty': 2}),
],
})
so.action_confirm()
picking = so.picking_ids
picking.move_lines.move_line_ids[0].qty_done = 1
        action = picking.button_validate()
wizard = Form(self.env[action['res_model']].with_context(action['context'])).save()
wizard.process()
invoice01 = so._create_invoices()
with Form(invoice01) as form:
with form.invoice_line_ids.edit(0) as line:
line.quantity = 1
invoice01.action_post()
backorder = picking.backorder_ids
backorder.move_lines.move_line_ids.qty_done = 1
backorder.button_validate()
html = report._render_qweb_html(invoice01.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0001', "There should be a line that specifies 1 x USN0001")
self.assertNotIn('USN0002', text)
invoice02 = so._create_invoices()
invoice02.action_post()
html = report._render_qweb_html(invoice02.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0002', "There should be a line that specifies 1 x USN0002")
self.assertNotIn('USN0001', text)
# Posting the second invoice shouldn't change the result of the first one
html = report._render_qweb_html(invoice01.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0001', "There should still be a line that specifies 1 x USN0001")
self.assertNotIn('USN0002', text)
# Resetting and posting again the first invoice shouldn't change the results
invoice01.button_draft()
invoice01.action_post()
html = report._render_qweb_html(invoice01.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0001', "There should still be a line that specifies 1 x USN0001")
self.assertNotIn('USN0002', text)
html = report._render_qweb_html(invoice02.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0002', "There should be a line that specifies 1 x USN0002")
self.assertNotIn('USN0001', text)
def test_invoice_with_several_returns(self):
"""
Mix of returns and partial invoice
- Product P tracked by lot
- SO with 10 x P
- Deliver 10 x Lot01
- Return 10 x Lot01
- Deliver 03 x Lot02
- Invoice 02 x P
- Deliver 05 x Lot02 + 02 x Lot03
- Invoice 08 x P
"""
report = self.env['ir.actions.report']._get_report_from_name('account.report_invoice_with_payments')
display_lots = self.env.ref('stock_account.group_lot_on_invoice')
display_uom = self.env.ref('uom.group_uom')
self.env.user.write({'groups_id': [(4, display_lots.id), (4, display_uom.id)]})
lot01 = self.env['stock.production.lot'].search([('name', '=', 'LOT0001')])
lot02, lot03 = self.env['stock.production.lot'].create([{
'name': name,
'product_id': self.product_by_lot.id,
'company_id': self.env.company.id,
} for name in ['LOT0002', 'LOT0003']])
self.env['stock.quant']._update_available_quantity(self.product_by_lot, self.stock_location, 8, lot_id=lot02)
self.env['stock.quant']._update_available_quantity(self.product_by_lot, self.stock_location, 2, lot_id=lot03)
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': self.product_by_lot.name, 'product_id': self.product_by_lot.id, 'product_uom_qty': 10}),
],
})
so.action_confirm()
# Deliver 10 x LOT0001
delivery01 = so.picking_ids
delivery01.move_lines.quantity_done = 10
delivery01.button_validate()
self.assertEqual(delivery01.move_line_ids.lot_id.name, 'LOT0001')
# Return delivery01 (-> 10 x LOT0001)
return_form = Form(self.env['stock.return.picking'].with_context(active_ids=[delivery01.id], active_id=delivery01.id, active_model='stock.picking'))
return_wizard = return_form.save()
action = return_wizard.create_returns()
pick_return = self.env['stock.picking'].browse(action['res_id'])
move_form = Form(pick_return.move_lines, view='stock.view_stock_move_nosuggest_operations')
with move_form.move_line_nosuggest_ids.new() as line:
line.lot_id = lot01
line.qty_done = 10
move_form.save()
pick_return.button_validate()
# Return pick_return
return_form = Form(self.env['stock.return.picking'].with_context(active_ids=[pick_return.id], active_id=pick_return.id, active_model='stock.picking'))
return_wizard = return_form.save()
action = return_wizard.create_returns()
delivery02 = self.env['stock.picking'].browse(action['res_id'])
# Deliver 3 x LOT0002
delivery02.do_unreserve()
move_form = Form(delivery02.move_lines, view='stock.view_stock_move_nosuggest_operations')
with move_form.move_line_nosuggest_ids.new() as line:
line.lot_id = lot02
line.qty_done = 3
move_form.save()
action = delivery02.button_validate()
wizard = Form(self.env[action['res_model']].with_context(action['context'])).save()
wizard.process()
# Invoice 2 x P
invoice01 = so._create_invoices()
with Form(invoice01) as form:
with form.invoice_line_ids.edit(0) as line:
line.quantity = 2
invoice01.action_post()
html = report._render_qweb_html(invoice01.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By Lot\n2.00\nUnits\nLOT0002', "There should be a line that specifies 2 x LOT0002")
self.assertNotIn('LOT0001', text)
# Deliver 5 x LOT0002 + 2 x LOT0003
delivery03 = delivery02.backorder_ids
delivery03.do_unreserve()
move_form = Form(delivery03.move_lines, view='stock.view_stock_move_nosuggest_operations')
with move_form.move_line_nosuggest_ids.new() as line:
line.lot_id = lot02
line.qty_done = 5
with move_form.move_line_nosuggest_ids.new() as line:
line.lot_id = lot03
line.qty_done = 2
move_form.save()
delivery03.button_validate()
# Invoice 8 x P
invoice02 = so._create_invoices()
invoice02.action_post()
html = report._render_qweb_html(invoice02.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By Lot\n6.00\nUnits\nLOT0002', "There should be a line that specifies 6 x LOT0002")
self.assertRegex(text, r'Product By Lot\n2.00\nUnits\nLOT0003', "There should be a line that specifies 2 x LOT0003")
self.assertNotIn('LOT0001', text)
def test_refund_cancel_invoices(self):
"""
Suppose the lots are printed on the invoices.
        The user sells 2 tracked-by-usn products, delivers both and invoices them.
        Then he adds a credit note with a full refund and receives the returned products.
        The reversed invoice should also display the correct USNs.
"""
report = self.env['ir.actions.report']._get_report_from_name('account.report_invoice_with_payments')
display_lots = self.env.ref('stock_account.group_lot_on_invoice')
display_uom = self.env.ref('uom.group_uom')
self.env.user.write({'groups_id': [(4, display_lots.id), (4, display_uom.id)]})
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': self.product_by_usn.name, 'product_id': self.product_by_usn.id, 'product_uom_qty': 2}),
],
})
so.action_confirm()
picking = so.picking_ids
picking.move_lines.move_line_ids[0].qty_done = 1
picking.move_lines.move_line_ids[1].qty_done = 1
picking.button_validate()
invoice01 = so._create_invoices()
invoice01.action_post()
html = report._render_qweb_html(invoice01.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0001', "There should be a line that specifies 1 x USN0001")
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0002', "There should be a line that specifies 1 x USN0002")
# Refund the invoice
refund_wizard = self.env['account.move.reversal'].with_context(active_model="account.move", active_ids=invoice01.ids).create({
'refund_method': 'cancel',
'journal_id': invoice01.journal_id.id,
})
res = refund_wizard.reverse_moves()
refund_invoice = self.env['account.move'].browse(res['res_id'])
        # receive the returned product
stock_return_picking_form = Form(self.env['stock.return.picking'].with_context(active_ids=picking.ids, active_id=picking.sorted().ids[0], active_model='stock.picking'))
return_wiz = stock_return_picking_form.save()
res = return_wiz.create_returns()
pick_return = self.env['stock.picking'].browse(res['res_id'])
move_form = Form(pick_return.move_lines, view='stock.view_stock_move_nosuggest_operations')
with move_form.move_line_nosuggest_ids.new() as line:
line.lot_id = self.usn01
line.qty_done = 1
with move_form.move_line_nosuggest_ids.new() as line:
line.lot_id = self.usn02
line.qty_done = 1
move_form.save()
pick_return.button_validate()
# reversed invoice
html = report._render_qweb_html(refund_invoice.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0001', "There should be a line that specifies 1 x USN0001")
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0002', "There should be a line that specifies 1 x USN0002")
def test_refund_modify_invoices(self):
"""
Suppose the lots are printed on the invoices.
        The user sells 1 tracked-by-usn product, delivers it and invoices it.
        Then he adds a credit note with the "full refund and new draft invoice" option.
The new draft invoice should have correct USN
"""
report = self.env['ir.actions.report']._get_report_from_name('account.report_invoice_with_payments')
display_lots = self.env.ref('stock_account.group_lot_on_invoice')
display_uom = self.env.ref('uom.group_uom')
self.env.user.write({'groups_id': [(4, display_lots.id), (4, display_uom.id)]})
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': self.product_by_usn.name, 'product_id': self.product_by_usn.id, 'product_uom_qty': 1}),
],
})
so.action_confirm()
picking = so.picking_ids
picking.move_lines.move_line_ids[0].qty_done = 1
picking.button_validate()
invoice01 = so._create_invoices()
invoice01.action_post()
html = report._render_qweb_html(invoice01.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0001', "There should be a line that specifies 1 x USN0001")
# Refund the invoice with full refund and new draft invoice
refund_wizard = self.env['account.move.reversal'].with_context(active_model="account.move", active_ids=invoice01.ids).create({
'refund_method': 'modify',
'journal_id': invoice01.journal_id.id,
})
res = refund_wizard.reverse_moves()
invoice02 = self.env['account.move'].browse(res['res_id'])
invoice02.action_post()
# new draft invoice
html = report._render_qweb_html(invoice02.ids)[0]
text = html2plaintext(html)
self.assertRegex(text, r'Product By USN\n1.00\nUnits\nUSN0001', "There should be a line that specifies 1 x USN0001")
| 45.690021 | 21,520 |
75,272 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from odoo.addons.stock_account.tests.test_anglo_saxon_valuation_reconciliation_common import ValuationReconciliationTestCommon
from odoo.addons.sale.tests.common import TestSaleCommon
from odoo.exceptions import UserError
from odoo.tests import Form, tagged
@tagged('post_install', '-at_install')
class TestSaleStock(TestSaleCommon, ValuationReconciliationTestCommon):
def _get_new_sale_order(self, amount=10.0, product=False):
""" Creates and returns a sale order with one default order line.
:param float amount: quantity of product for the order line (10 by default)
"""
product = product or self.company_data['product_delivery_no']
sale_order_vals = {
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': amount,
'product_uom': product.uom_id.id,
'price_unit': product.list_price})],
'pricelist_id': self.company_data['default_pricelist'].id,
}
sale_order = self.env['sale.order'].create(sale_order_vals)
return sale_order
def test_00_sale_stock_invoice(self):
"""
Test SO's changes when playing around with stock moves, quants, pack operations, pickings
and whatever other model there is in stock with "invoice on delivery" products
"""
self.so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': p.name,
'product_id': p.id,
'product_uom_qty': 2,
'product_uom': p.uom_id.id,
'price_unit': p.list_price,
}) for p in (
self.company_data['product_order_no'],
self.company_data['product_service_delivery'],
self.company_data['product_service_order'],
self.company_data['product_delivery_no'],
)],
'pricelist_id': self.company_data['default_pricelist'].id,
'picking_policy': 'direct',
})
# confirm our standard so, check the picking
self.so.action_confirm()
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on delivery" storable products')
# invoice on order
self.so._create_invoices()
# deliver partially, check the so's invoice_status and delivered quantities
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "nothing to invoice" after invoicing')
pick = self.so.picking_ids
pick.move_lines.write({'quantity_done': 1})
wiz_act = pick.button_validate()
wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
wiz.process()
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" after partial delivery')
del_qties = [sol.qty_delivered for sol in self.so.order_line]
del_qties_truth = [1.0 if sol.product_id.type in ['product', 'consu'] else 0.0 for sol in self.so.order_line]
self.assertEqual(del_qties, del_qties_truth, 'Sale Stock: delivered quantities are wrong after partial delivery')
# invoice on delivery: only storable products
inv_1 = self.so._create_invoices()
self.assertTrue(all([il.product_id.invoice_policy == 'delivery' for il in inv_1.invoice_line_ids]),
'Sale Stock: invoice should only contain "invoice on delivery" products')
# complete the delivery and check invoice_status again
self.assertEqual(self.so.invoice_status, 'no',
'Sale Stock: so invoice_status should be "nothing to invoice" after partial delivery and invoicing')
self.assertEqual(len(self.so.picking_ids), 2, 'Sale Stock: number of pickings should be 2')
pick_2 = self.so.picking_ids.filtered('backorder_id')
pick_2.move_lines.write({'quantity_done': 1})
self.assertTrue(pick_2.button_validate(), 'Sale Stock: second picking should be final without need for a backorder')
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" after complete delivery')
del_qties = [sol.qty_delivered for sol in self.so.order_line]
del_qties_truth = [2.0 if sol.product_id.type in ['product', 'consu'] else 0.0 for sol in self.so.order_line]
self.assertEqual(del_qties, del_qties_truth, 'Sale Stock: delivered quantities are wrong after complete delivery')
# Without timesheet, we manually set the delivered qty for the product serv_del
self.so.order_line.sorted()[1]['qty_delivered'] = 2.0
        # There is a bug with `new` and `_origin`
        # If you create a first new from a record, then change a value on the origin record, then create another new,
        # this other new won't have the updated value of the origin record, but the one from the previous new
        # Here the problem lies in the use of `new` in `move = self_ctx.new(new_vals)`,
        # and the fact this method is called multiple times in the same transaction test case.
        # Here, we update `qty_delivered` on the origin record, but the `new` records which are in cache with this order line
        # as origin are not updated, nor the fields that depend on it.
self.so.flush()
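        # Drop the cached field values of the id-less "new" sale.order.line records so that
        # the computed fields are re-read from the updated origin records on the next access
        # (manual workaround for the `new`/`_origin` caching issue described above).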
for field in self.env['sale.order.line']._fields.values():
for res_id in list(self.env.cache._data[field]):
if not res_id:
self.env.cache._data[field].pop(res_id)
inv_id = self.so._create_invoices()
self.assertEqual(self.so.invoice_status, 'invoiced',
'Sale Stock: so invoice_status should be "fully invoiced" after complete delivery and invoicing')
def test_01_sale_stock_order(self):
"""
Test SO's changes when playing around with stock moves, quants, pack operations, pickings
and whatever other model there is in stock with "invoice on order" products
"""
# let's cheat and put all our products to "invoice on order"
self.so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': p.name,
'product_id': p.id,
'product_uom_qty': 2,
'product_uom': p.uom_id.id,
'price_unit': p.list_price,
}) for p in (
self.company_data['product_order_no'],
self.company_data['product_service_delivery'],
self.company_data['product_service_order'],
self.company_data['product_delivery_no'],
)],
'pricelist_id': self.company_data['default_pricelist'].id,
'picking_policy': 'direct',
})
for sol in self.so.order_line:
sol.product_id.invoice_policy = 'order'
# confirm our standard so, check the picking
self.so.order_line._compute_product_updatable()
self.assertTrue(self.so.order_line.sorted()[0].product_updatable)
self.so.action_confirm()
self.so.order_line._compute_product_updatable()
self.assertFalse(self.so.order_line.sorted()[0].product_updatable)
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on order" storable products')
# let's do an invoice for a deposit of 5%
advance_product = self.env['product.product'].create({
'name': 'Deposit',
'type': 'service',
'invoice_policy': 'order',
})
adv_wiz = self.env['sale.advance.payment.inv'].with_context(active_ids=[self.so.id]).create({
'advance_payment_method': 'percentage',
'amount': 5.0,
'product_id': advance_product.id,
})
act = adv_wiz.with_context(open_invoices=True).create_invoices()
inv = self.env['account.move'].browse(act['res_id'])
self.assertEqual(inv.amount_untaxed, self.so.amount_untaxed * 5.0 / 100.0, 'Sale Stock: deposit invoice is wrong')
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so should be to invoice after invoicing deposit')
# invoice on order: everything should be invoiced
self.so._create_invoices(final=True)
self.assertEqual(self.so.invoice_status, 'invoiced', 'Sale Stock: so should be fully invoiced after second invoice')
# deliver, check the delivered quantities
pick = self.so.picking_ids
pick.move_lines.write({'quantity_done': 2})
self.assertTrue(pick.button_validate(), 'Sale Stock: complete delivery should not need a backorder')
del_qties = [sol.qty_delivered for sol in self.so.order_line]
del_qties_truth = [2.0 if sol.product_id.type in ['product', 'consu'] else 0.0 for sol in self.so.order_line]
self.assertEqual(del_qties, del_qties_truth, 'Sale Stock: delivered quantities are wrong after partial delivery')
# invoice on delivery: nothing to invoice
with self.assertRaises(UserError):
self.so._create_invoices()
def test_02_sale_stock_return(self):
"""
Test a SO with a product invoiced on delivery. Deliver and invoice the SO, then do a return
of the picking. Check that a refund invoice is well generated.
"""
        # initial so
self.product = self.company_data['product_delivery_no']
so_vals = {
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 5.0,
'product_uom': self.product.uom_id.id,
'price_unit': self.product.list_price})],
'pricelist_id': self.company_data['default_pricelist'].id,
}
self.so = self.env['sale.order'].create(so_vals)
# confirm our standard so, check the picking
self.so.action_confirm()
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on delivery" storable products')
# invoice in on delivery, nothing should be invoiced
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "no" instead of "%s".' % self.so.invoice_status)
# deliver completely
pick = self.so.picking_ids
pick.move_lines.write({'quantity_done': 5})
pick.button_validate()
# Check quantity delivered
del_qty = sum(sol.qty_delivered for sol in self.so.order_line)
self.assertEqual(del_qty, 5.0, 'Sale Stock: delivered quantity should be 5.0 instead of %s after complete delivery' % del_qty)
# Check invoice
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" instead of "%s" before invoicing' % self.so.invoice_status)
self.inv_1 = self.so._create_invoices()
self.assertEqual(self.so.invoice_status, 'invoiced', 'Sale Stock: so invoice_status should be "invoiced" instead of "%s" after invoicing' % self.so.invoice_status)
self.assertEqual(len(self.inv_1), 1, 'Sale Stock: only one invoice instead of "%s" should be created' % len(self.inv_1))
        self.assertEqual(self.so.amount_untaxed, self.inv_1.amount_untaxed, 'Sale Stock: amount in SO and invoice should be the same')
self.inv_1.action_post()
# Create return picking
stock_return_picking_form = Form(self.env['stock.return.picking']
.with_context(active_ids=pick.ids, active_id=pick.sorted().ids[0],
active_model='stock.picking'))
return_wiz = stock_return_picking_form.save()
return_wiz.product_return_moves.quantity = 2.0 # Return only 2
return_wiz.product_return_moves.to_refund = True # Refund these 2
res = return_wiz.create_returns()
return_pick = self.env['stock.picking'].browse(res['res_id'])
# Validate picking
return_pick.move_lines.write({'quantity_done': 2})
return_pick.button_validate()
# Check invoice
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" instead of "%s" after picking return' % self.so.invoice_status)
self.assertAlmostEqual(self.so.order_line.sorted()[0].qty_delivered, 3.0, msg='Sale Stock: delivered quantity should be 3.0 instead of "%s" after picking return' % self.so.order_line.sorted()[0].qty_delivered)
# let's do an invoice with refunds
adv_wiz = self.env['sale.advance.payment.inv'].with_context(active_ids=[self.so.id]).create({
'advance_payment_method': 'delivered',
})
adv_wiz.with_context(open_invoices=True).create_invoices()
self.inv_2 = self.so.invoice_ids.filtered(lambda r: r.state == 'draft')
self.assertAlmostEqual(self.inv_2.invoice_line_ids.sorted()[0].quantity, 2.0, msg='Sale Stock: refund quantity on the invoice should be 2.0 instead of "%s".' % self.inv_2.invoice_line_ids.sorted()[0].quantity)
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "no" instead of "%s" after invoicing the return' % self.so.invoice_status)
def test_03_sale_stock_delivery_partial(self):
"""
Test a SO with a product invoiced on delivery. Deliver partially and invoice the SO, when
the SO is set on 'done', the SO should be fully invoiced.
"""
        # initial so
self.product = self.company_data['product_delivery_no']
so_vals = {
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': self.product.name,
'product_id': self.product.id,
'product_uom_qty': 5.0,
'product_uom': self.product.uom_id.id,
'price_unit': self.product.list_price})],
'pricelist_id': self.company_data['default_pricelist'].id,
}
self.so = self.env['sale.order'].create(so_vals)
# confirm our standard so, check the picking
self.so.action_confirm()
self.assertTrue(self.so.picking_ids, 'Sale Stock: no picking created for "invoice on delivery" storable products')
        # invoice on delivery: nothing should be invoiced yet
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "nothing to invoice"')
# deliver partially
pick = self.so.picking_ids
pick.move_lines.write({'quantity_done': 4})
res_dict = pick.button_validate()
wizard = Form(self.env[(res_dict.get('res_model'))].with_context(res_dict['context'])).save()
wizard.process_cancel_backorder()
        # Check that an exception activity is logged on the SO
activity = self.env['mail.activity'].search([('res_id', '=', self.so.id), ('res_model', '=', 'sale.order')])
self.assertEqual(len(activity), 1, 'When no backorder is created for a partial delivery, a warning error should be logged in its origin SO')
# Check quantity delivered
del_qty = sum(sol.qty_delivered for sol in self.so.order_line)
self.assertEqual(del_qty, 4.0, 'Sale Stock: delivered quantity should be 4.0 after partial delivery')
# Check invoice
self.assertEqual(self.so.invoice_status, 'to invoice', 'Sale Stock: so invoice_status should be "to invoice" before invoicing')
self.inv_1 = self.so._create_invoices()
self.assertEqual(self.so.invoice_status, 'no', 'Sale Stock: so invoice_status should be "no" after invoicing')
self.assertEqual(len(self.inv_1), 1, 'Sale Stock: only one invoice should be created')
        self.assertEqual(self.inv_1.amount_untaxed, 4.0 * self.product.list_price, 'Sale Stock: the invoice amount should match the delivered quantity')
self.so.action_done()
self.assertEqual(self.so.invoice_status, 'invoiced', 'Sale Stock: so invoice_status should be "invoiced" when set to done')
def test_04_create_picking_update_saleorderline(self):
"""
Test that updating multiple sale order lines after a successful delivery creates a single picking containing
the new move lines.
"""
# sell two products
item1 = self.company_data['product_order_no'] # consumable
item1.type = 'consu'
item2 = self.company_data['product_delivery_no'] # storable
item2.type = 'product' # storable
self.so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': item1.name, 'product_id': item1.id, 'product_uom_qty': 1, 'product_uom': item1.uom_id.id, 'price_unit': item1.list_price}),
(0, 0, {'name': item2.name, 'product_id': item2.id, 'product_uom_qty': 1, 'product_uom': item2.uom_id.id, 'price_unit': item2.list_price}),
],
})
self.so.action_confirm()
# deliver them
# One of the move is for a consumable product, thus is assigned. The second one is for a
# storable product, thus is unavailable. Hitting `button_validate` will first ask to
# process all the reserved quantities and, if the user chose to process, a second wizard
# will ask to create a backorder for the unavailable product.
self.assertEqual(len(self.so.picking_ids), 1)
res_dict = self.so.picking_ids.sorted()[0].button_validate()
wizard = Form(self.env[(res_dict.get('res_model'))].with_context(res_dict['context'])).save()
self.assertEqual(wizard._name, 'stock.immediate.transfer')
res_dict = wizard.process()
wizard = Form(self.env[(res_dict.get('res_model'))].with_context(res_dict['context'])).save()
self.assertEqual(wizard._name, 'stock.backorder.confirmation')
wizard.process()
# Now, the original picking is done and there is a new one (the backorder).
self.assertEqual(len(self.so.picking_ids), 2)
for picking in self.so.picking_ids:
move = picking.move_lines
if picking.backorder_id:
self.assertEqual(move.product_id.id, item2.id)
self.assertEqual(move.state, 'confirmed')
else:
self.assertEqual(picking.move_lines.product_id.id, item1.id)
self.assertEqual(move.state, 'done')
# update the two original sale order lines
self.so.write({
'order_line': [
(1, self.so.order_line.sorted()[0].id, {'product_uom_qty': 2}),
(1, self.so.order_line.sorted()[1].id, {'product_uom_qty': 2}),
]
})
# a single picking should be created for the new delivery
self.assertEqual(len(self.so.picking_ids), 2)
backorder = self.so.picking_ids.filtered(lambda p: p.backorder_id)
self.assertEqual(len(backorder.move_lines), 2)
for backorder_move in backorder.move_lines:
if backorder_move.product_id.id == item1.id:
self.assertEqual(backorder_move.product_qty, 1)
elif backorder_move.product_id.id == item2.id:
self.assertEqual(backorder_move.product_qty, 2)
# add a new sale order lines
self.so.write({
'order_line': [
(0, 0, {'name': item1.name, 'product_id': item1.id, 'product_uom_qty': 1, 'product_uom': item1.uom_id.id, 'price_unit': item1.list_price}),
]
})
self.assertEqual(sum(backorder.move_lines.filtered(lambda m: m.product_id.id == item1.id).mapped('product_qty')), 2)
def test_05_create_picking_update_saleorderline(self):
""" Same test than test_04 but only with enough products in stock so that the reservation
is successful.
"""
# sell two products
item1 = self.company_data['product_order_no'] # consumable
item1.type = 'consu' # consumable
item2 = self.company_data['product_delivery_no'] # storable
item2.type = 'product' # storable
self.env['stock.quant']._update_available_quantity(item2, self.company_data['default_warehouse'].lot_stock_id, 2)
self.so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': item1.name, 'product_id': item1.id, 'product_uom_qty': 1, 'product_uom': item1.uom_id.id, 'price_unit': item1.list_price}),
(0, 0, {'name': item2.name, 'product_id': item2.id, 'product_uom_qty': 1, 'product_uom': item2.uom_id.id, 'price_unit': item2.list_price}),
],
})
self.so.action_confirm()
# deliver them
self.assertEqual(len(self.so.picking_ids), 1)
res_dict = self.so.picking_ids.sorted()[0].button_validate()
wizard = Form(self.env[(res_dict.get('res_model'))].with_context(res_dict['context'])).save()
wizard.process()
self.assertEqual(self.so.picking_ids.sorted()[0].state, "done")
# update the two original sale order lines
self.so.write({
'order_line': [
(1, self.so.order_line.sorted()[0].id, {'product_uom_qty': 2}),
(1, self.so.order_line.sorted()[1].id, {'product_uom_qty': 2}),
]
})
# a single picking should be created for the new delivery
self.assertEqual(len(self.so.picking_ids), 2)
def test_05_confirm_cancel_confirm(self):
""" Confirm a sale order, cancel it, set to quotation, change the
partner, confirm it again: the second delivery order should have
the new partner.
"""
item1 = self.company_data['product_order_no']
partner1 = self.partner_a.id
partner2 = self.env['res.partner'].create({'name': 'Another Test Partner'})
so1 = self.env['sale.order'].create({
'partner_id': partner1,
'order_line': [(0, 0, {
'name': item1.name,
'product_id': item1.id,
'product_uom_qty': 1,
'product_uom': item1.uom_id.id,
'price_unit': item1.list_price,
})],
})
so1.action_confirm()
self.assertEqual(len(so1.picking_ids), 1)
self.assertEqual(so1.picking_ids.partner_id.id, partner1)
so1.action_cancel()
so1.action_draft()
so1.partner_id = partner2
so1.partner_shipping_id = partner2 # set by an onchange
so1.action_confirm()
self.assertEqual(len(so1.picking_ids), 2)
picking2 = so1.picking_ids.filtered(lambda p: p.state != 'cancel')
self.assertEqual(picking2.partner_id.id, partner2.id)
def test_06_uom(self):
""" Sell a dozen of products stocked in units. Check that the quantities on the sale order
lines as well as the delivered quantities are handled in dozen while the moves themselves
        are handled in units. Edit the ordered quantities, check that the quantities are correctly
        updated on the moves. Edit the ir.config_parameter to propagate the uom of the sale order
        lines to the moves and edit the ordered quantities one last time. Deliver, check the
quantities.
"""
uom_unit = self.env.ref('uom.product_uom_unit')
uom_dozen = self.env.ref('uom.product_uom_dozen')
item1 = self.company_data['product_order_no']
self.assertEqual(item1.uom_id.id, uom_unit.id)
# sell a dozen
so1 = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': item1.name,
'product_id': item1.id,
'product_uom_qty': 1,
'product_uom': uom_dozen.id,
'price_unit': item1.list_price,
})],
})
so1.action_confirm()
# the move should be 12 units
# note: move.product_qty = computed field, always in the uom of the quant
# move.product_uom_qty = stored field representing the initial demand in move.product_uom
move1 = so1.picking_ids.move_lines[0]
self.assertEqual(move1.product_uom_qty, 12)
self.assertEqual(move1.product_uom.id, uom_unit.id)
self.assertEqual(move1.product_qty, 12)
# edit the so line, sell 2 dozen, the move should now be 24 units
so1.write({
'order_line': [
(1, so1.order_line.id, {'product_uom_qty': 2}),
]
})
        # The above will create a second move, and then the two moves will be merged in `_merge_moves`.
        # The picking moves are not well sorted because the new move has just been created, and this
        # influences which move the two are merged into.
        # Which move ends up being the resulting one does not really matter, but in this test we have
        # to make sure we use the resulting move to compare the quantity.
# ```
# for moves in moves_to_merge:
# # link all move lines to record 0 (the one we will keep).
# moves.mapped('move_line_ids').write({'move_id': moves[0].id})
# # merge move data
# moves[0].write(moves._merge_moves_fields())
# # update merged moves dicts
# moves_to_unlink |= moves[1:]
# ```
move1 = so1.picking_ids.move_lines[0]
self.assertEqual(move1.product_uom_qty, 24)
self.assertEqual(move1.product_uom.id, uom_unit.id)
self.assertEqual(move1.product_qty, 24)
# force the propagation of the uom, sell 3 dozen
self.env['ir.config_parameter'].sudo().set_param('stock.propagate_uom', '1')
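# Note on `stock.propagate_uom` (as exercised by this test): once the parameter is enabled, the
# extra demand keeps the UoM of the SO line (dozens) on the stock move it generates instead of
# being converted to the product's default UoM (units), so it appears as a separate move in dozens
# rather than being merged into the existing move in units. The assertions below check exactly that.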
so1.write({
'order_line': [
(1, so1.order_line.id, {'product_uom_qty': 3}),
]
})
move2 = so1.picking_ids.move_lines.filtered(lambda m: m.product_uom.id == uom_dozen.id)
self.assertEqual(move2.product_uom_qty, 1)
self.assertEqual(move2.product_uom.id, uom_dozen.id)
self.assertEqual(move2.product_qty, 12)
# deliver everything
move1.quantity_done = 24
move2.quantity_done = 1
so1.picking_ids.button_validate()
# check the delivered quantity
self.assertEqual(so1.order_line.qty_delivered, 3.0)
def test_07_forced_qties(self):
""" Make multiple sale order lines of the same product which isn't available in stock. On
the picking, create new move lines (through the detailed operations view). See that the move
lines are correctly dispatched through the moves.
"""
uom_unit = self.env.ref('uom.product_uom_unit')
uom_dozen = self.env.ref('uom.product_uom_dozen')
item1 = self.company_data['product_order_no']
self.assertEqual(item1.uom_id.id, uom_unit.id)
# sell one dozen on each of three separate lines
so1 = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': item1.name,
'product_id': item1.id,
'product_uom_qty': 1,
'product_uom': uom_dozen.id,
'price_unit': item1.list_price,
}),
(0, 0, {
'name': item1.name,
'product_id': item1.id,
'product_uom_qty': 1,
'product_uom': uom_dozen.id,
'price_unit': item1.list_price,
}),
(0, 0, {
'name': item1.name,
'product_id': item1.id,
'product_uom_qty': 1,
'product_uom': uom_dozen.id,
'price_unit': item1.list_price,
}),
],
})
so1.action_confirm()
self.assertEqual(len(so1.picking_ids.move_lines), 3)
so1.picking_ids.write({
'move_line_ids': [
(0, 0, {
'product_id': item1.id,
'product_uom_qty': 0,
'qty_done': 1,
'product_uom_id': uom_dozen.id,
'location_id': so1.picking_ids.location_id.id,
'location_dest_id': so1.picking_ids.location_dest_id.id,
}),
(0, 0, {
'product_id': item1.id,
'product_uom_qty': 0,
'qty_done': 1,
'product_uom_id': uom_dozen.id,
'location_id': so1.picking_ids.location_id.id,
'location_dest_id': so1.picking_ids.location_dest_id.id,
}),
(0, 0, {
'product_id': item1.id,
'product_uom_qty': 0,
'qty_done': 1,
'product_uom_id': uom_dozen.id,
'location_id': so1.picking_ids.location_id.id,
'location_dest_id': so1.picking_ids.location_dest_id.id,
}),
],
})
so1.picking_ids.button_validate()
self.assertEqual(so1.picking_ids.state, 'done')
self.assertEqual(so1.order_line.mapped('qty_delivered'), [1, 1, 1])
def test_08_quantities(self):
"""Change the picking code of the receipts to internal. Make a SO for 10 units, go to the
picking and return 5, edit the SO line to 15 units.
The purpose of the test is to check the consistencies across the delivered quantities and the
procurement quantities.
"""
# Change the code of the picking type receipt
self.env['stock.picking.type'].search([('code', '=', 'incoming')]).write({'code': 'internal'})
# Sell and deliver 10 units
item1 = self.company_data['product_order_no']
uom_unit = self.env.ref('uom.product_uom_unit')
so1 = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': item1.name,
'product_id': item1.id,
'product_uom_qty': 10,
'product_uom': uom_unit.id,
'price_unit': item1.list_price,
}),
],
})
so1.action_confirm()
picking = so1.picking_ids
wiz_act = picking.button_validate()
wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
wiz.process()
# Return 5 units
stock_return_picking_form = Form(self.env['stock.return.picking'].with_context(
active_ids=picking.ids,
active_id=picking.sorted().ids[0],
active_model='stock.picking'
))
return_wiz = stock_return_picking_form.save()
for return_move in return_wiz.product_return_moves:
return_move.write({
'quantity': 5,
'to_refund': True
})
res = return_wiz.create_returns()
return_pick = self.env['stock.picking'].browse(res['res_id'])
wiz_act = return_pick.button_validate()
wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
wiz.process()
self.assertEqual(so1.order_line.qty_delivered, 5)
# Order 15 units instead of 10.
so1.write({
'order_line': [
(1, so1.order_line.sorted()[0].id, {'product_uom_qty': 15}),
]
})
# A new move of 10 units should be created (15 ordered - 5 net delivered after the return)
self.assertEqual(so1.order_line.qty_delivered, 5)
self.assertEqual(so1.picking_ids.sorted('id')[-1].move_lines.product_qty, 10)
def test_09_qty_available(self):
""" create a sale order in warehouse1, change to warehouse2 and check the
available quantities on sale order lines are correctly updated """
# make the product storable and put stock in two warehouses
item1 = self.company_data['product_order_no']
item1.type = 'product'
warehouse1 = self.company_data['default_warehouse']
self.env['stock.quant']._update_available_quantity(item1, warehouse1.lot_stock_id, 10)
self.env['stock.quant']._update_reserved_quantity(item1, warehouse1.lot_stock_id, 3)
warehouse2 = self.env['stock.warehouse'].create({
'partner_id': self.partner_a.id,
'name': 'Zizizatestwarehouse',
'code': 'Test',
})
self.env['stock.quant']._update_available_quantity(item1, warehouse2.lot_stock_id, 5)
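# Stock situation at this point: warehouse1 holds 10 units of which 3 are reserved (7 free),
# warehouse2 holds 5 units. The qty_available_today / free_qty_today / virtual_available_at_date
# assertions below rely on these figures.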
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': item1.name, 'product_id': item1.id, 'product_uom_qty': 1, 'product_uom': item1.uom_id.id, 'price_unit': item1.list_price}),
],
})
line = so.order_line[0]
self.assertAlmostEqual(line.scheduled_date, datetime.now(), delta=timedelta(seconds=10))
self.assertEqual(line.virtual_available_at_date, 10)
self.assertEqual(line.free_qty_today, 7)
self.assertEqual(line.qty_available_today, 10)
self.assertEqual(line.warehouse_id, warehouse1)
self.assertEqual(line.qty_to_deliver, 1)
so.warehouse_id = warehouse2
# invalidate product cache to ensure qty_available is recomputed
# because warehouse isn't in the depends_context of qty_available
line.product_id.invalidate_cache()
self.assertEqual(line.virtual_available_at_date, 5)
self.assertEqual(line.free_qty_today, 5)
self.assertEqual(line.qty_available_today, 5)
self.assertEqual(line.warehouse_id, warehouse2)
self.assertEqual(line.qty_to_deliver, 1)
def test_10_qty_available(self):
"""create a sale order containing three times the same product. The
quantity available should be different for the 3 lines"""
item1 = self.company_data['product_order_no']
item1.type = 'product'
self.env['stock.quant']._update_available_quantity(item1, self.company_data['default_warehouse'].lot_stock_id, 10)
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': item1.name, 'product_id': item1.id, 'product_uom_qty': 5, 'product_uom': item1.uom_id.id, 'price_unit': item1.list_price}),
(0, 0, {'name': item1.name, 'product_id': item1.id, 'product_uom_qty': 5, 'product_uom': item1.uom_id.id, 'price_unit': item1.list_price}),
(0, 0, {'name': item1.name, 'product_id': item1.id, 'product_uom_qty': 5, 'product_uom': item1.uom_id.id, 'price_unit': item1.list_price}),
],
})
self.assertEqual(so.order_line.mapped('free_qty_today'), [10, 5, 0])
def test_11_return_with_refund(self):
""" Creates a sale order, valids it and its delivery, then creates a
return. The return must refund by default and the sale order delivered
quantity must be updated.
"""
# Creates a sale order for 10 products.
sale_order = self._get_new_sale_order()
# Validates the sale order, then validates the delivery.
sale_order.action_confirm()
self.assertTrue(sale_order.picking_ids)
self.assertEqual(sale_order.order_line.qty_delivered, 0)
picking = sale_order.picking_ids
picking.move_lines.write({'quantity_done': 10})
picking.button_validate()
# Checks the delivery amount (must be 10).
self.assertEqual(sale_order.order_line.qty_delivered, 10)
# Creates a return from the delivery picking.
return_picking_form = Form(self.env['stock.return.picking']
.with_context(active_ids=picking.ids, active_id=picking.id,
active_model='stock.picking'))
return_wizard = return_picking_form.save()
# Checks the field `to_refund` is checked (must be checked by default).
self.assertEqual(return_wizard.product_return_moves.to_refund, True)
self.assertEqual(return_wizard.product_return_moves.quantity, 10)
# Validates the return picking.
res = return_wizard.create_returns()
return_picking = self.env['stock.picking'].browse(res['res_id'])
return_picking.move_lines.write({'quantity_done': 10})
return_picking.button_validate()
# Checks the delivery amount (must be 0).
self.assertEqual(sale_order.order_line.qty_delivered, 0)
def test_12_return_without_refund(self):
""" Do the exact thing than in `test_11_return_with_refund` except we
set on False the refund and checks the sale order delivered quantity
isn't changed.
"""
# Creates a sale order for 10 products.
sale_order = self._get_new_sale_order()
# Validates the sale order, then validates the delivery.
sale_order.action_confirm()
self.assertTrue(sale_order.picking_ids)
self.assertEqual(sale_order.order_line.qty_delivered, 0)
picking = sale_order.picking_ids
picking.move_lines.write({'quantity_done': 10})
picking.button_validate()
# Checks the delivery amount (must be 10).
self.assertEqual(sale_order.order_line.qty_delivered, 10)
# Creates a return from the delivery picking.
return_picking_form = Form(self.env['stock.return.picking']
.with_context(active_ids=picking.ids, active_id=picking.id,
active_model='stock.picking'))
return_wizard = return_picking_form.save()
# Checks the field `to_refund` is checked, then unchecks it.
self.assertEqual(return_wizard.product_return_moves.to_refund, True)
self.assertEqual(return_wizard.product_return_moves.quantity, 10)
return_wizard.product_return_moves.to_refund = False
# Validates the return picking.
res = return_wizard.create_returns()
return_picking = self.env['stock.picking'].browse(res['res_id'])
return_picking.move_lines.write({'quantity_done': 10})
return_picking.button_validate()
# Checks the delivery amount (must still be 10).
self.assertEqual(sale_order.order_line.qty_delivered, 10)
def test_13_delivered_qty(self):
""" Creates a sale order, valids it and adds a new move line in the delivery for a
product with an invoicing policy on 'order', then checks a new SO line was created.
After that, creates a second sale order and does the same thing but with a product
with and invoicing policy on 'ordered'.
"""
product_inv_on_delivered = self.company_data['product_delivery_no']
# Configure a product with invoicing policy on order.
product_inv_on_order = self.env['product.product'].create({
'name': 'Shenaniffluffy',
'type': 'consu',
'invoice_policy': 'order',
'list_price': 55.0,
})
# Creates a sale order for 3 products invoiced on qty. delivered.
sale_order = self._get_new_sale_order(amount=3)
# Confirms the sale order, then increases the delivered qty., adds a new
# line and validates the delivery.
sale_order.action_confirm()
self.assertTrue(sale_order.picking_ids)
self.assertEqual(len(sale_order.order_line), 1)
self.assertEqual(sale_order.order_line.qty_delivered, 0)
picking = sale_order.picking_ids
initial_product = sale_order.order_line.product_id
picking_form = Form(picking)
with picking_form.move_line_ids_without_package.edit(0) as move:
move.qty_done = 5
with picking_form.move_line_ids_without_package.new() as new_move:
new_move.product_id = product_inv_on_order
new_move.qty_done = 5
picking = picking_form.save()
picking.button_validate()
# Check a new sale order line was correctly created.
self.assertEqual(len(sale_order.order_line), 2)
so_line_1 = sale_order.order_line[0]
so_line_2 = sale_order.order_line[1]
self.assertEqual(so_line_1.product_id.id, product_inv_on_delivered.id)
self.assertEqual(so_line_1.product_uom_qty, 3)
self.assertEqual(so_line_1.qty_delivered, 5)
self.assertEqual(so_line_1.price_unit, 70.0)
self.assertEqual(so_line_2.product_id.id, product_inv_on_order.id)
self.assertEqual(so_line_2.product_uom_qty, 0)
self.assertEqual(so_line_2.qty_delivered, 5)
self.assertEqual(
so_line_2.price_unit, 0,
"Shouldn't get the product price as the invoice policy is on qty. ordered")
# Check the picking didn't change
self.assertRecordValues(sale_order.picking_ids.move_lines, [
{'product_id': initial_product.id, 'quantity_done': 5},
{'product_id': product_inv_on_order.id, 'quantity_done': 5},
])
# Creates a second sale order for 3 product invoiced on qty. ordered.
sale_order = self._get_new_sale_order(product=product_inv_on_order, amount=3)
# Confirms the sale order, then increases the delivered qty., adds a new
# line and validates the delivery.
sale_order.action_confirm()
self.assertTrue(sale_order.picking_ids)
self.assertEqual(len(sale_order.order_line), 1)
self.assertEqual(sale_order.order_line.qty_delivered, 0)
picking = sale_order.picking_ids
picking_form = Form(picking)
with picking_form.move_line_ids_without_package.edit(0) as move:
move.qty_done = 5
with picking_form.move_line_ids_without_package.new() as new_move:
new_move.product_id = product_inv_on_delivered
new_move.qty_done = 5
picking = picking_form.save()
picking.button_validate()
# Check a new sale order line was correctly created.
self.assertEqual(len(sale_order.order_line), 2)
so_line_1 = sale_order.order_line[0]
so_line_2 = sale_order.order_line[1]
self.assertEqual(so_line_1.product_id.id, product_inv_on_order.id)
self.assertEqual(so_line_1.product_uom_qty, 3)
self.assertEqual(so_line_1.qty_delivered, 5)
self.assertEqual(so_line_1.price_unit, 55.0)
self.assertEqual(so_line_2.product_id.id, product_inv_on_delivered.id)
self.assertEqual(so_line_2.product_uom_qty, 0)
self.assertEqual(so_line_2.qty_delivered, 5)
self.assertEqual(
so_line_2.price_unit, 70.0,
"Should get the product price as the invoice policy is on qty. delivered")
def test_14_delivered_qty_in_multistep(self):
""" Creates a sale order with delivery in two-step. Process the pick &
ship and check we don't have extra SO line. Then, do the same but with
adding a extra move and check only one extra SO line was created.
"""
# Set the delivery in two steps.
warehouse = self.company_data['default_warehouse']
warehouse.delivery_steps = 'pick_ship'
# Configure a product with invoicing policy on order.
product_inv_on_order = self.env['product.product'].create({
'name': 'Shenaniffluffy',
'type': 'consu',
'invoice_policy': 'order',
'list_price': 55.0,
})
# Create a sale order.
sale_order = self._get_new_sale_order()
# Confirms the sale order, then validates the pick and the delivery.
sale_order.action_confirm()
self.assertTrue(sale_order.picking_ids)
self.assertEqual(len(sale_order.order_line), 1)
self.assertEqual(sale_order.order_line.qty_delivered, 0)
pick = sale_order.picking_ids.filtered(lambda p: p.picking_type_code == 'internal')
delivery = sale_order.picking_ids.filtered(lambda p: p.picking_type_code == 'outgoing')
picking_form = Form(pick)
with picking_form.move_line_ids_without_package.edit(0) as move:
move.qty_done = 10
pick = picking_form.save()
pick.button_validate()
picking_form = Form(delivery)
with picking_form.move_line_ids_without_package.edit(0) as move:
move.qty_done = 10
delivery = picking_form.save()
delivery.button_validate()
# Check no new sale order line was created.
self.assertEqual(len(sale_order.order_line), 1)
self.assertEqual(sale_order.order_line.product_uom_qty, 10)
self.assertEqual(sale_order.order_line.qty_delivered, 10)
self.assertEqual(sale_order.order_line.price_unit, 70.0)
# Creates a second sale order.
sale_order = self._get_new_sale_order()
# Confirms the sale order, then adds a new line for another product in the pick/out.
sale_order.action_confirm()
self.assertTrue(sale_order.picking_ids)
self.assertEqual(len(sale_order.order_line), 1)
self.assertEqual(sale_order.order_line.qty_delivered, 0)
pick = sale_order.picking_ids.filtered(lambda p: p.picking_type_code == 'internal')
delivery = sale_order.picking_ids.filtered(lambda p: p.picking_type_code == 'outgoing')
picking_form = Form(pick)
with picking_form.move_line_ids_without_package.edit(0) as move:
move.qty_done = 10
with picking_form.move_line_ids_without_package.new() as new_move:
new_move.product_id = product_inv_on_order
new_move.qty_done = 10
pick = picking_form.save()
pick.button_validate()
picking_form = Form(delivery)
with picking_form.move_line_ids_without_package.edit(0) as move:
move.qty_done = 10
with picking_form.move_line_ids_without_package.new() as new_move:
new_move.product_id = product_inv_on_order
new_move.qty_done = 10
delivery = picking_form.save()
delivery.button_validate()
# Check a new sale order line was correctly created.
self.assertEqual(len(sale_order.order_line), 2)
so_line_1 = sale_order.order_line[0]
so_line_2 = sale_order.order_line[1]
self.assertEqual(so_line_1.product_id.id, self.company_data['product_delivery_no'].id)
self.assertEqual(so_line_1.product_uom_qty, 10)
self.assertEqual(so_line_1.qty_delivered, 10)
self.assertEqual(so_line_1.price_unit, 70.0)
self.assertEqual(so_line_2.product_id.id, product_inv_on_order.id)
self.assertEqual(so_line_2.product_uom_qty, 0)
self.assertEqual(so_line_2.qty_delivered, 10)
self.assertEqual(so_line_2.price_unit, 0)
def test_08_sale_return_qty_and_cancel(self):
"""
Test a SO with a product invoiced on delivery and a quantity of 5.
Create two invoices: one for a quantity of 3 and one for a quantity of 2.
Then cancel the sale order: it should not raise any warning and should be cancelled.
"""
partner = self.partner_a
product = self.company_data['product_delivery_no']
so_vals = {
'partner_id': partner.id,
'partner_invoice_id': partner.id,
'partner_shipping_id': partner.id,
'order_line': [(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': 5.0,
'product_uom': product.uom_id.id,
'price_unit': product.list_price})],
'pricelist_id': self.company_data['default_pricelist'].id,
}
so = self.env['sale.order'].create(so_vals)
# confirm the so
so.action_confirm()
# deliver partially
pick = so.picking_ids
pick.move_lines.write({'quantity_done': 3})
wiz_act = pick.button_validate()
wiz = Form(self.env[wiz_act['res_model']].with_context(wiz_act['context'])).save()
wiz.process()
# create an invoice for the 3 delivered units and post it
inv_1 = so._create_invoices()
inv_1.action_post()
self.assertEqual(inv_1.state, 'posted', 'invoice should be in posted state')
pick_2 = so.picking_ids.filtered('backorder_id')
pick_2.move_lines.write({'quantity_done': 2})
pick_2.button_validate()
# create an invoice for the remaining 2 units
inv_2 = so._create_invoices()
self.assertEqual(inv_2.state, 'draft', 'invoice should be in draft state')
# check the status of invoices after cancelling the order
so.action_cancel()
wizard = self.env['sale.order.cancel'].with_context({'order_id': so.id}).create({'order_id': so.id})
wizard.action_cancel()
self.assertEqual(inv_1.state, 'posted', 'A posted invoice state should remain posted')
self.assertEqual(inv_2.state, 'cancel', 'A draft invoice should be cancelled')
def test_reservation_method_w_sale(self):
picking_type_out = self.company_data['default_warehouse'].out_type_id
# make sure generated picking will auto-assign
picking_type_out.reservation_method = 'at_confirm'
product = self.company_data['product_delivery_no']
product.type = 'product'
self.env['stock.quant']._update_available_quantity(product, self.company_data['default_warehouse'].lot_stock_id, 20)
sale_order1 = self._get_new_sale_order(amount=10.0)
# Validate the sale order, picking should automatically assign stock
sale_order1.action_confirm()
picking1 = sale_order1.picking_ids
self.assertTrue(picking1)
self.assertEqual(picking1.state, 'assigned')
picking1.unlink()
# make sure the generated picking does not auto-assign
picking_type_out.reservation_method = 'manual'
sale_order2 = self._get_new_sale_order(amount=10.0)
# Validate the sale order, picking should not automatically assign stock
sale_order2.action_confirm()
picking2 = sale_order2.picking_ids
self.assertTrue(picking2)
self.assertEqual(picking2.state, 'confirmed')
picking2.unlink()
# make sure generated picking auto-assigns according to (picking) scheduled date
picking_type_out.reservation_method = 'by_date'
picking_type_out.reservation_days_before = 2
# too early for scheduled date => don't auto-assign
sale_order3 = self._get_new_sale_order(amount=10.0)
sale_order3.commitment_date = datetime.now() + timedelta(days=10)
sale_order3.action_confirm()
picking3 = sale_order3.picking_ids
self.assertTrue(picking3)
self.assertEqual(picking3.state, 'confirmed')
picking3.unlink()
# within scheduled date + reservation days before => auto-assign
sale_order4 = self._get_new_sale_order(amount=10.0)
sale_order4.commitment_date = datetime.now() + timedelta(days=1)
sale_order4.action_confirm()
self.assertTrue(sale_order4.picking_ids)
self.assertEqual(sale_order4.picking_ids.state, 'assigned')
def test_packaging_propagation(self):
"""Create a SO with lines using packaging, check the packaging propagate
to its move.
"""
warehouse = self.company_data['default_warehouse']
warehouse.delivery_steps = 'pick_pack_ship'
product = self.env['product.product'].create({
'name': 'Product with packaging',
'type': 'product',
})
packOf10 = self.env['product.packaging'].create({
'name': 'PackOf10',
'product_id': product.id,
'qty': 10
})
packOf20 = self.env['product.packaging'].create({
'name': 'PackOf20',
'product_id': product.id,
'qty': 20
})
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'product_id': product.id,
'product_uom_qty': 10.0,
'product_uom': product.uom_id.id,
'product_packaging_id': packOf10.id,
})],
})
so.action_confirm()
pick = so.order_line.move_ids
pack = pick.move_orig_ids
ship = pack.move_orig_ids
self.assertEqual(pick.product_packaging_id, packOf10)
self.assertEqual(pack.product_packaging_id, packOf10)
self.assertEqual(ship.product_packaging_id, packOf10)
so.order_line[0].write({
'product_packaging_id': packOf20.id,
'product_uom_qty': 20
})
self.assertEqual(so.order_line.move_ids.product_packaging_id, packOf20)
self.assertEqual(pick.product_packaging_id, packOf20)
self.assertEqual(pack.product_packaging_id, packOf20)
self.assertEqual(ship.product_packaging_id, packOf20)
so.order_line[0].write({'product_packaging_id': False})
self.assertFalse(pick.product_packaging_id)
self.assertFalse(pack.product_packaging_id)
self.assertFalse(ship.product_packaging_id)
def test_15_cancel_delivery(self):
""" Suppose the option "Lock Confirmed Sales" enabled and a product with the invoicing
policy set to "Delivered quantities". When cancelling the delivery of such a product, the
invoice status of the associated SO should be 'Nothing to Invoice'
"""
group_auto_done = self.env.ref('sale.group_auto_done_setting')
self.env.user.groups_id = [(4, group_auto_done.id)]
product = self.product_a
product.invoice_policy = 'delivery'
partner = self.partner_a
so = self.env['sale.order'].create({
'partner_id': partner.id,
'partner_invoice_id': partner.id,
'partner_shipping_id': partner.id,
'order_line': [(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': 2,
'product_uom': product.uom_id.id,
'price_unit': product.list_price
})],
'pricelist_id': self.env.ref('product.list0').id,
})
so.action_confirm()
self.assertEqual(so.state, 'done')
so.picking_ids.action_cancel()
self.assertEqual(so.invoice_status, 'no')
def test_16_multi_uom(self):
yards_uom = self.env['uom.uom'].create({
'category_id': self.env.ref('uom.uom_categ_length').id,
'name': 'Yards',
'factor_inv': 0.9144,
'uom_type': 'bigger',
})
product = self.env.ref('product.product_product_11').copy({
'uom_id': self.env.ref('uom.product_uom_meter').id,
'uom_po_id': yards_uom.id,
})
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {
'name': product.name,
'product_id': product.id,
'product_uom_qty': 4.0,
'product_uom': yards_uom.id,
'price_unit': 1.0,
})
],
})
so.action_confirm()
picking = so.picking_ids[0]
picking.move_lines.write({'quantity_done': 3.66})
picking.button_validate()
self.assertEqual(so.order_line.mapped('qty_delivered'), [4.0], 'Sale: no conversion error on delivery in different uom')
def test_17_qty_update_propagation(self):
""" Creates a sale order, then modifies the sale order lines qty and verifies
that quantity changes are correctly propagated to the picking and delivery picking.
"""
# Set the delivery in two steps.
warehouse = self.company_data['default_warehouse']
warehouse.delivery_steps = 'pick_ship'
# Sell a product.
product = self.company_data['product_delivery_no'] # storable
product.type = 'product' # storable
self.env['stock.quant']._update_available_quantity(product, self.company_data['default_warehouse'].lot_stock_id, 50)
sale_order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'order_line': [
(0, 0, {'name': product.name, 'product_id': product.id, 'product_uom_qty': 50, 'product_uom': product.uom_id.id, 'price_unit': product.list_price}),
],
})
sale_order.action_confirm()
# Check picking created
self.assertEqual(len(sale_order.picking_ids), 2, 'A picking and a delivery picking should have been created.')
customer_location = self.env.ref('stock.stock_location_customers')
move_pick = sale_order.picking_ids.filtered(lambda p: p.location_dest_id.id != customer_location.id).move_lines
move_out = sale_order.picking_ids.filtered(lambda p: p.location_dest_id.id == customer_location.id).move_lines
self.assertEqual(len(move_out), 1, 'Only one move should be created for a single product.')
self.assertEqual(move_out.product_uom_qty, 50, 'The move quantity should be the same as the quantity sold.')
# Decrease the quantity in the sale order and check the move has been updated.
sale_order.write({
'order_line': [
(1, sale_order.order_line.id, {'product_uom_qty': 30}),
]
})
self.assertEqual(move_pick.product_uom_qty, 30, 'The move quantity should have been decreased as the sale order line was.')
self.assertEqual(move_out.product_uom_qty, 30, 'The move quantity should have been decreased as the sale order line and the pick line were.')
self.assertEqual(len(sale_order.picking_ids), 2, 'No additional picking should have been created.')
# Increase the quantity in the sale order and check the move has been updated.
sale_order.write({
'order_line': [
(1, sale_order.order_line.id, {'product_uom_qty': 40})
]
})
self.assertEqual(move_pick.product_uom_qty, 40, 'The move quantity should have been increased as the sale order line was.')
self.assertEqual(move_out.product_uom_qty, 40, 'The move quantity should have been increased as the sale order line and the pick line were.')
def test_18_deliver_more_and_multi_uom(self):
"""
Deliver an additional product with a UoM different than its default one
This UoM should be the same on the generated SO line
"""
uom_m_id = self.ref("uom.product_uom_meter")
uom_km_id = self.ref("uom.product_uom_km")
self.product_b.write({
'uom_id': uom_m_id,
'uom_po_id': uom_m_id,
})
so = self._get_new_sale_order(product=self.product_a)
so.action_confirm()
picking = so.picking_ids
self.env['stock.move'].create({
'picking_id': picking.id,
'location_id': picking.location_id.id,
'location_dest_id': picking.location_dest_id.id,
'name': self.product_b.name,
'product_id': self.product_b.id,
'product_uom_qty': 1,
'product_uom': uom_km_id,
})
action = picking.button_validate()
wizard = Form(self.env[action['res_model']].with_context(action['context'])).save()
wizard.process()
self.assertEqual(so.order_line[1].product_id, self.product_b)
self.assertEqual(so.order_line[1].qty_delivered, 1)
self.assertEqual(so.order_line[1].product_uom.id, uom_km_id)
def test_multiple_returns(self):
# Creates a sale order for 10 products.
sale_order = self._get_new_sale_order()
# Validates the sale order, then validates the delivery.
sale_order.action_confirm()
picking = sale_order.picking_ids
picking.move_lines.write({'quantity_done': 10})
picking.button_validate()
# Creates a return from the delivery picking.
return_picking_form = Form(self.env['stock.return.picking']
.with_context(active_ids=picking.ids, active_id=picking.id,
active_model='stock.picking'))
return_wizard = return_picking_form.save()
# Check that the correct quantity is set on the return
self.assertEqual(return_wizard.product_return_moves.quantity, 10)
return_wizard.product_return_moves.quantity = 2
# Validates the return picking.
res = return_wizard.create_returns()
return_picking = self.env['stock.picking'].browse(res['res_id'])
return_picking.move_lines.write({'quantity_done': 2})
return_picking.button_validate()
# Creates a second return from the delivery picking.
return_picking_form = Form(self.env['stock.return.picking']
.with_context(active_ids=picking.ids, active_id=picking.id,
active_model='stock.picking'))
return_wizard = return_picking_form.save()
# Check that the remaining quantity is set on the return
self.assertEqual(return_wizard.product_return_moves.quantity, 8)
def test_return_with_mto_and_multisteps(self):
"""
Suppose a product P and a 3-step delivery.
Sell 5 x P, process pick & pack pickings and then decrease the qty on
the SO line:
- the ship picking should be updated
- there should be a return R1 for the pack picking
- there should be a return R2 for the pick picking
- it should be possible to reserve R1
"""
warehouse = self.env['stock.warehouse'].search([('company_id', '=', self.env.company.id)], limit=1)
warehouse.delivery_steps = 'pick_pack_ship'
stock_location = warehouse.lot_stock_id
pack_location, out_location, custo_location = warehouse.delivery_route_id.rule_ids.location_id
product = self.env['product.product'].create({
'name': 'SuperProduct',
'type': 'product',
})
self.env['stock.quant']._update_available_quantity(product, stock_location, 5)
so_form = Form(self.env['sale.order'])
so_form.partner_id = self.partner_a
with so_form.order_line.new() as line:
line.product_id = product
line.product_uom_qty = 5
so = so_form.save()
so.action_confirm()
pick_picking, pack_picking, _ = so.picking_ids
(pick_picking + pack_picking).move_lines.quantity_done = 5
(pick_picking + pack_picking).button_validate()
with Form(so) as so_form:
with so_form.order_line.edit(0) as line:
line.product_uom_qty = 3
move_lines = so.picking_ids.move_lines.sorted('id')
ship_sm, pack_sm, pick_sm, ret_pack_sm, ret_pick_sm = move_lines
self.assertRecordValues(move_lines, [
{'location_id': out_location.id, 'location_dest_id': custo_location.id, 'move_orig_ids': pack_sm.ids, 'move_dest_ids': []},
{'location_id': pack_location.id, 'location_dest_id': out_location.id, 'move_orig_ids': pick_sm.ids, 'move_dest_ids': ship_sm.ids},
{'location_id': stock_location.id, 'location_dest_id': pack_location.id, 'move_orig_ids': [], 'move_dest_ids': pack_sm.ids},
{'location_id': out_location.id, 'location_dest_id': pack_location.id, 'move_orig_ids': [], 'move_dest_ids': ret_pick_sm.ids},
{'location_id': pack_location.id, 'location_dest_id': stock_location.id, 'move_orig_ids': ret_pack_sm.ids, 'move_dest_ids': []},
])
ret_pack_sm.picking_id.action_assign()
self.assertEqual(ret_pack_sm.state, 'assigned')
self.assertEqual(ret_pack_sm.move_line_ids.product_uom_qty, 2)
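# The two return moves cover the 2 units removed from the order (5 -> 3). The pack-stage return
# (out_location -> pack_location) can be reserved because the 5 units were already moved to the
# output location when the pick and pack pickings were processed.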
def test_mtso_and_qty_decreasing(self):
"""
First, confirm a SO that has a line with the MTO route (the product
should already be available in stock). Then, decrease the qty on the SO
line:
- The delivery should be updated
- There should not be any other picking
"""
warehouse = self.company_data['default_warehouse']
customer_location = self.env.ref('stock.stock_location_customers')
mto_route = self.env.ref('stock.route_warehouse0_mto')
mto_route.active = True
self.product_a.type = 'product'
self.env['stock.quant']._update_available_quantity(self.product_a, warehouse.lot_stock_id, 10)
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'warehouse_id': warehouse.id,
'order_line': [(0, 0, {
'name': self.product_a.name,
'product_id': self.product_a.id,
'product_uom_qty': 10,
'product_uom': self.product_a.uom_id.id,
'price_unit': 1,
'route_id': mto_route.id,
})],
})
so.action_confirm()
self.assertRecordValues(so.picking_ids, [{'location_id': warehouse.lot_stock_id.id, 'location_dest_id': customer_location.id}])
so.order_line.product_uom_qty = 8
self.assertRecordValues(so.picking_ids, [{'location_id': warehouse.lot_stock_id.id, 'location_dest_id': customer_location.id}])
self.assertEqual(so.picking_ids.move_lines.product_uom_qty, 8)
def test_packaging_and_qty_decrease(self):
packaging = self.env['product.packaging'].create({
'name': "Super Packaging",
'product_id': self.product_a.id,
'qty': 10.0,
})
so_form = Form(self.env['sale.order'])
so_form.partner_id = self.partner_a
with so_form.order_line.new() as line:
line.product_id = self.product_a
line.product_uom_qty = 10
so = so_form.save()
so.action_confirm()
self.assertEqual(so.order_line.product_packaging_id, packaging)
with Form(so) as so_form:
with so_form.order_line.edit(0) as line:
line.product_uom_qty = 8
self.assertEqual(so.picking_ids.move_lines.product_uom_qty, 8)
def test_backorder_and_decrease_sol_qty(self):
"""
2 steps delivery
SO with 10 x P
Process pickings of 6 x P with backorders
Update SO: 7 x P
Backorder should be updated: 1 x P
"""
warehouse = self.company_data['default_warehouse']
warehouse.delivery_steps = 'pick_ship'
stock_location = warehouse.lot_stock_id
out_location = warehouse.wh_output_stock_loc_id
customer_location = self.env.ref('stock.stock_location_customers')
so = self._get_new_sale_order()
so.action_confirm()
pick01, ship01 = so.picking_ids
pick01.move_line_ids.qty_done = 6
pick01._action_done()
pick02 = pick01.backorder_ids
ship01.move_line_ids[0].qty_done = 6
ship01._action_done()
ship02 = ship01.backorder_ids
so.order_line.product_uom_qty = 7
self.assertRecordValues(so.picking_ids.move_lines.sorted('id'), [
{'location_id': out_location.id, 'location_dest_id': customer_location.id, 'product_uom_qty': 6.0, 'quantity_done': 6.0, 'state': 'done'},
{'location_id': stock_location.id, 'location_dest_id': out_location.id, 'product_uom_qty': 6.0, 'quantity_done': 6.0, 'state': 'done'},
{'location_id': stock_location.id, 'location_dest_id': out_location.id, 'product_uom_qty': 1.0, 'quantity_done': 0.0, 'state': 'assigned'},
{'location_id': out_location.id, 'location_dest_id': customer_location.id, 'product_uom_qty': 1.0, 'quantity_done': 0.0, 'state': 'waiting'},
])
self.assertEqual(ship01.move_lines.move_orig_ids, (pick01 | pick02).move_lines)
self.assertEqual(ship02.move_lines.move_orig_ids, (pick01 | pick02).move_lines)
def test_incoterm_in_advance_payment(self):
"""When generating a advance payment invoice from a SO, this invoice incoterm should be the same as the SO"""
incoterm = self.env['account.incoterms'].create({
'name': 'Test Incoterm',
'code': 'TEST',
})
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'incoterm': incoterm.id,
'order_line': [(0, 0, {
'name': self.product_a.name,
'product_id': self.product_a.id,
'product_uom_qty': 10,
'product_uom': self.product_a.uom_id.id,
'price_unit': 1,
})],
})
so.action_confirm()
advance_product = self.env['product.product'].create({
'name': 'Deposit',
'type': 'service',
'invoice_policy': 'order',
})
adv_wiz = self.env['sale.advance.payment.inv'].with_context(active_ids=[so.id]).create({
'advance_payment_method': 'percentage',
'amount': 5.0,
'product_id': advance_product.id,
})
act = adv_wiz.with_context(open_invoices=True).create_invoices()
invoice = self.env['account.move'].browse(act['res_id'])
self.assertEqual(invoice.invoice_incoterm_id.id, incoterm.id)
def test_exception_delivery_partial_multi(self):
"""
When a backorder is cancelled for a picking in multi-picking,
the related SO should have an exception logged
"""
# Create 2 sale orders
so_1 = self._get_new_sale_order()
so_1.action_confirm()
picking_1 = so_1.picking_ids
picking_1.move_lines.write({'quantity_done': 1})
so_2 = self._get_new_sale_order()
so_2.action_confirm()
picking_2 = so_2.picking_ids
picking_2.move_lines.write({'quantity_done': 2})
# multi-picking validation
pick = picking_1 | picking_2
res_dict = pick.button_validate()
wizard = Form(self.env[(res_dict.get('res_model'))].with_context(res_dict['context'])).save()
wizard.backorder_confirmation_line_ids[1].write({'to_backorder': False})
wizard.process()
# Check that an exception activity is logged on so_2
activity = self.env['mail.activity'].search([('res_id', '=', so_2.id), ('res_model', '=', 'sale.order')])
self.assertEqual(len(activity), 1, 'When no backorder is created for a partial delivery, a warning should be logged on the origin SO')
def test_3_steps_and_unpack(self):
"""
When removing the package of a stock.move.line mid-flow in a 3-step delivery with backorders, make sure that
the OUT picking does not get packages back on its stock.move.lines.
Steps:
- create a SO of product A for 10 units
- on PICK_1 picking: put 2 units in Done and put in a package, validate, create a backorder
- on PACK_1 picking: remove the destination package for the 2 units, validate, create a backorder
- on OUT picking: the stock.move.line should not have a package
- on PICK_2 picking: put 2 units in Done and put in a package, validate, create a backorder
- on OUT picking: the stock.move.line should still not have a package
- on PACK_2: validate, create a backorder
- on OUT picking: there should be 2 stock.move.lines, one with package and one without
"""
warehouse = self.company_data.get('default_warehouse')
self.env['res.config.settings'].write({
'group_stock_tracking_lot': True,
'group_stock_adv_location': True,
'group_stock_multi_locations': True,
})
warehouse.delivery_steps = 'pick_pack_ship'
self.env['stock.quant']._update_available_quantity(self.test_product_delivery, warehouse.lot_stock_id, 10)
so_1 = self._get_new_sale_order(product=self.test_product_delivery)
so_1.action_confirm()
pick_picking = so_1.picking_ids.filtered(lambda p: p.picking_type_id == warehouse.pick_type_id)
pack_picking = so_1.picking_ids.filtered(lambda p: p.picking_type_id == warehouse.pack_type_id)
out_picking = so_1.picking_ids.filtered(lambda p: p.picking_type_id == warehouse.out_type_id)
pick_picking.move_lines.quantity_done = 2
pick_picking.action_put_in_pack()
backorder_wizard_dict = pick_picking.button_validate()
backorder_wizard = Form(self.env[backorder_wizard_dict['res_model']].with_context(backorder_wizard_dict['context'])).save()
backorder_wizard.process()
pack_picking.move_line_ids.result_package_id = False
pack_picking.move_lines.quantity_done = 2
backorder_wizard_dict = pack_picking.button_validate()
backorder_wizard = Form(self.env[backorder_wizard_dict['res_model']].with_context(backorder_wizard_dict['context'])).save()
backorder_wizard.process()
self.assertEqual(out_picking.move_line_ids.package_id.id, False)
self.assertEqual(out_picking.move_line_ids.result_package_id.id, False)
pick_picking_2 = so_1.picking_ids.filtered(lambda x: x.picking_type_id == warehouse.pick_type_id and x.state != 'done')
pick_picking_2.move_lines.quantity_done = 2
package_2 = pick_picking_2.action_put_in_pack()
backorder_wizard_dict = pick_picking_2.button_validate()
backorder_wizard = Form(self.env[backorder_wizard_dict['res_model']].with_context(backorder_wizard_dict['context'])).save()
backorder_wizard.process()
self.assertEqual(out_picking.move_line_ids.package_id.id, False)
self.assertEqual(out_picking.move_line_ids.result_package_id.id, False)
pack_picking_2 = so_1.picking_ids.filtered(lambda p: p.picking_type_id == warehouse.pack_type_id and p.state != 'done')
pack_picking_2.move_lines.quantity_done = 2
backorder_wizard_dict = pack_picking_2.button_validate()
backorder_wizard = Form(self.env[backorder_wizard_dict['res_model']].with_context(backorder_wizard_dict['context'])).save()
backorder_wizard.process()
self.assertRecordValues(out_picking.move_line_ids, [{'result_package_id': False}, {'result_package_id': package_2.id}])
| 48.656755 | 75,272 |
| 11,568 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.stock_account.tests.test_anglo_saxon_valuation_reconciliation_common import ValuationReconciliationTestCommon
from odoo.addons.sale.tests.common import TestSaleCommon
from odoo import fields
from odoo.tests import tagged
from datetime import timedelta
@tagged('post_install', '-at_install')
class TestSaleStockLeadTime(TestSaleCommon, ValuationReconciliationTestCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
# Update the test product with a Customer Lead Time
cls.test_product_order.sale_delay = 5.0
def test_00_product_company_level_delays(self):
""" In order to check schedule date, set product's Customer Lead Time
and company's Sales Safety Days."""
# Update company with Sales Safety Days
self.env.company.security_lead = 3.00
# Create sale order of product_1
order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'pricelist_id': self.company_data['default_pricelist'].id,
'picking_policy': 'direct',
'warehouse_id': self.company_data['default_warehouse'].id,
'order_line': [(0, 0, {'name': self.test_product_order.name,
'product_id': self.test_product_order.id,
'product_uom_qty': 10,
'product_uom': self.env.ref('uom.product_uom_unit').id,
'customer_lead': self.test_product_order.sale_delay})]})
# Confirm our standard sale order
order.action_confirm()
# Check whether the picking was created
self.assertTrue(order.picking_ids, "Picking should be created.")
# Check schedule date of picking
out_date = fields.Datetime.from_string(order.date_order) + timedelta(days=self.test_product_order.sale_delay) - timedelta(days=self.env.company.security_lead)
min_date = fields.Datetime.from_string(order.picking_ids[0].scheduled_date)
self.assertTrue(abs(min_date - out_date) <= timedelta(seconds=1), 'Schedule date of picking should be equal to: order date + Customer Lead Time - Sales Safety Days.')
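# (With sale_delay = 5 and security_lead = 3 as configured above, this amounts to scheduling the
# picking 5 - 3 = 2 days after the order date.)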
def test_01_product_route_level_delays(self):
""" In order to check schedule dates, set product's Customer Lead Time
and warehouse route's delay."""
# Update warehouse_1 with Outgoing Shipments: pick + pack + ship
self.company_data['default_warehouse'].write({'delivery_steps': 'pick_pack_ship'})
# Set delay on pull rule
for pull_rule in self.company_data['default_warehouse'].delivery_route_id.rule_ids:
pull_rule.write({'delay': 2})
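# With a 5-day customer lead time and a 2-day delay on each rule, the chain checked below is:
# ship scheduled at order date + 5 - 2 = +3 days, pack at ship - 2 days, pick at pack - 2 days.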
# Create sale order of product_1
order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'pricelist_id': self.company_data['default_pricelist'].id,
'picking_policy': 'direct',
'warehouse_id': self.company_data['default_warehouse'].id,
'order_line': [(0, 0, {'name': self.test_product_order.name,
'product_id': self.test_product_order.id,
'product_uom_qty': 5,
'product_uom': self.env.ref('uom.product_uom_unit').id,
'customer_lead': self.test_product_order.sale_delay})]})
# Confirm our standard sale order
order.action_confirm()
# Check whether the pickings were created
self.assertTrue(order.picking_ids, "Pickings should be created.")
# Check schedule date of ship type picking
out = order.picking_ids.filtered(lambda r: r.picking_type_id == self.company_data['default_warehouse'].out_type_id)
out_min_date = fields.Datetime.from_string(out.scheduled_date)
out_date = fields.Datetime.from_string(order.date_order) + timedelta(days=self.test_product_order.sale_delay) - timedelta(days=out.move_lines[0].rule_id.delay)
self.assertTrue(abs(out_min_date - out_date) <= timedelta(seconds=1), 'Schedule date of ship type picking should be equal to: order date + Customer Lead Time - pull rule delay.')
# Check schedule date of pack type picking
pack = order.picking_ids.filtered(lambda r: r.picking_type_id == self.company_data['default_warehouse'].pack_type_id)
pack_min_date = fields.Datetime.from_string(pack.scheduled_date)
pack_date = out_date - timedelta(days=pack.move_lines[0].rule_id.delay)
self.assertTrue(abs(pack_min_date - pack_date) <= timedelta(seconds=1), 'Schedule date of pack type picking should be equal to: Schedule date of ship type picking - pull rule delay.')
# Check schedule date of pick type picking
pick = order.picking_ids.filtered(lambda r: r.picking_type_id == self.company_data['default_warehouse'].pick_type_id)
pick_min_date = fields.Datetime.from_string(pick.scheduled_date)
pick_date = pack_date - timedelta(days=pick.move_lines[0].rule_id.delay)
self.assertTrue(abs(pick_min_date - pick_date) <= timedelta(seconds=1), 'Schedule date of pick type picking should be equal to: Schedule date of pack type picking - pull rule delay.')
def test_02_delivery_date_propagation(self):
""" In order to check deadline date propagation, set product's Customer Lead Time
and warehouse route's delay in stock rules"""
# Example :
# -> Set Warehouse with Outgoing Shipments : pick + pack + ship
# -> Set Delay : 5 days on stock rules
# -> Set Customer Lead Time on product : 30 days
# -> Set Sales Safety Days : 2 days
# -> Create an SO and confirm it with confirmation Date : 12/18/2018
# -> Pickings : OUT -> Scheduled Date : 01/12/2019, Deadline Date: 01/14/2019
# PACK -> Scheduled Date : 01/07/2019, Deadline Date: 01/09/2019
# PICK -> Scheduled Date : 01/02/2019, Deadline Date: 01/04/2019
# -> Now, change commitment_date in the sale order = out_deadline_date + 5 days
# -> Deadline Date should be changed and Scheduled date should be unchanged:
# OUT -> Deadline Date : 01/19/2019
# PACK -> Deadline Date : 01/14/2019
# PICK -> Deadline Date : 01/09/2019
# Update company with Sales Safety Days
self.env.company.security_lead = 2.00
# Update warehouse_1 with Outgoing Shipments: pick + pack + ship
self.company_data['default_warehouse'].write({'delivery_steps': 'pick_pack_ship'})
# Set delay on pull rule
self.company_data['default_warehouse'].delivery_route_id.rule_ids.write({'delay': 5})
# Update the product_1 with type and Customer Lead Time
self.test_product_order.write({'type': 'product', 'sale_delay': 30.0})
# Now, create sale order of product_1 with customer_lead set on product
order = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'pricelist_id': self.company_data['default_pricelist'].id,
'picking_policy': 'direct',
'warehouse_id': self.company_data['default_warehouse'].id,
'order_line': [(0, 0, {'name': self.test_product_order.name,
'product_id': self.test_product_order.id,
'product_uom_qty': 5,
'product_uom': self.env.ref('uom.product_uom_unit').id,
'customer_lead': self.test_product_order.sale_delay})]})
# Confirm our standard sale order
order.action_confirm()
# Check whether the pickings were created
self.assertTrue(order.picking_ids, "Pickings should be created.")
# Check schedule/deadline date of ship type picking
out = order.picking_ids.filtered(lambda r: r.picking_type_id == self.company_data['default_warehouse'].out_type_id)
deadline_date = order.date_order + timedelta(days=self.test_product_order.sale_delay) - timedelta(days=out.move_lines[0].rule_id.delay)
self.assertAlmostEqual(
out.date_deadline, deadline_date, delta=timedelta(seconds=1),
msg='Deadline date of ship type picking should be equal to: order date + Customer Lead Time - pull rule delay.')
out_scheduled_date = deadline_date - timedelta(days=self.env.company.security_lead)
self.assertAlmostEqual(
out.scheduled_date, out_scheduled_date, delta=timedelta(seconds=1),
msg='Schedule date of ship type picking should be equal to: order date + Customer Lead Time - pull rule delay - security_lead')
# Check schedule/deadline date of pack type picking
pack = order.picking_ids.filtered(lambda r: r.picking_type_id == self.company_data['default_warehouse'].pack_type_id)
pack_scheduled_date = out_scheduled_date - timedelta(days=pack.move_lines[0].rule_id.delay)
self.assertAlmostEqual(
pack.scheduled_date, pack_scheduled_date, delta=timedelta(seconds=1),
msg='Schedule date of pack type picking should be equal to: Schedule date of ship type picking - pull rule delay.')
deadline_date -= timedelta(days=pack.move_lines[0].rule_id.delay)
self.assertAlmostEqual(
pack.date_deadline, deadline_date, delta=timedelta(seconds=1),
msg='Deadline date of pack type picking should be equal to: Deadline date of ship type picking - pull rule delay.')
# Check schedule/deadline date of pick type picking
pick = order.picking_ids.filtered(lambda r: r.picking_type_id == self.company_data['default_warehouse'].pick_type_id)
pick_scheduled_date = pack_scheduled_date - timedelta(days=pick.move_lines[0].rule_id.delay)
self.assertAlmostEqual(
pick.scheduled_date, pick_scheduled_date, delta=timedelta(seconds=1),
msg='Schedule date of pick type picking should be equal to: Schedule date of pack type picking - pull rule delay.')
deadline_date -= timedelta(days=pick.move_lines[0].rule_id.delay)
self.assertAlmostEqual(
pick.date_deadline, deadline_date, delta=timedelta(seconds=1),
msg='Deadline date of pick type picking should be equal to: Deadline date of pack type picking - pull rule delay.')
# Now change the commitment date (Delivery Date) of the sale order
new_deadline = deadline_date + timedelta(days=5)
order.write({'commitment_date': new_deadline})
# Now check date_deadline of pick, pack and out are forced
# TODO : add note in case of change of deadline and check
self.assertEqual(out.date_deadline, new_deadline)
new_deadline -= timedelta(days=pack.move_lines[0].rule_id.delay)
self.assertEqual(pack.date_deadline, new_deadline)
new_deadline -= timedelta(days=pick.move_lines[0].rule_id.delay)
self.assertEqual(pick.date_deadline, new_deadline)
| 58.130653 | 11,568 |
| 5,853 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.stock_account.tests.test_anglo_saxon_valuation_reconciliation_common import ValuationReconciliationTestCommon
from datetime import timedelta
from odoo import fields
from odoo.tests import common, tagged
@tagged('post_install', '-at_install')
class TestSaleExpectedDate(ValuationReconciliationTestCommon):
def test_sale_order_expected_date(self):
""" Test expected date and effective date of Sales Orders """
Product = self.env['product.product']
product_A = Product.create({
'name': 'Product A',
'type': 'product',
'sale_delay': 5,
'uom_id': 1,
})
product_B = Product.create({
'name': 'Product B',
'type': 'product',
'sale_delay': 10,
'uom_id': 1,
})
product_C = Product.create({
'name': 'Product C',
'type': 'product',
'sale_delay': 15,
'uom_id': 1,
})
self.env['stock.quant']._update_available_quantity(product_A, self.company_data['default_warehouse'].lot_stock_id, 10)
self.env['stock.quant']._update_available_quantity(product_B, self.company_data['default_warehouse'].lot_stock_id, 10)
self.env['stock.quant']._update_available_quantity(product_C, self.company_data['default_warehouse'].lot_stock_id, 10)
sale_order = self.env['sale.order'].create({
'partner_id': self.env['res.partner'].create({'name': 'A Customer'}).id,
'picking_policy': 'direct',
'order_line': [
(0, 0, {'name': product_A.name, 'product_id': product_A.id, 'customer_lead': product_A.sale_delay, 'product_uom_qty': 5}),
(0, 0, {'name': product_B.name, 'product_id': product_B.id, 'customer_lead': product_B.sale_delay, 'product_uom_qty': 5}),
(0, 0, {'name': product_C.name, 'product_id': product_C.id, 'customer_lead': product_C.sale_delay, 'product_uom_qty': 5})
],
})
# if Shipping Policy is set to `direct`(when SO is in draft state) then expected date should be
# current date + shortest lead time from all of its order lines
expected_date = fields.Datetime.now() + timedelta(days=5)
self.assertAlmostEqual(expected_date, sale_order.expected_date,
msg="Wrong expected date on sale order!", delta=timedelta(seconds=1))
# if Shipping Policy is set to `one`(when SO is in draft state) then expected date should be
# current date + longest lead time from all of its order lines
sale_order.write({'picking_policy': 'one'})
expected_date = fields.Datetime.now() + timedelta(days=15)
self.assertAlmostEqual(expected_date, sale_order.expected_date,
msg="Wrong expected date on sale order!", delta=timedelta(seconds=1))
sale_order.action_confirm()
# Setting confirmation date of SO to 5 days from today so that the expected/effective date could be checked
# against real confirmation date
confirm_date = fields.Datetime.now() + timedelta(days=5)
sale_order.write({'date_order': confirm_date})
# if Shipping Policy is set to `one`(when SO is confirmed) then expected date should be
# SO confirmation date + longest lead time from all of its order lines
expected_date = confirm_date + timedelta(days=15)
self.assertAlmostEqual(expected_date, sale_order.expected_date,
msg="Wrong expected date on sale order!", delta=timedelta(seconds=1))
# if Shipping Policy is set to `direct`(when SO is confirmed) then expected date should be
# SO confirmation date + shortest lead time from all of its order lines
sale_order.write({'picking_policy': 'direct'})
expected_date = confirm_date + timedelta(days=5)
self.assertAlmostEqual(expected_date, sale_order.expected_date,
msg="Wrong expected date on sale order!", delta=timedelta(seconds=1))
# Check the effective date: it should be the date on which the first shipment was successfully delivered to the customer
picking = sale_order.picking_ids[0]
for ml in picking.move_line_ids:
ml.qty_done = ml.product_uom_qty
picking._action_done()
self.assertEqual(picking.state, 'done', "Picking not processed correctly!")
self.assertEqual(fields.Date.today(), sale_order.effective_date.date(), "Wrong effective date on sale order!")
def test_sale_order_commitment_date(self):
# In order to test the Commitment Date feature in Sales Orders in Odoo,
# I create a Sales Order with a Commitment Date of 2010-07-12
new_order = self.env['sale.order'].create({
'partner_id': self.env['res.partner'].create({'name': 'A Partner'}).id,
'order_line': [(0, 0, {
'name': "A product",
'product_id': self.env['product.product'].create({
'name': 'A product',
'type': 'product',
}).id,
'product_uom_qty': 1,
'price_unit': 750,
})],
'commitment_date': '2010-07-12',
})
# I confirm the Sales Order.
new_order.action_confirm()
# I verify that the Procurements and Stock Moves have been generated with the correct date
security_delay = timedelta(days=new_order.company_id.security_lead)
commitment_date = fields.Datetime.from_string(new_order.commitment_date)
right_date = commitment_date - security_delay
for line in new_order.order_line:
self.assertEqual(line.move_ids[0].date, right_date, "The expected date for the Stock Move is wrong")
| 50.895652 | 5,853 |
| 467 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = "sale.advance.payment.inv"
def _prepare_invoice_values(self, order, name, amount, so_line):
invoice_vals = super()._prepare_invoice_values(order, name, amount, so_line)
invoice_vals['invoice_incoterm_id'] = order.incoterm.id
return invoice_vals
| 35.923077 | 467 |
| 553 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class SaleOrderCancel(models.TransientModel):
_inherit = 'sale.order.cancel'
display_delivery_alert = fields.Boolean('Delivery Alert', compute='_compute_display_delivery_alert')
@api.depends('order_id')
def _compute_display_delivery_alert(self):
for wizard in self:
wizard.display_delivery_alert = bool(any(picking.state == 'done' for picking in wizard.order_id.picking_ids))
| 36.866667 | 553 |
| 594 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class StockRulesReport(models.TransientModel):
_inherit = 'stock.rules.report'
so_route_ids = fields.Many2many('stock.location.route', string='Apply specific routes',
domain="[('sale_selectable', '=', True)]", help="Choose to apply SO lines specific routes.")
def _prepare_report_data(self):
data = super(StockRulesReport, self)._prepare_report_data()
data['so_route_ids'] = self.so_route_ids.ids
return data
| 37.125 | 594 |
| 8,367 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from collections import defaultdict
from odoo import fields, models
from odoo.tools import float_is_zero, float_compare
from odoo.tools.misc import formatLang
class AccountMove(models.Model):
_inherit = 'account.move'
def _stock_account_get_last_step_stock_moves(self):
""" Overridden from stock_account.
Returns the stock moves associated with this invoice."""
rslt = super(AccountMove, self)._stock_account_get_last_step_stock_moves()
for invoice in self.filtered(lambda x: x.move_type == 'out_invoice'):
rslt += invoice.mapped('invoice_line_ids.sale_line_ids.move_ids').filtered(lambda x: x.state == 'done' and x.location_dest_id.usage == 'customer')
for invoice in self.filtered(lambda x: x.move_type == 'out_refund'):
rslt += invoice.mapped('reversed_entry_id.invoice_line_ids.sale_line_ids.move_ids').filtered(lambda x: x.state == 'done' and x.location_id.usage == 'customer')
# Add refunds generated from the SO
rslt += invoice.mapped('invoice_line_ids.sale_line_ids.move_ids').filtered(lambda x: x.state == 'done' and x.location_id.usage == 'customer')
return rslt
def _get_invoiced_lot_values(self):
""" Get and prepare data to show a table of invoiced lot on the invoice's report. """
self.ensure_one()
res = super(AccountMove, self)._get_invoiced_lot_values()
if self.state == 'draft' or not self.invoice_date or self.move_type not in ('out_invoice', 'out_refund'):
return res
current_invoice_amls = self.invoice_line_ids.filtered(lambda aml: not aml.display_type and aml.product_id and aml.product_id.type in ('consu', 'product') and aml.quantity)
all_invoices_amls = current_invoice_amls.sale_line_ids.invoice_lines.filtered(lambda aml: aml.move_id.state == 'posted').sorted(lambda aml: (aml.date, aml.move_name, aml.id))
index = all_invoices_amls.ids.index(current_invoice_amls[:1].id) if current_invoice_amls[:1] in all_invoices_amls else 0
previous_amls = all_invoices_amls[:index]
invoiced_qties = current_invoice_amls._get_invoiced_qty_per_product()
invoiced_products = invoiced_qties.keys()
if self.move_type == 'out_invoice':
            # filter out the invoices that have been fully refunded and re-invoiced; otherwise, the quantities
            # would be consumed by the reversed invoice and wouldn't be printed on the new draft invoice
previous_amls = previous_amls.filtered(lambda aml: aml.move_id.payment_state != 'reversed')
previous_qties_invoiced = previous_amls._get_invoiced_qty_per_product()
if self.move_type == 'out_refund':
            # we swap the sign because it's a refund; otherwise it would print negative numbers
for p in previous_qties_invoiced:
previous_qties_invoiced[p] = -previous_qties_invoiced[p]
for p in invoiced_qties:
invoiced_qties[p] = -invoiced_qties[p]
qties_per_lot = defaultdict(float)
previous_qties_delivered = defaultdict(float)
stock_move_lines = current_invoice_amls.sale_line_ids.move_ids.move_line_ids.filtered(lambda sml: sml.state == 'done' and sml.lot_id).sorted(lambda sml: (sml.date, sml.id))
for sml in stock_move_lines:
if sml.product_id not in invoiced_products or 'customer' not in {sml.location_id.usage, sml.location_dest_id.usage}:
continue
product = sml.product_id
product_uom = product.uom_id
qty_done = sml.product_uom_id._compute_quantity(sml.qty_done, product_uom)
            # is this a stock return given the document type (i.e. should it be counted positively or negatively)?
is_stock_return = (
self.move_type == 'out_invoice' and (sml.location_id.usage, sml.location_dest_id.usage) == ('customer', 'internal')
or
self.move_type == 'out_refund' and (sml.location_id.usage, sml.location_dest_id.usage) == ('internal', 'customer')
)
if is_stock_return:
returned_qty = min(qties_per_lot[sml.lot_id], qty_done)
qties_per_lot[sml.lot_id] -= returned_qty
qty_done = returned_qty - qty_done
previous_qty_invoiced = previous_qties_invoiced[product]
previous_qty_delivered = previous_qties_delivered[product]
# If we return more than currently delivered (i.e., qty_done < 0), we remove the surplus
# from the previously delivered (and qty_done becomes zero). If it's a delivery, we first
# try to reach the previous_qty_invoiced
if float_compare(qty_done, 0, precision_rounding=product_uom.rounding) < 0 or \
float_compare(previous_qty_delivered, previous_qty_invoiced, precision_rounding=product_uom.rounding) < 0:
previously_done = qty_done if is_stock_return else min(previous_qty_invoiced - previous_qty_delivered, qty_done)
previous_qties_delivered[product] += previously_done
qty_done -= previously_done
qties_per_lot[sml.lot_id] += qty_done
for lot, qty in qties_per_lot.items():
# access the lot as a superuser in order to avoid an error
# when a user prints an invoice without having the stock access
lot = lot.sudo()
if float_is_zero(invoiced_qties[lot.product_id], precision_rounding=lot.product_uom_id.rounding) \
or float_compare(qty, 0, precision_rounding=lot.product_uom_id.rounding) <= 0:
continue
invoiced_lot_qty = min(qty, invoiced_qties[lot.product_id])
invoiced_qties[lot.product_id] -= invoiced_lot_qty
res.append({
'product_name': lot.product_id.display_name,
'quantity': formatLang(self.env, invoiced_lot_qty, dp='Product Unit of Measure'),
'uom_name': lot.product_uom_id.name,
'lot_name': lot.name,
# The lot id is needed by localizations to inherit the method and add custom fields on the invoice's report.
'lot_id': lot.id,
})
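            # Illustrative shape of one appended entry (values are hypothetical):
            # {'product_name': 'Acoustic Bloc Screens', 'quantity': '2.000',
            #  'uom_name': 'Units', 'lot_name': 'LOT0001', 'lot_id': 42}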
return res
class AccountMoveLine(models.Model):
_inherit = "account.move.line"
def _sale_can_be_reinvoice(self):
self.ensure_one()
return not self.is_anglo_saxon_line and super(AccountMoveLine, self)._sale_can_be_reinvoice()
def _stock_account_get_anglo_saxon_price_unit(self):
self.ensure_one()
price_unit = super(AccountMoveLine, self)._stock_account_get_anglo_saxon_price_unit()
so_line = self.sale_line_ids and self.sale_line_ids[-1] or False
if so_line:
is_line_reversing = self.move_id.move_type == 'out_refund'
qty_to_invoice = self.product_uom_id._compute_quantity(self.quantity, self.product_id.uom_id)
account_moves = so_line.invoice_lines.move_id.filtered(lambda m: m.state == 'posted' and bool(m.reversed_entry_id) == is_line_reversing)
posted_cogs = account_moves.line_ids.filtered(lambda l: l.is_anglo_saxon_line and l.product_id == self.product_id and l.balance > 0)
qty_invoiced = sum([line.product_uom_id._compute_quantity(line.quantity, line.product_id.uom_id) for line in posted_cogs])
value_invoiced = sum(posted_cogs.mapped('balance'))
reversal_cogs = posted_cogs.move_id.reversal_move_id.line_ids.filtered(lambda l: l.is_anglo_saxon_line and l.product_id == self.product_id and l.balance > 0)
qty_invoiced -= sum([line.product_uom_id._compute_quantity(line.quantity, line.product_id.uom_id) for line in reversal_cogs])
value_invoiced -= sum(reversal_cogs.mapped('balance'))
product = self.product_id.with_company(self.company_id).with_context(is_returned=is_line_reversing, value_invoiced=value_invoiced)
average_price_unit = product._compute_average_price(qty_invoiced, qty_to_invoice, so_line.move_ids)
price_unit = self.product_id.uom_id.with_company(self.company_id)._compute_price(average_price_unit, self.product_uom_id)
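            # Sketch of the data gathered above (hypothetical figures): with 2 units already
            # invoiced through posted COGS lines (net of reversals) and 3 units on the current
            # line, _compute_average_price(2, 3, so_line.move_ids) returns the unit cost to use,
            # which is then converted from the product UoM to the invoice line UoM.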
return price_unit
| 60.194245 | 8,367 |
32,378 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
import logging
from datetime import timedelta
from collections import defaultdict
from odoo import api, fields, models, _
from odoo.tools import float_compare, float_round
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
class SaleOrder(models.Model):
_inherit = "sale.order"
@api.model
def _default_warehouse_id(self):
        # !!! Any change to the default value may have to be reflected
        # in _init_column() below.
return self.env.user._get_default_warehouse_id()
incoterm = fields.Many2one(
'account.incoterms', 'Incoterm',
help="International Commercial Terms are a series of predefined commercial terms used in international transactions.")
picking_policy = fields.Selection([
('direct', 'As soon as possible'),
('one', 'When all products are ready')],
string='Shipping Policy', required=True, readonly=True, default='direct',
        states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
        help="If you deliver all products at once, the delivery order will be scheduled based on the greatest "
"product lead time. Otherwise, it will be based on the shortest.")
warehouse_id = fields.Many2one(
'stock.warehouse', string='Warehouse',
required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
default=_default_warehouse_id, check_company=True)
picking_ids = fields.One2many('stock.picking', 'sale_id', string='Transfers')
delivery_count = fields.Integer(string='Delivery Orders', compute='_compute_picking_ids')
procurement_group_id = fields.Many2one('procurement.group', 'Procurement Group', copy=False)
effective_date = fields.Datetime("Effective Date", compute='_compute_effective_date', store=True, help="Completion date of the first delivery order.")
expected_date = fields.Datetime( help="Delivery date you can promise to the customer, computed from the minimum lead time of "
"the order lines in case of Service products. In case of shipping, the shipping policy of "
"the order will be taken into account to either use the minimum or maximum lead time of "
"the order lines.")
json_popover = fields.Char('JSON data for the popover widget', compute='_compute_json_popover')
show_json_popover = fields.Boolean('Has late picking', compute='_compute_json_popover')
def _init_column(self, column_name):
""" Ensure the default warehouse_id is correctly assigned
At column initialization, the ir.model.fields for res.users.property_warehouse_id isn't created,
which means trying to read the property field to get the default value will crash.
We therefore enforce the default here, without going through
the default function on the warehouse_id field.
"""
if column_name != "warehouse_id":
return super(SaleOrder, self)._init_column(column_name)
field = self._fields[column_name]
default = self.env['stock.warehouse'].search([('company_id', '=', self.env.company.id)], limit=1)
value = field.convert_to_write(default, self)
value = field.convert_to_column(value, self)
if value is not None:
_logger.debug("Table '%s': setting default value of new column %s to %r",
self._table, column_name, value)
query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" IS NULL' % (
self._table, column_name, field.column_format, column_name)
self._cr.execute(query, (value,))
@api.depends('picking_ids.date_done')
def _compute_effective_date(self):
for order in self:
pickings = order.picking_ids.filtered(lambda x: x.state == 'done' and x.location_dest_id.usage == 'customer')
dates_list = [date for date in pickings.mapped('date_done') if date]
order.effective_date = min(dates_list, default=False)
@api.depends('picking_policy')
def _compute_expected_date(self):
super(SaleOrder, self)._compute_expected_date()
for order in self:
dates_list = []
for line in order.order_line.filtered(lambda x: x.state != 'cancel' and not x._is_delivery() and not x.display_type):
dt = line._expected_date()
dates_list.append(dt)
if dates_list:
expected_date = min(dates_list) if order.picking_policy == 'direct' else max(dates_list)
order.expected_date = fields.Datetime.to_string(expected_date)
@api.model
def create(self, vals):
if 'warehouse_id' not in vals and 'company_id' in vals:
user = self.env['res.users'].browse(vals.get('user_id', False))
vals['warehouse_id'] = user.with_company(vals.get('company_id'))._get_default_warehouse_id().id
return super().create(vals)
def write(self, values):
if values.get('order_line') and self.state == 'sale':
for order in self:
pre_order_line_qty = {order_line: order_line.product_uom_qty for order_line in order.mapped('order_line') if not order_line.is_expense}
if values.get('partner_shipping_id'):
new_partner = self.env['res.partner'].browse(values.get('partner_shipping_id'))
for record in self:
picking = record.mapped('picking_ids').filtered(lambda x: x.state not in ('done', 'cancel'))
addresses = (record.partner_shipping_id.display_name, new_partner.display_name)
message = _("""The delivery address has been changed on the Sales Order<br/>
From <strong>"%s"</strong> To <strong>"%s"</strong>,
You should probably update the partner on this document.""") % addresses
picking.activity_schedule('mail.mail_activity_data_warning', note=message, user_id=self.env.user.id)
if values.get('commitment_date'):
            # propagate commitment_date as the deadline of the related stock moves.
            # TODO: Log a note on each downstream document
self.order_line.move_ids.date_deadline = fields.Datetime.to_datetime(values.get('commitment_date'))
res = super(SaleOrder, self).write(values)
if values.get('order_line') and self.state == 'sale':
rounding = self.env['decimal.precision'].precision_get('Product Unit of Measure')
for order in self:
to_log = {}
for order_line in order.order_line:
if order_line.display_type:
continue
if float_compare(order_line.product_uom_qty, pre_order_line_qty.get(order_line, 0.0), precision_rounding=order_line.product_uom.rounding or rounding) < 0:
to_log[order_line] = (order_line.product_uom_qty, pre_order_line_qty.get(order_line, 0.0))
if to_log:
documents = self.env['stock.picking'].sudo()._log_activity_get_documents(to_log, 'move_ids', 'UP')
documents = {k:v for k, v in documents.items() if k[0].state != 'cancel'}
order._log_decrease_ordered_quantity(documents)
return res
def _compute_json_popover(self):
for order in self:
late_stock_picking = order.picking_ids.filtered(lambda p: p.delay_alert_date)
order.json_popover = json.dumps({
'popoverTemplate': 'sale_stock.DelayAlertWidget',
'late_elements': [{
'id': late_move.id,
'name': late_move.display_name,
'model': 'stock.picking',
} for late_move in late_stock_picking
]
})
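            # Illustrative payload produced above for an order with one late delivery
            # (ids and names are hypothetical):
            # {"popoverTemplate": "sale_stock.DelayAlertWidget",
            #  "late_elements": [{"id": 7, "name": "WH/OUT/00007", "model": "stock.picking"}]}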
order.show_json_popover = bool(late_stock_picking)
def _action_confirm(self):
self.order_line._action_launch_stock_rule()
return super(SaleOrder, self)._action_confirm()
@api.depends('picking_ids')
def _compute_picking_ids(self):
for order in self:
order.delivery_count = len(order.picking_ids)
@api.onchange('company_id')
def _onchange_company_id(self):
if self.company_id:
warehouse_id = self.env['ir.default'].get_model_defaults('sale.order').get('warehouse_id')
self.warehouse_id = warehouse_id or self.user_id.with_company(self.company_id.id)._get_default_warehouse_id().id
@api.onchange('user_id')
def onchange_user_id(self):
super().onchange_user_id()
if self.state in ['draft','sent']:
self.warehouse_id = self.user_id.with_company(self.company_id.id)._get_default_warehouse_id().id
@api.onchange('partner_shipping_id')
def _onchange_partner_shipping_id(self):
res = {}
pickings = self.picking_ids.filtered(
lambda p: p.state not in ['done', 'cancel'] and p.partner_id != self.partner_shipping_id
)
if pickings:
res['warning'] = {
'title': _('Warning!'),
'message': _(
'Do not forget to change the partner on the following delivery orders: %s'
) % (','.join(pickings.mapped('name')))
}
return res
def action_view_delivery(self):
return self._get_action_view_picking(self.picking_ids)
def _action_cancel(self):
documents = None
for sale_order in self:
if sale_order.state == 'sale' and sale_order.order_line:
sale_order_lines_quantities = {order_line: (order_line.product_uom_qty, 0) for order_line in sale_order.order_line}
documents = self.env['stock.picking'].with_context(include_draft_documents=True)._log_activity_get_documents(sale_order_lines_quantities, 'move_ids', 'UP')
self.picking_ids.filtered(lambda p: p.state != 'done').action_cancel()
if documents:
filtered_documents = {}
for (parent, responsible), rendering_context in documents.items():
if parent._name == 'stock.picking':
if parent.state == 'cancel':
continue
filtered_documents[(parent, responsible)] = rendering_context
self._log_decrease_ordered_quantity(filtered_documents, cancel=True)
return super()._action_cancel()
def _get_action_view_picking(self, pickings):
'''
        This function returns an action that displays the existing delivery orders
        of the given sales order ids. It can either be in a list or in a form
        view, if there is only one delivery order to show.
'''
action = self.env["ir.actions.actions"]._for_xml_id("stock.action_picking_tree_all")
if len(pickings) > 1:
action['domain'] = [('id', 'in', pickings.ids)]
elif pickings:
form_view = [(self.env.ref('stock.view_picking_form').id, 'form')]
if 'views' in action:
action['views'] = form_view + [(state,view) for state,view in action['views'] if view != 'form']
else:
action['views'] = form_view
action['res_id'] = pickings.id
# Prepare the context.
picking_id = pickings.filtered(lambda l: l.picking_type_id.code == 'outgoing')
if picking_id:
picking_id = picking_id[0]
else:
picking_id = pickings[0]
action['context'] = dict(self._context, default_partner_id=self.partner_id.id, default_picking_type_id=picking_id.picking_type_id.id, default_origin=self.name, default_group_id=picking_id.group_id.id)
return action
def _prepare_invoice(self):
invoice_vals = super(SaleOrder, self)._prepare_invoice()
invoice_vals['invoice_incoterm_id'] = self.incoterm.id
return invoice_vals
@api.model
def _get_customer_lead(self, product_tmpl_id):
super(SaleOrder, self)._get_customer_lead(product_tmpl_id)
return product_tmpl_id.sale_delay
def _log_decrease_ordered_quantity(self, documents, cancel=False):
def _render_note_exception_quantity_so(rendering_context):
order_exceptions, visited_moves = rendering_context
visited_moves = list(visited_moves)
visited_moves = self.env[visited_moves[0]._name].concat(*visited_moves)
order_line_ids = self.env['sale.order.line'].browse([order_line.id for order in order_exceptions.values() for order_line in order[0]])
sale_order_ids = order_line_ids.mapped('order_id')
impacted_pickings = visited_moves.filtered(lambda m: m.state not in ('done', 'cancel')).mapped('picking_id')
values = {
'sale_order_ids': sale_order_ids,
'order_exceptions': order_exceptions.values(),
'impacted_pickings': impacted_pickings,
'cancel': cancel
}
return self.env.ref('sale_stock.exception_on_so')._render(values=values)
self.env['stock.picking']._log_activity(_render_note_exception_quantity_so, documents)
def _show_cancel_wizard(self):
res = super(SaleOrder, self)._show_cancel_wizard()
for order in self:
if any(picking.state == 'done' for picking in order.picking_ids) and not order._context.get('disable_cancel_warning'):
return True
return res
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
qty_delivered_method = fields.Selection(selection_add=[('stock_move', 'Stock Moves')])
route_id = fields.Many2one('stock.location.route', string='Route', domain=[('sale_selectable', '=', True)], ondelete='restrict', check_company=True)
move_ids = fields.One2many('stock.move', 'sale_line_id', string='Stock Moves')
product_type = fields.Selection(related='product_id.detailed_type')
virtual_available_at_date = fields.Float(compute='_compute_qty_at_date', digits='Product Unit of Measure')
scheduled_date = fields.Datetime(compute='_compute_qty_at_date')
forecast_expected_date = fields.Datetime(compute='_compute_qty_at_date')
free_qty_today = fields.Float(compute='_compute_qty_at_date', digits='Product Unit of Measure')
qty_available_today = fields.Float(compute='_compute_qty_at_date')
warehouse_id = fields.Many2one(related='order_id.warehouse_id')
qty_to_deliver = fields.Float(compute='_compute_qty_to_deliver', digits='Product Unit of Measure')
is_mto = fields.Boolean(compute='_compute_is_mto')
display_qty_widget = fields.Boolean(compute='_compute_qty_to_deliver')
@api.depends('product_type', 'product_uom_qty', 'qty_delivered', 'state', 'move_ids', 'product_uom')
def _compute_qty_to_deliver(self):
"""Compute the visibility of the inventory widget."""
for line in self:
line.qty_to_deliver = line.product_uom_qty - line.qty_delivered
if line.state in ('draft', 'sent', 'sale') and line.product_type == 'product' and line.product_uom and line.qty_to_deliver > 0:
if line.state == 'sale' and not line.move_ids:
line.display_qty_widget = False
else:
line.display_qty_widget = True
else:
line.display_qty_widget = False
@api.depends(
'product_id', 'customer_lead', 'product_uom_qty', 'product_uom', 'order_id.commitment_date',
'move_ids', 'move_ids.forecast_expected_date', 'move_ids.forecast_availability')
def _compute_qty_at_date(self):
""" Compute the quantity forecasted of product at delivery date. There are
two cases:
1. The quotation has a commitment_date, we take it as delivery date
2. The quotation hasn't commitment_date, we compute the estimated delivery
date based on lead time"""
treated = self.browse()
        # If the state is already 'sale', the picking is created and a simple forecasted quantity isn't enough.
        # In that case, use the forecasted data of the related stock.move.
for line in self.filtered(lambda l: l.state == 'sale'):
if not line.display_qty_widget:
continue
moves = line.move_ids.filtered(lambda m: m.product_id == line.product_id)
line.forecast_expected_date = max(moves.filtered("forecast_expected_date").mapped("forecast_expected_date"), default=False)
line.qty_available_today = 0
line.free_qty_today = 0
for move in moves:
line.qty_available_today += move.product_uom._compute_quantity(move.reserved_availability, line.product_uom)
line.free_qty_today += move.product_id.uom_id._compute_quantity(move.forecast_availability, line.product_uom)
line.scheduled_date = line.order_id.commitment_date or line._expected_date()
line.virtual_available_at_date = False
treated |= line
qty_processed_per_product = defaultdict(lambda: 0)
grouped_lines = defaultdict(lambda: self.env['sale.order.line'])
        # We first loop over the SO lines to group them by warehouse and scheduled
        # date in order to batch the read of the computed quantity fields.
for line in self.filtered(lambda l: l.state in ('draft', 'sent')):
if not (line.product_id and line.display_qty_widget):
continue
grouped_lines[(line.warehouse_id.id, line.order_id.commitment_date or line._expected_date())] |= line
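        # Illustrative example: two draft lines for the same product, warehouse and
        # scheduled date share a single read of qty_available / free_qty / virtual_available;
        # the second line then sees those quantities reduced by the first line's ordered
        # quantity through qty_processed_per_product.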
for (warehouse, scheduled_date), lines in grouped_lines.items():
product_qties = lines.mapped('product_id').with_context(to_date=scheduled_date, warehouse=warehouse).read([
'qty_available',
'free_qty',
'virtual_available',
])
qties_per_product = {
product['id']: (product['qty_available'], product['free_qty'], product['virtual_available'])
for product in product_qties
}
for line in lines:
line.scheduled_date = scheduled_date
qty_available_today, free_qty_today, virtual_available_at_date = qties_per_product[line.product_id.id]
line.qty_available_today = qty_available_today - qty_processed_per_product[line.product_id.id]
line.free_qty_today = free_qty_today - qty_processed_per_product[line.product_id.id]
line.virtual_available_at_date = virtual_available_at_date - qty_processed_per_product[line.product_id.id]
line.forecast_expected_date = False
product_qty = line.product_uom_qty
if line.product_uom and line.product_id.uom_id and line.product_uom != line.product_id.uom_id:
line.qty_available_today = line.product_id.uom_id._compute_quantity(line.qty_available_today, line.product_uom)
line.free_qty_today = line.product_id.uom_id._compute_quantity(line.free_qty_today, line.product_uom)
line.virtual_available_at_date = line.product_id.uom_id._compute_quantity(line.virtual_available_at_date, line.product_uom)
product_qty = line.product_uom._compute_quantity(product_qty, line.product_id.uom_id)
qty_processed_per_product[line.product_id.id] += product_qty
treated |= lines
remaining = (self - treated)
remaining.virtual_available_at_date = False
remaining.scheduled_date = False
remaining.forecast_expected_date = False
remaining.free_qty_today = False
remaining.qty_available_today = False
@api.depends('product_id', 'route_id', 'order_id.warehouse_id', 'product_id.route_ids')
def _compute_is_mto(self):
""" Verify the route of the product based on the warehouse
set 'is_available' at True if the product availibility in stock does
not need to be verified, which is the case in MTO, Cross-Dock or Drop-Shipping
"""
self.is_mto = False
for line in self:
if not line.display_qty_widget:
continue
product = line.product_id
product_routes = line.route_id or (product.route_ids + product.categ_id.total_route_ids)
# Check MTO
mto_route = line.order_id.warehouse_id.mto_pull_id.route_id
if not mto_route:
try:
mto_route = self.env['stock.warehouse']._find_global_route('stock.route_warehouse0_mto', _('Make To Order'))
except UserError:
# if route MTO not found in ir_model_data, we treat the product as in MTS
pass
if mto_route and mto_route in product_routes:
line.is_mto = True
else:
line.is_mto = False
@api.depends('product_id')
def _compute_qty_delivered_method(self):
""" Stock module compute delivered qty for product [('type', 'in', ['consu', 'product'])]
For SO line coming from expense, no picking should be generate: we don't manage stock for
thoses lines, even if the product is a storable.
"""
super(SaleOrderLine, self)._compute_qty_delivered_method()
for line in self:
if not line.is_expense and line.product_id.type in ['consu', 'product']:
line.qty_delivered_method = 'stock_move'
@api.depends('move_ids.state', 'move_ids.scrapped', 'move_ids.product_uom_qty', 'move_ids.product_uom')
def _compute_qty_delivered(self):
super(SaleOrderLine, self)._compute_qty_delivered()
for line in self: # TODO: maybe one day, this should be done in SQL for performance sake
if line.qty_delivered_method == 'stock_move':
qty = 0.0
outgoing_moves, incoming_moves = line._get_outgoing_incoming_moves()
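                # Illustrative example: 5.0 units delivered to the customer ('done' outgoing
                # moves) and 2.0 units returned with to_refund set give qty_delivered
                # = 5.0 - 2.0 = 3.0, with quantities converted to the sale line UoM
                # using HALF-UP rounding.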
for move in outgoing_moves:
if move.state != 'done':
continue
qty += move.product_uom._compute_quantity(move.product_uom_qty, line.product_uom, rounding_method='HALF-UP')
for move in incoming_moves:
if move.state != 'done':
continue
qty -= move.product_uom._compute_quantity(move.product_uom_qty, line.product_uom, rounding_method='HALF-UP')
line.qty_delivered = qty
@api.model_create_multi
def create(self, vals_list):
lines = super(SaleOrderLine, self).create(vals_list)
lines.filtered(lambda line: line.state == 'sale')._action_launch_stock_rule()
return lines
def write(self, values):
lines = self.env['sale.order.line']
if 'product_uom_qty' in values:
lines = self.filtered(lambda r: r.state == 'sale' and not r.is_expense)
if 'product_packaging_id' in values:
self.move_ids.filtered(
lambda m: m.state not in ['cancel', 'done']
).product_packaging_id = values['product_packaging_id']
previous_product_uom_qty = {line.id: line.product_uom_qty for line in lines}
res = super(SaleOrderLine, self).write(values)
if lines:
lines._action_launch_stock_rule(previous_product_uom_qty)
if 'customer_lead' in values and self.state == 'sale' and not self.order_id.commitment_date:
# Propagate deadline on related stock move
self.move_ids.date_deadline = self.order_id.date_order + timedelta(days=self.customer_lead or 0.0)
return res
@api.depends('order_id.state')
def _compute_invoice_status(self):
def check_moves_state(moves):
# All moves states are either 'done' or 'cancel', and there is at least one 'done'
at_least_one_done = False
for move in moves:
if move.state not in ['done', 'cancel']:
return False
at_least_one_done = at_least_one_done or move.state == 'done'
return at_least_one_done
super(SaleOrderLine, self)._compute_invoice_status()
for line in self:
# We handle the following specific situation: a physical product is partially delivered,
# but we would like to set its invoice status to 'Fully Invoiced'. The use case is for
# products sold by weight, where the delivered quantity rarely matches exactly the
# quantity ordered.
if line.order_id.state == 'done'\
and line.invoice_status == 'no'\
and line.product_id.type in ['consu', 'product']\
and line.product_id.invoice_policy == 'delivery'\
and line.move_ids \
and check_moves_state(line.move_ids):
line.invoice_status = 'invoiced'
@api.depends('move_ids')
def _compute_product_updatable(self):
for line in self:
if not line.move_ids.filtered(lambda m: m.state != 'cancel'):
super(SaleOrderLine, line)._compute_product_updatable()
else:
line.product_updatable = False
@api.onchange('product_id')
def _onchange_product_id_set_customer_lead(self):
self.customer_lead = self.product_id.sale_delay
def _prepare_procurement_values(self, group_id=False):
""" Prepare specific key for moves or other components that will be created from a stock rule
comming from a sale order line. This method could be override in order to add other custom key that could
be used in move/po creation.
"""
values = super(SaleOrderLine, self)._prepare_procurement_values(group_id)
self.ensure_one()
        # Use the delivery date if there is one, else use date_order plus the lead time
date_deadline = self.order_id.commitment_date or (self.order_id.date_order + timedelta(days=self.customer_lead or 0.0))
date_planned = date_deadline - timedelta(days=self.order_id.company_id.security_lead)
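        # Worked example with hypothetical values: commitment_date = 2021-06-10 and
        # security_lead = 1.0 day give date_deadline = 2021-06-10 and
        # date_planned = 2021-06-09; without a commitment date, the deadline is
        # date_order plus customer_lead days instead.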
values.update({
'group_id': group_id,
'sale_line_id': self.id,
'date_planned': date_planned,
'date_deadline': date_deadline,
'route_ids': self.route_id,
'warehouse_id': self.order_id.warehouse_id or False,
'partner_id': self.order_id.partner_shipping_id.id,
'product_description_variants': self.with_context(lang=self.order_id.partner_id.lang)._get_sale_order_line_multiline_description_variants(),
'company_id': self.order_id.company_id,
'product_packaging_id': self.product_packaging_id,
'sequence': self.sequence,
})
return values
def _get_qty_procurement(self, previous_product_uom_qty=False):
self.ensure_one()
qty = 0.0
outgoing_moves, incoming_moves = self._get_outgoing_incoming_moves()
for move in outgoing_moves:
qty += move.product_uom._compute_quantity(move.product_uom_qty, self.product_uom, rounding_method='HALF-UP')
for move in incoming_moves:
qty -= move.product_uom._compute_quantity(move.product_uom_qty, self.product_uom, rounding_method='HALF-UP')
return qty
def _get_outgoing_incoming_moves(self):
outgoing_moves = self.env['stock.move']
incoming_moves = self.env['stock.move']
moves = self.move_ids.filtered(lambda r: r.state != 'cancel' and not r.scrapped and self.product_id == r.product_id)
if self._context.get('accrual_entry_date'):
moves = moves.filtered(lambda r: fields.Date.context_today(r, r.date) <= self._context['accrual_entry_date'])
for move in moves:
if move.location_dest_id.usage == "customer":
if not move.origin_returned_move_id or (move.origin_returned_move_id and move.to_refund):
outgoing_moves |= move
elif move.location_dest_id.usage != "customer" and move.to_refund:
incoming_moves |= move
return outgoing_moves, incoming_moves
def _get_procurement_group(self):
return self.order_id.procurement_group_id
def _prepare_procurement_group_vals(self):
return {
'name': self.order_id.name,
'move_type': self.order_id.picking_policy,
'sale_id': self.order_id.id,
'partner_id': self.order_id.partner_shipping_id.id,
}
def _action_launch_stock_rule(self, previous_product_uom_qty=False):
"""
Launch procurement group run method with required/custom fields genrated by a
sale order line. procurement group will launch '_run_pull', '_run_buy' or '_run_manufacture'
depending on the sale order line product rule.
"""
if self._context.get("skip_procurement"):
return True
precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
procurements = []
for line in self:
line = line.with_company(line.company_id)
if line.state != 'sale' or not line.product_id.type in ('consu','product'):
continue
qty = line._get_qty_procurement(previous_product_uom_qty)
if float_compare(qty, line.product_uom_qty, precision_digits=precision) == 0:
continue
group_id = line._get_procurement_group()
if not group_id:
group_id = self.env['procurement.group'].create(line._prepare_procurement_group_vals())
line.order_id.procurement_group_id = group_id
else:
# In case the procurement group is already created and the order was
# cancelled, we need to update certain values of the group.
updated_vals = {}
if group_id.partner_id != line.order_id.partner_shipping_id:
updated_vals.update({'partner_id': line.order_id.partner_shipping_id.id})
if group_id.move_type != line.order_id.picking_policy:
updated_vals.update({'move_type': line.order_id.picking_policy})
if updated_vals:
group_id.write(updated_vals)
values = line._prepare_procurement_values(group_id=group_id)
product_qty = line.product_uom_qty - qty
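            # Illustrative example: a confirmed line ordered for 10 units with 4 units
            # already covered by existing, non-cancelled moves gives product_qty = 10 - 4 = 6,
            # expressed below in the procurement UoM returned by _adjust_uom_quantities().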
line_uom = line.product_uom
quant_uom = line.product_id.uom_id
product_qty, procurement_uom = line_uom._adjust_uom_quantities(product_qty, quant_uom)
procurements.append(self.env['procurement.group'].Procurement(
line.product_id, product_qty, procurement_uom,
line.order_id.partner_shipping_id.property_stock_customer,
line.product_id.display_name, line.order_id.name, line.order_id.company_id, values))
if procurements:
self.env['procurement.group'].run(procurements)
# This next block is currently needed only because the scheduler trigger is done by picking confirmation rather than stock.move confirmation
orders = self.mapped('order_id')
for order in orders:
pickings_to_confirm = order.picking_ids.filtered(lambda p: p.state not in ['cancel', 'done'])
if pickings_to_confirm:
# Trigger the Scheduler for Pickings
pickings_to_confirm.action_confirm()
return True
def _update_line_quantity(self, values):
precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
line_products = self.filtered(lambda l: l.product_id.type in ['product', 'consu'])
if line_products.mapped('qty_delivered') and float_compare(values['product_uom_qty'], max(line_products.mapped('qty_delivered')), precision_digits=precision) == -1:
raise UserError(_('You cannot decrease the ordered quantity below the delivered quantity.\n'
'Create a return first.'))
super(SaleOrderLine, self)._update_line_quantity(values)
| 53.517355 | 32,378 |
631 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class ProductTemplate(models.Model):
_inherit = 'product.template'
@api.onchange('type')
def _onchange_type(self):
""" We want to prevent storable product to be expensed, since it make no sense as when confirm
expenses, the product is already out of our stock.
"""
res = super(ProductTemplate, self)._onchange_type()
if self.type == 'product':
self.expense_policy = 'no'
self.service_type = 'manual'
return res
| 33.210526 | 631 |
7,705 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from collections import defaultdict
from odoo import api, fields, models, _
from odoo.tools.sql import column_exists, create_column
class StockLocationRoute(models.Model):
_inherit = "stock.location.route"
sale_selectable = fields.Boolean("Selectable on Sales Order Line")
class StockMove(models.Model):
_inherit = "stock.move"
sale_line_id = fields.Many2one('sale.order.line', 'Sale Line', index=True)
@api.model
def _prepare_merge_moves_distinct_fields(self):
distinct_fields = super(StockMove, self)._prepare_merge_moves_distinct_fields()
distinct_fields.append('sale_line_id')
return distinct_fields
def _get_related_invoices(self):
""" Overridden from stock_account to return the customer invoices
related to this stock move.
"""
rslt = super(StockMove, self)._get_related_invoices()
invoices = self.mapped('picking_id.sale_id.invoice_ids').filtered(lambda x: x.state == 'posted')
rslt += invoices
#rslt += invoices.mapped('reverse_entry_ids')
return rslt
def _get_source_document(self):
res = super()._get_source_document()
return self.sale_line_id.order_id or res
def _assign_picking_post_process(self, new=False):
super(StockMove, self)._assign_picking_post_process(new=new)
if new:
picking_id = self.mapped('picking_id')
sale_order_ids = self.mapped('sale_line_id.order_id')
for sale_order_id in sale_order_ids:
picking_id.message_post_with_view(
'mail.message_origin_link',
values={'self': picking_id, 'origin': sale_order_id},
subtype_id=self.env.ref('mail.mt_note').id)
class ProcurementGroup(models.Model):
_inherit = 'procurement.group'
sale_id = fields.Many2one('sale.order', 'Sale Order')
class StockRule(models.Model):
_inherit = 'stock.rule'
def _get_custom_move_fields(self):
fields = super(StockRule, self)._get_custom_move_fields()
fields += ['sale_line_id', 'partner_id', 'sequence']
return fields
class StockPicking(models.Model):
_inherit = 'stock.picking'
sale_id = fields.Many2one(related="group_id.sale_id", string="Sales Order", store=True, readonly=False)
def _auto_init(self):
"""
Create related field here, too slow
when computing it afterwards through _compute_related.
Since group_id.sale_id is created in this module,
no need for an UPDATE statement.
"""
if not column_exists(self.env.cr, 'stock_picking', 'sale_id'):
create_column(self.env.cr, 'stock_picking', 'sale_id', 'int4')
return super()._auto_init()
def _action_done(self):
res = super()._action_done()
sale_order_lines_vals = []
for move in self.move_lines:
sale_order = move.picking_id.sale_id
            # Create a new SO line only when the picking is linked to a sale order and
            # for moves with a done quantity that are not already linked to a SO line.
if not sale_order or move.location_dest_id.usage != 'customer' or move.sale_line_id or not move.quantity_done:
continue
product = move.product_id
so_line_vals = {
'move_ids': [(4, move.id, 0)],
'name': product.display_name,
'order_id': sale_order.id,
'product_id': product.id,
'product_uom_qty': 0,
'qty_delivered': move.quantity_done,
'product_uom': move.product_uom.id,
}
if product.invoice_policy == 'delivery':
# Check if there is already a SO line for this product to get
# back its unit price (in case it was manually updated).
so_line = sale_order.order_line.filtered(lambda sol: sol.product_id == product)
if so_line:
so_line_vals['price_unit'] = so_line[0].price_unit
elif product.invoice_policy == 'order':
# No unit price if the product is invoiced on the ordered qty.
so_line_vals['price_unit'] = 0
sale_order_lines_vals.append(so_line_vals)
if sale_order_lines_vals:
self.env['sale.order.line'].with_context(skip_procurement=True).create(sale_order_lines_vals)
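            # Illustrative outcome (hypothetical figures): delivering 2 extra units of a
            # product invoiced on delivered quantities adds a line with product_uom_qty = 0,
            # qty_delivered = 2 and the unit price copied from an existing line for the same
            # product when one exists (0 when the product is invoiced on ordered quantities).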
return res
def _log_less_quantities_than_expected(self, moves):
""" Log an activity on sale order that are linked to moves. The
note summarize the real proccessed quantity and promote a
manual action.
:param dict moves: a dict with a move as key and tuple with
new and old quantity as value. eg: {move_1 : (4, 5)}
"""
def _keys_in_sorted(sale_line):
""" sort by order_id and the sale_person on the order """
return (sale_line.order_id.id, sale_line.order_id.user_id.id)
def _keys_in_groupby(sale_line):
""" group by order_id and the sale_person on the order """
return (sale_line.order_id, sale_line.order_id.user_id)
def _render_note_exception_quantity(moves_information):
""" Generate a note with the picking on which the action
occurred and a summary on impacted quantity that are
related to the sale order where the note will be logged.
:param moves_information dict:
{'move_id': ['sale_order_line_id', (new_qty, old_qty)], ..}
:return: an html string with all the information encoded.
:rtype: str
"""
origin_moves = self.env['stock.move'].browse([move.id for move_orig in moves_information.values() for move in move_orig[0]])
origin_picking = origin_moves.mapped('picking_id')
values = {
'origin_moves': origin_moves,
'origin_picking': origin_picking,
'moves_information': moves_information.values(),
}
return self.env.ref('sale_stock.exception_on_picking')._render(values=values)
documents = self._log_activity_get_documents(moves, 'sale_line_id', 'DOWN', _keys_in_sorted, _keys_in_groupby)
self._log_activity(_render_note_exception_quantity, documents)
return super(StockPicking, self)._log_less_quantities_than_expected(moves)
class ProductionLot(models.Model):
_inherit = 'stock.production.lot'
sale_order_ids = fields.Many2many('sale.order', string="Sales Orders", compute='_compute_sale_order_ids')
sale_order_count = fields.Integer('Sale order count', compute='_compute_sale_order_ids')
@api.depends('name')
def _compute_sale_order_ids(self):
sale_orders = defaultdict(lambda: self.env['sale.order'])
for move_line in self.env['stock.move.line'].search([('lot_id', 'in', self.ids), ('state', '=', 'done')]):
move = move_line.move_id
if move.picking_id.location_dest_id.usage == 'customer' and move.sale_line_id.order_id:
sale_orders[move_line.lot_id.id] |= move.sale_line_id.order_id
for lot in self:
lot.sale_order_ids = sale_orders[lot.id]
lot.sale_order_count = len(lot.sale_order_ids)
def action_view_so(self):
self.ensure_one()
action = self.env["ir.actions.actions"]._for_xml_id("sale.action_orders")
action['domain'] = [('id', 'in', self.mapped('sale_order_ids.id'))]
action['context'] = dict(self._context, create=False)
return action
| 42.569061 | 7,705 |
559 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class Company(models.Model):
_inherit = 'res.company'
security_lead = fields.Float(
'Sales Safety Days', default=0.0, required=True,
help="Margin of error for dates promised to customers. "
"Products will be scheduled for procurement and delivery "
"that many days earlier than the actual promised date, to "
"cope with unexpected delays in the supply chain.")
| 37.266667 | 559 |
951 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields
class Users(models.Model):
_inherit = ['res.users']
property_warehouse_id = fields.Many2one('stock.warehouse', string='Default Warehouse', company_dependent=True, check_company=True)
def _get_default_warehouse_id(self):
if self.property_warehouse_id:
return self.property_warehouse_id
# !!! Any change to the following search domain should probably
# be also applied in sale_stock/models/sale_order.py/_init_column.
return self.env['stock.warehouse'].search([('company_id', '=', self.env.company.id)], limit=1)
@property
def SELF_READABLE_FIELDS(self):
return super().SELF_READABLE_FIELDS + ['property_warehouse_id']
@property
def SELF_WRITEABLE_FIELDS(self):
return super().SELF_WRITEABLE_FIELDS + ['property_warehouse_id']
| 38.04 | 951 |
1,220 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
security_lead = fields.Float(related='company_id.security_lead', string="Security Lead Time", readonly=False)
group_display_incoterm = fields.Boolean("Incoterms", implied_group='sale_stock.group_display_incoterm')
use_security_lead = fields.Boolean(
string="Security Lead Time for Sales",
config_parameter='sale_stock.use_security_lead',
help="Margin of error for dates promised to customers. Products will be scheduled for delivery that many days earlier than the actual promised date, to cope with unexpected delays in the supply chain.")
default_picking_policy = fields.Selection([
('direct', 'Ship products as soon as available, with back orders'),
('one', 'Ship all products at once')
], "Picking Policy", default='direct', default_model="sale.order", required=True)
@api.onchange('use_security_lead')
def _onchange_use_security_lead(self):
if not self.use_security_lead:
self.security_lead = 0.0
| 50.833333 | 1,220 |
476 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class ReportStockRule(models.AbstractModel):
_inherit = 'report.stock.report_stock_rule'
@api.model
def _get_routes(self, data):
res = super(ReportStockRule, self)._get_routes(data)
if data.get('so_route_ids'):
res = self.env['stock.location.route'].browse(data['so_route_ids']) | res
return res
| 31.733333 | 476 |
1,677 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class ReplenishmentReport(models.AbstractModel):
_inherit = 'report.stock.report_product_product_replenishment'
def _compute_draft_quantity_count(self, product_template_ids, product_variant_ids, wh_location_ids):
res = super()._compute_draft_quantity_count(product_template_ids, product_variant_ids, wh_location_ids)
domain = self._product_sale_domain(product_template_ids, product_variant_ids)
so_lines = self.env['sale.order.line'].search(domain)
out_sum = 0
if so_lines:
product_uom = so_lines[0].product_id.uom_id
quantities = so_lines.mapped(lambda line: line.product_uom._compute_quantity(line.product_uom_qty, product_uom))
out_sum = sum(quantities)
res['draft_sale_qty'] = out_sum
res['draft_sale_orders'] = so_lines.mapped("order_id").sorted(key=lambda so: so.name)
res['draft_sale_orders_matched'] = self.env.context.get('sale_line_to_match_id') in so_lines.ids
res['qty']['out'] += out_sum
return res
def _product_sale_domain(self, product_template_ids, product_variant_ids):
domain = [('state', 'in', ['draft', 'sent'])]
if product_template_ids:
domain += [('product_template_id', 'in', product_template_ids)]
elif product_variant_ids:
domain += [('product_id', 'in', product_variant_ids)]
warehouse_id = self.env.context.get('warehouse', False)
if warehouse_id:
domain += [('warehouse_id', '=', warehouse_id)]
return domain
| 49.323529 | 1,677 |
598 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class SaleReport(models.Model):
_inherit = "sale.report"
warehouse_id = fields.Many2one('stock.warehouse', 'Warehouse', readonly=True)
def _group_by_sale(self, groupby=''):
res = super()._group_by_sale(groupby)
res += """,s.warehouse_id"""
return res
def _select_additional_fields(self, fields):
fields['warehouse_id'] = ", s.warehouse_id as warehouse_id"
return super()._select_additional_fields(fields)
| 31.473684 | 598 |
1,748 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import exceptions, SUPERUSER_ID
from odoo.addons.sale.controllers.portal import CustomerPortal
from odoo.http import request, route
from odoo.tools import consteq
class SaleStockPortal(CustomerPortal):
def _stock_picking_check_access(self, picking_id, access_token=None):
picking = request.env['stock.picking'].browse([picking_id])
picking_sudo = picking.sudo()
try:
picking.check_access_rights('read')
picking.check_access_rule('read')
except exceptions.AccessError:
if not access_token or not consteq(picking_sudo.sale_id.access_token, access_token):
raise
return picking_sudo
@route(['/my/picking/pdf/<int:picking_id>'], type='http', auth="public", website=True)
def portal_my_picking_report(self, picking_id, access_token=None, **kw):
""" Print delivery slip for customer, using either access rights or access token
to be sure customer has access """
try:
picking_sudo = self._stock_picking_check_access(picking_id, access_token=access_token)
except exceptions.AccessError:
return request.redirect('/my')
        # print the report as SUPERUSER, since it requires access to products, taxes, payment terms, etc., and the portal user does not have those access rights.
pdf = request.env.ref('stock.action_report_delivery').with_user(SUPERUSER_ID)._render_qweb_pdf([picking_sudo.id])[0]
pdfhttpheaders = [
('Content-Type', 'application/pdf'),
('Content-Length', len(pdf)),
]
return request.make_response(pdf, headers=pdfhttpheaders)
| 46 | 1,748 |
506 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'pos_sale_margin',
'version': '1.1',
'category': 'Hidden',
'summary': 'Link module between Point of Sale and Sales Margin',
'description': """
This module enables you to view the margin of your Point of Sale orders in the Sales Margin report.
""",
'depends': ['pos_sale', 'sale_margin'],
'installable': True,
'auto_install': True,
'license': 'LGPL-3',
}
| 28.111111 | 506 |
1,421 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import odoo
from odoo.addons.point_of_sale.tests.common import TestPoSCommon
@odoo.tests.tagged('post_install', '-at_install')
class TestPoSSaleMarginReport(TestPoSCommon):
def setUp(self):
super(TestPoSSaleMarginReport, self).setUp()
self.config = self.basic_config
def test_pos_sale_margin_report(self):
product1 = self.create_product('Product 1', self.categ_basic, 150, standard_price=50)
self.open_new_session()
session = self.pos_session
self.env['pos.order'].create({
'session_id': session.id,
'lines': [(0, 0, {
'name': "OL/0001",
'product_id': product1.id,
'price_unit': 450,
'discount': 5.0,
'qty': 1.0,
'price_subtotal': 150,
'price_subtotal_incl': 150,
'total_cost': 50,
}),],
'amount_total': 150.0,
'amount_tax': 0.0,
'amount_paid': 0.0,
'amount_return': 0.0,
})
# PoS Orders have negative IDs to avoid conflict, so reports[0] will correspond to the newest order
reports = self.env['sale.report'].sudo().search([('product_id', '=', product1.id)], order='id')
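        # Expected margin: price_subtotal (150) - total_cost (50) = 100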
self.assertEqual(reports[0].margin, 100)
| 33.046512 | 1,421 |
489 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class SaleReport(models.Model):
_inherit = "sale.report"
def _select_pos(self, fields=None):
if not fields:
fields = {}
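        # Margin per PoS line: price subtotal minus the total cost divided by the PoS
        # currency rate (a zero or NULL rate falls back to 1.0 to avoid division by zero).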
fields['margin'] = ', SUM(l.price_subtotal - COALESCE(l.total_cost,0) / CASE COALESCE(pos.currency_rate, 0) WHEN 0 THEN 1.0 ELSE pos.currency_rate END) AS margin'
return super()._select_pos(fields)
| 34.928571 | 489 |
524 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Partners Geolocation',
'version': '2.1',
'category': 'Hidden/Tools',
'description': """
Partners Geolocation
========================
""",
'depends': ['base_setup'],
'data': [
'security/ir.model.access.csv',
'views/res_partner_views.xml',
'views/res_config_settings_views.xml',
'data/data.xml',
],
'installable': True,
'license': 'LGPL-3',
}
| 26.2 | 524 |
1,553 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import TransactionCase
from odoo.exceptions import UserError
import odoo.tests
@odoo.tests.tagged('external', '-standard')
class TestGeoLocalize(TransactionCase):
def test_default_openstreetmap(self):
""" Test that openstreetmap localize service works. """
test_partner = self.env.ref('base.res_partner_2')
test_partner.geo_localize()
self.assertTrue(test_partner.partner_longitude)
self.assertTrue(test_partner.partner_latitude)
self.assertTrue(test_partner.date_localization)
        # we don't check here that the localization is at the right place,
        # just that the result consists of realistic float coordinates
self.assertTrue(float(test_partner.partner_longitude) != 0.0)
self.assertTrue(float(test_partner.partner_latitude) != 0.0)
def test_googlemap_without_api_key(self):
""" Without providing API key to google maps,
the service doesn't work."""
test_partner = self.env.ref('base.res_partner_address_4')
google_map = self.env.ref('base_geolocalize.geoprovider_google_map').id
self.env['ir.config_parameter'].set_param('base_geolocalize.geo_provider', google_map)
with self.assertRaises(UserError):
test_partner.geo_localize()
self.assertFalse(test_partner.partner_longitude)
self.assertFalse(test_partner.partner_latitude)
self.assertFalse(test_partner.date_localization)
| 44.371429 | 1,553 |
6,731 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import requests
import logging
from odoo import api, fields, models, tools, _
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
class GeoProvider(models.Model):
_name = "base.geo_provider"
_description = "Geo Provider"
tech_name = fields.Char()
name = fields.Char()
class GeoCoder(models.AbstractModel):
"""
Abstract class used to call Geolocalization API and convert addresses
into GPS coordinates.
"""
_name = "base.geocoder"
_description = "Geo Coder"
@api.model
def _get_provider(self):
prov_id = self.env['ir.config_parameter'].sudo().get_param('base_geolocalize.geo_provider')
if prov_id:
provider = self.env['base.geo_provider'].browse(int(prov_id))
if not prov_id or not provider.exists():
provider = self.env['base.geo_provider'].search([], limit=1)
return provider
@api.model
def geo_query_address(self, street=None, zip=None, city=None, state=None, country=None):
""" Converts address fields into a valid string for querying
geolocation APIs.
:param street: street address
:param zip: zip code
:param city: city
:param state: state
:param country: country
:return: formatted string
"""
provider = self._get_provider().tech_name
if hasattr(self, '_geo_query_address_' + provider):
            # Apply the transformation defined for the provider
return getattr(self, '_geo_query_address_' + provider)(street, zip, city, state, country)
else:
# By default, join the non-empty parameters
return self._geo_query_address_default(street=street, zip=zip, city=city, state=state, country=country)
@api.model
def geo_find(self, addr, **kw):
"""Use a location provider API to convert an address string into a latitude, longitude tuple.
Here we use Openstreetmap Nominatim by default.
:param addr: Address string passed to API
:return: (latitude, longitude) or None if not found
"""
provider = self._get_provider().tech_name
try:
service = getattr(self, '_call_' + provider)
result = service(addr, **kw)
except AttributeError:
raise UserError(_(
'Provider %s is not implemented for geolocation service.'
) % provider)
except UserError:
raise
except Exception:
_logger.debug('Geolocalize call failed', exc_info=True)
result = None
return result
@api.model
def _call_openstreetmap(self, addr, **kw):
"""
        Use the OpenStreetMap Nominatim service to retrieve the location
:return: (latitude, longitude) or None if not found
"""
if not addr:
_logger.info('invalid address given')
return None
url = 'https://nominatim.openstreetmap.org/search'
try:
headers = {'User-Agent': 'Odoo (http://www.odoo.com/contactus)'}
response = requests.get(url, headers=headers, params={'format': 'json', 'q': addr})
_logger.info('openstreetmap nominatim service called')
if response.status_code != 200:
_logger.warning('Request to openstreetmap failed.\nCode: %s\nContent: %s', response.status_code, response.content)
result = response.json()
except Exception as e:
self._raise_query_error(e)
        if not result:
            return None
        geo = result[0]
return float(geo['lat']), float(geo['lon'])
@api.model
def _call_googlemap(self, addr, **kw):
""" Use google maps API. It won't work without a valid API key.
:return: (latitude, longitude) or None if not found
"""
apikey = self.env['ir.config_parameter'].sudo().get_param('base_geolocalize.google_map_api_key')
if not apikey:
raise UserError(_(
"API key for GeoCoding (Places) required.\n"
"Visit https://developers.google.com/maps/documentation/geocoding/get-api-key for more information."
))
url = "https://maps.googleapis.com/maps/api/geocode/json"
params = {'sensor': 'false', 'address': addr, 'key': apikey}
if kw.get('force_country'):
params['components'] = 'country:%s' % kw['force_country']
try:
result = requests.get(url, params).json()
except Exception as e:
self._raise_query_error(e)
try:
if result['status'] == 'ZERO_RESULTS':
return None
if result['status'] != 'OK':
_logger.debug('Invalid Gmaps call: %s - %s',
result['status'], result.get('error_message', ''))
error_msg = _('Unable to geolocate, received the error:\n%s'
'\n\nGoogle made this a paid feature.\n'
'You should first enable billing on your Google account.\n'
'Then, go to Developer Console, and enable the APIs:\n'
'Geocoding, Maps Static, Maps Javascript.\n') % result.get('error_message')
raise UserError(error_msg)
geo = result['results'][0]['geometry']['location']
return float(geo['lat']), float(geo['lng'])
except (KeyError, ValueError):
_logger.debug('Unexpected Gmaps API answer %s', result.get('error_message', ''))
return None
@api.model
def _geo_query_address_default(self, street=None, zip=None, city=None, state=None, country=None):
address_list = [
street,
("%s %s" % (zip or '', city or '')).strip(),
state,
country
]
address_list = [item for item in address_list if item]
return tools.ustr(', '.join(address_list))
@api.model
def _geo_query_address_googlemap(self, street=None, zip=None, city=None, state=None, country=None):
        # put the country qualifier in front, otherwise GMap gives wrong results
# e.g. 'Congo, Democratic Republic of the' => 'Democratic Republic of the Congo'
if country and ',' in country and (
country.endswith(' of') or country.endswith(' of the')):
country = '{1} {0}'.format(*country.split(',', 1))
return self._geo_query_address_default(street=street, zip=zip, city=city, state=state, country=country)
def _raise_query_error(self, error):
raise UserError(_('Error with geolocation server:') + ' %s' % error)
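    # Minimal usage sketch (hypothetical address; `env` stands for any valid Odoo
    # environment):
    #     address = env['base.geocoder'].geo_query_address(
    #         street='Chaussee de Namur 40', zip='1367', city='Ramillies', country='Belgium')
    #     coords = env['base.geocoder'].geo_find(address)
    #     if coords:
    #         latitude, longitude = coords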
| 42.06875 | 6,731 |
807 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
geoloc_provider_id = fields.Many2one(
'base.geo_provider',
string='API',
config_parameter='base_geolocalize.geo_provider',
default=lambda x: x.env['base.geocoder']._get_provider()
)
geoloc_provider_techname = fields.Char(related='geoloc_provider_id.tech_name', readonly=1)
geoloc_provider_googlemap_key = fields.Char(
string='Google Map API Key',
config_parameter='base_geolocalize.google_map_api_key',
help="Visit https://developers.google.com/maps/documentation/geocoding/get-api-key for more information."
)
| 38.428571 | 807 |
2,255 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
from odoo.tools import config
class ResPartner(models.Model):
_inherit = "res.partner"
date_localization = fields.Date(string='Geolocation Date')
def write(self, vals):
# Reset latitude/longitude in case we modify the address without
# updating the related geolocation fields
if any(field in vals for field in ['street', 'zip', 'city', 'state_id', 'country_id']) \
and not all('partner_%s' % field in vals for field in ['latitude', 'longitude']):
vals.update({
'partner_latitude': 0.0,
'partner_longitude': 0.0,
})
return super().write(vals)
@api.model
def _geo_localize(self, street='', zip='', city='', state='', country=''):
geo_obj = self.env['base.geocoder']
search = geo_obj.geo_query_address(street=street, zip=zip, city=city, state=state, country=country)
result = geo_obj.geo_find(search, force_country=country)
if result is None:
search = geo_obj.geo_query_address(city=city, state=state, country=country)
result = geo_obj.geo_find(search, force_country=country)
return result
def geo_localize(self):
# We need country names in English below
if not self._context.get('force_geo_localize') \
and (self._context.get('import_file') \
or any(config[key] for key in ['test_enable', 'test_file', 'init', 'update'])):
return False
for partner in self.with_context(lang='en_US'):
result = self._geo_localize(partner.street,
partner.zip,
partner.city,
partner.state_id.name,
partner.country_id.name)
if result:
partner.write({
'partner_latitude': result[0],
'partner_longitude': result[1],
'date_localization': fields.Date.context_today(partner)
})
return True
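    # Usage sketch (added for illustration, not part of the original file; it
    # assumes an odoo shell bound to `env` and a working geolocation provider):
    #
    #     partner = env['res.partner'].search([('city', '!=', False)], limit=1)
    #     partner.geo_localize()
    #     partner.partner_latitude, partner.partner_longitude, partner.date_localization
    #
    # Note the fallback in _geo_localize(): when the full street address gives no
    # result, the lookup is retried with only the city, state and country.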
| 42.54717 | 2,255 |
650 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Landed Costs On MO',
'version': '1.0',
'summary': 'Landed Costs on Manufacturing Order',
'description': """
This module allows you to easily add extra costs on manufacturing order
and decide the split of these costs among their stock moves in order to
take them into account in your stock valuation.
""",
'depends': ['stock_landed_costs', 'mrp'],
'category': 'Manufacturing/Manufacturing',
'data': [
'views/stock_landed_cost_views.xml',
],
'auto_install': True,
'license': 'LGPL-3',
}
| 32.5 | 650 |
8,752 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.stock_account.tests.test_anglo_saxon_valuation_reconciliation_common import ValuationReconciliationTestCommon
from odoo.tests import tagged, Form
@tagged('post_install', '-at_install')
class TestStockLandedCostsMrp(ValuationReconciliationTestCommon):
@classmethod
def setUpClass(cls):
super(TestStockLandedCostsMrp, cls).setUpClass()
# References
cls.supplier_id = cls.env['res.partner'].create({'name': 'My Test Supplier'}).id
cls.customer_id = cls.env['res.partner'].create({'name': 'My Test Customer'}).id
cls.picking_type_in_id = cls.env.ref('stock.picking_type_in')
cls.picking_type_out_id = cls.env.ref('stock.picking_type_out')
cls.supplier_location_id = cls.env.ref('stock.stock_location_suppliers')
cls.stock_location_id = cls.company_data['default_warehouse'].lot_stock_id
cls.customer_location_id = cls.env.ref('stock.stock_location_customers')
cls.categ_all = cls.env.ref('product.product_category_all')
# Create product refrigerator & oven
cls.product_component1 = cls.env['product.product'].create({
'name': 'Component1',
'type': 'product',
'standard_price': 1.0,
'categ_id': cls.categ_all.id
})
cls.product_component2 = cls.env['product.product'].create({
'name': 'Component2',
'type': 'product',
'standard_price': 2.0,
'categ_id': cls.categ_all.id
})
cls.product_refrigerator = cls.env['product.product'].create({
'name': 'Refrigerator',
'type': 'product',
'categ_id': cls.categ_all.id
})
cls.uom_unit = cls.env.ref('uom.product_uom_unit')
cls.bom_refri = cls.env['mrp.bom'].create({
'product_id': cls.product_refrigerator.id,
'product_tmpl_id': cls.product_refrigerator.product_tmpl_id.id,
'product_uom_id': cls.uom_unit.id,
'product_qty': 1.0,
'type': 'normal',
})
cls.bom_refri_line1 = cls.env['mrp.bom.line'].create({
'bom_id': cls.bom_refri.id,
'product_id': cls.product_component1.id,
'product_qty': 3,
})
cls.bom_refri_line2 = cls.env['mrp.bom.line'].create({
'bom_id': cls.bom_refri.id,
'product_id': cls.product_component2.id,
'product_qty': 1,
})
# Warehouses
cls.warehouse_1 = cls.env['stock.warehouse'].create({
'name': 'Base Warehouse',
'reception_steps': 'one_step',
'delivery_steps': 'ship_only',
'code': 'BWH'})
cls.product_refrigerator.categ_id.property_cost_method = 'fifo'
cls.product_refrigerator.categ_id.property_valuation = 'real_time'
cls.product_refrigerator.categ_id.property_stock_account_input_categ_id = cls.company_data['default_account_stock_in']
cls.product_refrigerator.categ_id.property_stock_account_output_categ_id = cls.company_data['default_account_stock_out']
        # Create a service type product to use as a landed cost (labour, brokerage, transportation, packaging...)
cls.landed_cost = cls.env['product.product'].create({
'name': 'Landed Cost',
'type': 'service',
})
cls.allow_user = cls.env['res.users'].with_context({'no_reset_password': True}).create({
'name': "Adviser",
'login': "fm",
'email': "[email protected]",
'groups_id': [(6, 0, [cls.env.ref('account.group_account_manager').id, cls.env.ref('mrp.group_mrp_user').id, cls.env.ref('stock.group_stock_manager').id])]
})
def test_landed_cost_on_mrp(self):
# Initial inventory
quants = self.env['stock.quant'].with_context(inventory_mode=True).create({
'product_id': self.product_component1.id,
'inventory_quantity': 500,
'location_id': self.warehouse_1.lot_stock_id.id,
})
quants |= self.env['stock.quant'].with_context(inventory_mode=True).create({
'product_id': self.product_component2.id,
'inventory_quantity': 500,
'location_id': self.warehouse_1.lot_stock_id.id,
})
quants.action_apply_inventory()
man_order_form = Form(self.env['mrp.production'].with_user(self.allow_user))
man_order_form.product_id = self.product_refrigerator
man_order_form.bom_id = self.bom_refri
man_order_form.product_qty = 2.0
man_order = man_order_form.save()
self.assertEqual(man_order.state, 'draft', "Production order should be in draft state.")
man_order.action_confirm()
self.assertEqual(man_order.state, 'confirmed', "Production order should be in confirmed state.")
# check production move
production_move = man_order.move_finished_ids
self.assertEqual(production_move.product_id, self.product_refrigerator)
first_move = man_order.move_raw_ids.filtered(lambda move: move.product_id == self.product_component1)
self.assertEqual(first_move.product_qty, 6.0)
first_move = man_order.move_raw_ids.filtered(lambda move: move.product_id == self.product_component2)
self.assertEqual(first_move.product_qty, 2.0)
# produce product
mo_form = Form(man_order.with_user(self.allow_user))
mo_form.qty_producing = 2
man_order = mo_form.save()
man_order.button_mark_done()
landed_cost = Form(self.env['stock.landed.cost'].with_user(self.allow_user)).save()
landed_cost.target_model = 'manufacturing'
# Check domain of the views
self.assertTrue(man_order in self.env['mrp.production'].search([
('move_finished_ids.stock_valuation_layer_ids', '!=', False), ('company_id', '=', landed_cost.company_id.id)]))
landed_cost.mrp_production_ids = [(6, 0, [man_order.id])]
landed_cost.cost_lines = [(0, 0, {'product_id': self.landed_cost.id, 'price_unit': 5.0, 'split_method': 'equal'})]
landed_cost.button_validate()
self.assertEqual(landed_cost.state, 'done')
self.assertTrue(landed_cost.account_move_id)
# Link to one layer of product_refrigerator
self.assertEqual(len(landed_cost.stock_valuation_layer_ids), 1)
self.assertEqual(landed_cost.stock_valuation_layer_ids.product_id, self.product_refrigerator)
self.assertEqual(landed_cost.stock_valuation_layer_ids.value, 5.0)
def test_landed_cost_on_mrp_02(self):
"""
Test that a user who has manager access to stock can create and validate a landed cost linked
to a Manufacturing order without the need for MRP access
"""
# Create a user with only manager access to stock
stock_manager = self.env['res.users'].with_context({'no_reset_password': True}).create({
'name': "Stock Manager",
'login': "test",
'email': "[email protected]",
'groups_id': [(6, 0, [self.env.ref('stock.group_stock_manager').id])]
})
# Make some stock and reserve
self.env['stock.quant']._update_available_quantity(self.product_component1, self.warehouse_1.lot_stock_id, 10)
self.env['stock.quant']._update_available_quantity(self.product_component2, self.warehouse_1.lot_stock_id, 10)
# Create and confirm a MO with a user who has access to MRP
man_order_form = Form(self.env['mrp.production'].with_user(self.allow_user))
man_order_form.product_id = self.product_refrigerator
man_order_form.bom_id = self.bom_refri
man_order_form.product_qty = 1.0
man_order = man_order_form.save()
man_order.action_confirm()
# produce product
man_order_form.qty_producing = 1
man_order_form.save()
man_order.button_mark_done()
# Create the landed cost with the stock_manager user
landed_cost = Form(self.env['stock.landed.cost'].with_user(stock_manager)).save()
landed_cost.target_model = 'manufacturing'
        # Check that the MO can be selected by the stock_manager user
self.assertTrue(man_order in self.env['mrp.production'].search([
('move_finished_ids.stock_valuation_layer_ids', '!=', False), ('company_id', '=', landed_cost.company_id.id)]))
landed_cost.mrp_production_ids = [(6, 0, [man_order.id])]
        # Check that the stock manager can validate the landed cost without an access error
landed_cost.with_user(stock_manager).button_validate()
self.assertEqual(landed_cost.state, 'done')
| 49.168539 | 8,752 |
896 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api
class StockLandedCost(models.Model):
_inherit = 'stock.landed.cost'
target_model = fields.Selection(selection_add=[
('manufacturing', "Manufacturing Orders")
], ondelete={'manufacturing': 'set default'})
mrp_production_ids = fields.Many2many(
'mrp.production', string='Manufacturing order',
copy=False, states={'done': [('readonly', True)]}, groups='stock.group_stock_manager')
@api.onchange('target_model')
def _onchange_target_model(self):
super()._onchange_target_model()
if self.target_model != 'manufacturing':
self.mrp_production_ids = False
def _get_targeted_move_ids(self):
return super()._get_targeted_move_ids() | self.mrp_production_ids.move_finished_ids
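    # Extension pattern sketch (added for illustration; the 'repair' target and
    # the x_repair_move_ids field are hypothetical, not part of Odoo): another
    # module can plug a new document type into landed costs the same way, by
    # extending the selection and unioning its moves into _get_targeted_move_ids():
    #
    #     target_model = fields.Selection(selection_add=[('repair', "Repairs")],
    #                                     ondelete={'repair': 'set default'})
    #
    #     def _get_targeted_move_ids(self):
    #         return super()._get_targeted_move_ids() | self.x_repair_move_ids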
| 37.333333 | 896 |
26,028 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Web',
'category': 'Hidden',
'version': '1.0',
'description': """
Odoo Web core module.
========================
This module provides the core of the Odoo Web Client.
""",
'depends': ['base'],
'auto_install': True,
'data': [
'security/ir.model.access.csv',
'views/webclient_templates.xml',
'views/report_templates.xml',
'views/base_document_layout_views.xml',
'views/speedscope_template.xml',
'views/lazy_assets.xml',
'data/report_layout.xml',
],
'assets': {
# ---------------------------------------------------------------------
# MAIN BUNDLES
# ---------------------------------------------------------------------
# These are the bundles meant to be called via "t-call-assets" in
# regular XML templates.
#
        # The convention for naming bundles is as follows:
# 1) the name of the first module defining the bundle
# 2) the prefix "assets_"
# 3) an arbitrary name, relevant to the content of the bundle.
#
        # Examples:
# > web.assets_common = assets common to both frontend and backend clients.
# > web_editor.assets_wysiwyg = assets needed by components defined in the "web_editor" module.
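        #
        # Illustrative sketch (added; `my_module` is hypothetical): a bundle is
        # pulled into a page with <t t-call-assets="web.assets_common"/> in an
        # XML template, and another module extends a bundle from its own
        # manifest by listing extra files under the same bundle name, e.g.:
        #
        #     'assets': {
        #         'web.assets_backend': [
        #             'my_module/static/src/js/my_widget.js',
        #         ],
        #     },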
'web.assets_qweb': [
'web/static/src/**/*.xml',
('remove', 'web/static/src/legacy/**/*.xml'),
'web/static/src/legacy/xml/base.xml',
'web/static/src/legacy/xml/control_panel.xml',
'web/static/src/legacy/xml/chart.xml',
'web/static/src/legacy/xml/fields.xml',
'web/static/src/legacy/xml/file_upload_progress_bar.xml',
'web/static/src/legacy/xml/file_upload_progress_card.xml',
'web/static/src/legacy/xml/kanban.xml',
'web/static/src/legacy/xml/pivot.xml',
'web/static/src/legacy/xml/report.xml',
'web/static/src/legacy/xml/web_calendar.xml',
'web/static/src/legacy/xml/graph.xml',
'web/static/src/legacy/xml/search_panel.xml',
'web/static/src/legacy/xml/week_days.xml',
],
'web.assets_common_minimal': [
'web/static/lib/es6-promise/es6-promise-polyfill.js',
'web/static/src/legacy/js/promise_extension.js',
'web/static/src/boot.js',
'web/static/src/session.js',
'web/static/src/legacy/js/core/cookie_utils.js',
'web/static/src/legacy/js/core/menu.js',
],
'web.assets_common': [
('include', 'web._assets_helpers'),
'web/static/lib/bootstrap/scss/_variables.scss',
('include', 'web._assets_common_styles'),
('include', 'web.assets_common_minimal'),
('include', 'web._assets_common_scripts'),
],
'web.assets_common_lazy': [
('include', 'web.assets_common'),
# Remove assets_common_minimal
('remove', 'web/static/lib/es6-promise/es6-promise-polyfill.js'),
('remove', 'web/static/src/legacy/js/promise_extension.js'),
('remove', 'web/static/src/boot.js'),
('remove', 'web/static/src/session.js'),
('remove', 'web/static/src/legacy/js/core/cookie_utils.js'),
('remove', 'web/static/src/legacy/js/core/menu.js'),
],
'web.assets_backend': [
('include', 'web._assets_helpers'),
('include', 'web._assets_backend_helpers'),
'web/static/lib/bootstrap/scss/_variables.scss',
('include', 'web._assets_bootstrap'),
'base/static/src/css/modules.css',
'base/static/src/js/res_config_settings.js',
'web/static/src/core/utils/transitions.scss',
'web/static/src/core/**/*',
'web/static/src/fields/**/*',
'web/static/src/search/**/*',
'web/static/src/views/**/*',
'web/static/src/webclient/**/*',
('remove', 'web/static/src/webclient/clickbot/clickbot.js'), # lazy loaded
'web/static/src/env.js',
'web/static/lib/jquery.scrollTo/jquery.scrollTo.js',
'web/static/lib/luxon/luxon.js',
'web/static/lib/py.js/lib/py.js',
'web/static/lib/py.js/lib/py_extras.js',
'web/static/lib/jquery.ba-bbq/jquery.ba-bbq.js',
'web/static/src/legacy/scss/domain_selector.scss',
'web/static/src/legacy/scss/model_field_selector.scss',
'web/static/src/legacy/scss/progress_bar.scss',
'web/static/src/legacy/scss/dropdown.scss',
'web/static/src/legacy/scss/tooltip.scss',
'web/static/src/legacy/scss/switch_company_menu.scss',
'web/static/src/legacy/scss/ace.scss',
'web/static/src/legacy/scss/fields.scss',
'web/static/src/legacy/scss/file_upload.scss',
'web/static/src/legacy/scss/views.scss',
'web/static/src/legacy/scss/form_view.scss',
'web/static/src/legacy/scss/list_view.scss',
'web/static/src/legacy/scss/kanban_dashboard.scss',
'web/static/src/legacy/scss/kanban_examples_dialog.scss',
'web/static/src/legacy/scss/kanban_column_progressbar.scss',
'web/static/src/legacy/scss/kanban_view.scss',
'web/static/src/legacy/scss/web_calendar.scss',
'web/static/src/legacy/scss/data_export.scss',
'base/static/src/scss/onboarding.scss',
'web/static/src/legacy/scss/attachment_preview.scss',
'web/static/src/legacy/scss/base_document_layout.scss',
'web/static/src/legacy/scss/special_fields.scss',
'web/static/src/legacy/scss/ribbon.scss',
'web/static/src/legacy/scss/base_settings.scss',
'web/static/src/legacy/scss/report_backend.scss',
'web/static/src/legacy/scss/dropdown_extra.scss',
'web/static/src/legacy/scss/fields_extra.scss',
'web/static/src/legacy/scss/form_view_extra.scss',
'web/static/src/legacy/scss/list_view_extra.scss',
'web/static/src/legacy/scss/profiling_qweb_view.scss',
'web/static/src/legacy/action_adapters.js',
'web/static/src/legacy/debug_manager.js',
'web/static/src/legacy/legacy_service_provider.js',
'web/static/src/legacy/legacy_client_actions.js',
'web/static/src/legacy/legacy_dialog.js',
'web/static/src/legacy/legacy_views.js',
'web/static/src/legacy/legacy_promise_error_handler.js',
'web/static/src/legacy/legacy_rpc_error_handler.js',
'web/static/src/legacy/root_widget.js',
'web/static/src/legacy/systray_menu.js',
'web/static/src/legacy/systray_menu_item.js',
'web/static/src/legacy/backend_utils.js',
'web/static/src/legacy/utils.js',
'web/static/src/legacy/web_client.js',
'web/static/src/legacy/js/_deprecated/*',
'web/static/src/legacy/js/chrome/*',
'web/static/src/legacy/js/components/*',
'web/static/src/legacy/js/control_panel/*',
'web/static/src/legacy/js/core/domain.js',
'web/static/src/legacy/js/core/mvc.js',
'web/static/src/legacy/js/core/py_utils.js',
'web/static/src/legacy/js/core/context.js',
'web/static/src/legacy/js/core/data_comparison_utils.js',
'web/static/src/legacy/js/core/misc.js',
'web/static/src/legacy/js/core/profiling_qweb_view.js',
'web/static/src/legacy/js/fields/*',
'web/static/src/legacy/js/report/utils.js',
'web/static/src/legacy/js/report/client_action.js',
'web/static/src/legacy/js/services/data_manager.js',
'web/static/src/legacy/js/services/report_service.js',
'web/static/src/legacy/js/services/session.js',
'web/static/src/legacy/js/tools/tools.js',
'web/static/src/legacy/js/views/**/*',
'web/static/src/legacy/js/widgets/change_password.js',
'web/static/src/legacy/js/widgets/data_export.js',
'web/static/src/legacy/js/widgets/date_picker.js',
'web/static/src/legacy/js/widgets/domain_selector_dialog.js',
'web/static/src/legacy/js/widgets/domain_selector.js',
'web/static/src/legacy/js/widgets/iframe_widget.js',
'web/static/src/legacy/js/widgets/model_field_selector.js',
'web/static/src/legacy/js/widgets/pie_chart.js',
'web/static/src/legacy/js/widgets/ribbon.js',
'web/static/src/legacy/js/widgets/week_days.js',
'web/static/src/legacy/js/widgets/signature.js',
'web/static/src/legacy/js/widgets/attach_document.js',
'web/static/src/legacy/js/apps.js',
'web/static/src/legacy/js/env.js',
'web/static/src/legacy/js/model.js',
'web/static/src/legacy/js/owl_compatibility.js',
("remove", 'web/static/src/legacy/js/views/graph/**/*'),
("remove", 'web/static/src/legacy/js/views/pivot/**/*'),
],
"web.assets_backend_legacy_lazy": [
("include", "web._assets_helpers"),
('include', 'web._assets_backend_helpers'),
'web/static/lib/bootstrap/scss/_variables.scss',
# graph
'web/static/src/legacy/js/views/graph/**/*',
'web/static/src/legacy/scss/graph_view.scss',
# pivot
'web/static/src/legacy/js/views/pivot/**/*',
'web/static/src/legacy/scss/pivot_view.scss',
],
'web.assets_frontend_minimal': [
'web/static/src/legacy/js/public/lazyloader.js',
],
'web.assets_frontend': [
('include', 'web._assets_helpers'),
('include', 'web._assets_frontend_helpers'),
'web/static/lib/bootstrap/scss/_variables.scss',
'web/static/lib/luxon/luxon.js',
('include', 'web._assets_bootstrap'),
'web/static/src/env.js',
'web/static/src/core/utils/transitions.scss', # included early because used by other files
'web/static/src/core/**/*',
('remove', 'web/static/src/core/commands/**/*'),
('remove', 'web/static/src/core/debug/debug_menu.js'),
'web/static/src/public/error_notifications.js',
'web/static/src/legacy/scss/base_frontend.scss',
'web/static/src/legacy/scss/lazyloader.scss',
'web/static/src/legacy/scss/navbar_mobile.scss',
('include', 'web.assets_frontend_minimal'),
'web/static/src/legacy/utils.js',
'web/static/src/legacy/js/owl_compatibility.js',
'web/static/src/legacy/js/services/session.js',
'web/static/src/legacy/js/public/public_env.js',
'web/static/src/legacy/js/public/public_root.js',
'web/static/src/legacy/js/public/public_root_instance.js',
'web/static/src/legacy/js/public/public_widget.js',
'web/static/src/legacy/legacy_promise_error_handler.js',
'web/static/src/legacy/legacy_rpc_error_handler.js',
('include', 'web.frontend_legacy'),
],
'web.assets_frontend_lazy': [
('include', 'web.assets_frontend'),
# Remove assets_frontend_minimal
('remove', 'web/static/src/legacy/js/public/lazyloader.js')
],
'web.assets_backend_prod_only': [
'web/static/src/main.js',
'web/static/src/start.js',
'web/static/src/legacy/legacy_setup.js',
],
        # Optional bundle for the PDFJS lib
        # Since PDFJS is quite huge (≈80,000 lines), please only load it when necessary.
        # For now, it is only used to display the PDF slide viewer during an embed.
        # Once bundled, the size is reduced to ≈5,300 lines.
'web.pdf_js_lib': [
'web/static/lib/pdfjs/build/pdf.js',
'web/static/lib/pdfjs/build/pdf.worker.js',
],
'web.report_assets_common': [
('include', 'web._assets_helpers'),
'web/static/src/legacy/scss/bootstrap_overridden_report.scss',
'web/static/lib/bootstrap/scss/_variables.scss',
('include', 'web._assets_bootstrap'),
'base/static/src/css/description.css',
'web/static/lib/fontawesome/css/font-awesome.css',
'web/static/fonts/fonts.scss',
'web/static/src/legacy/scss/report.scss',
'web/static/src/legacy/scss/layout_standard.scss',
'web/static/src/legacy/scss/layout_background.scss',
'web/static/src/legacy/scss/layout_boxed.scss',
'web/static/src/legacy/scss/layout_clean.scss',
'/web/static/src/legacy/scss/asset_styles_company_report.scss',
'web/static/src/legacy/js/services/session.js',
'web/static/src/legacy/js/public/public_root.js',
'web/static/src/legacy/js/public/public_root_instance.js',
'web/static/src/legacy/js/public/public_widget.js',
'web/static/src/legacy/js/report/utils.js',
'web/static/src/legacy/js/report/report.js',
],
'web.report_assets_pdf': [
'web/static/src/legacy/js/report/reset.min.css',
],
# ---------------------------------------------------------------------
# SUB BUNDLES
# ---------------------------------------------------------------------
# These bundles can be used by main bundles but are not supposed to be
# called directly from XML templates.
#
# Their naming conventions are similar to those of the main bundles,
# with the addition of a prefixed underscore to reflect the "private"
# aspect.
#
        # Examples:
# > web._assets_helpers = define assets needed in most main bundles
'web._assets_primary_variables': [
'web/static/src/legacy/scss/primary_variables.scss',
],
'web._assets_secondary_variables': [
'web/static/src/legacy/scss/secondary_variables.scss',
],
'web._assets_helpers': [
'web/static/lib/bootstrap/scss/_functions.scss',
'web/static/lib/bootstrap/scss/_mixins.scss',
'web/static/src/legacy/scss/bs_mixins_overrides.scss',
'web/static/src/legacy/scss/utils.scss',
('include', 'web._assets_primary_variables'),
('include', 'web._assets_secondary_variables'),
],
'web._assets_bootstrap': [
'web/static/src/legacy/scss/import_bootstrap.scss',
'web/static/src/legacy/scss/bootstrap_review.scss',
],
'web._assets_backend_helpers': [
'web/static/src/legacy/scss/bootstrap_overridden.scss',
'web/static/src/legacy/scss/bs_mixins_overrides_backend.scss',
],
'web._assets_frontend_helpers': [
'web/static/src/legacy/scss/bootstrap_overridden_frontend.scss',
],
'web._assets_common_styles': [
'web/static/src/legacy/scss/tempusdominus_overridden.scss',
'web/static/lib/tempusdominus/tempusdominus.scss',
'web/static/lib/jquery.ui/jquery-ui.css',
'web/static/lib/fontawesome/css/font-awesome.css',
'web/static/lib/select2/select2.css',
'web/static/lib/select2-bootstrap-css/select2-bootstrap.css',
'web/static/lib/daterangepicker/daterangepicker.css',
'web/static/fonts/fonts.scss',
'web/static/src/legacy/scss/ui.scss',
'web/static/src/legacy/scss/ui_extra.scss',
'web/static/src/legacy/scss/navbar.scss',
'web/static/src/legacy/scss/mimetypes.scss',
'web/static/src/legacy/scss/modal.scss',
'web/static/src/legacy/scss/animation.scss',
'web/static/src/legacy/scss/datepicker.scss',
'web/static/src/legacy/scss/daterangepicker.scss',
'web/static/src/legacy/scss/banner.scss',
'web/static/src/legacy/scss/colorpicker.scss',
'web/static/src/legacy/scss/popover.scss',
'web/static/src/legacy/scss/translation_dialog.scss',
'web/static/src/legacy/scss/keyboard.scss',
'web/static/src/legacy/scss/name_and_signature.scss',
'web/static/src/legacy/scss/web.zoomodoo.scss',
'web/static/src/legacy/scss/color_picker.scss',
'web/static/src/legacy/scss/fontawesome_overridden.scss',
],
'web._assets_common_scripts': [
'web/static/lib/underscore/underscore.js',
'web/static/lib/underscore.string/lib/underscore.string.js',
'web/static/lib/moment/moment.js',
'web/static/lib/owl/owl.js',
'web/static/src/legacy/js/component_extension.js',
'web/static/lib/jquery/jquery.js',
'web/static/lib/jquery.ui/jquery-ui.js',
'web/static/lib/jquery/jquery.browser.js',
'web/static/lib/jquery.blockUI/jquery.blockUI.js',
'web/static/lib/jquery.hotkeys/jquery.hotkeys.js',
'web/static/lib/jquery.placeholder/jquery.placeholder.js',
'web/static/lib/jquery.form/jquery.form.js',
'web/static/lib/jquery.ba-bbq/jquery.ba-bbq.js',
'web/static/lib/jquery.mjs.nestedSortable/jquery.mjs.nestedSortable.js',
'web/static/lib/popper/popper.js',
'web/static/lib/bootstrap/js/index.js',
'web/static/lib/bootstrap/js/util.js',
'web/static/lib/bootstrap/js/alert.js',
'web/static/lib/bootstrap/js/button.js',
'web/static/lib/bootstrap/js/carousel.js',
'web/static/lib/bootstrap/js/collapse.js',
'web/static/lib/bootstrap/js/dropdown.js',
'web/static/lib/bootstrap/js/modal.js',
'web/static/lib/bootstrap/js/tooltip.js',
'web/static/lib/bootstrap/js/popover.js',
'web/static/lib/bootstrap/js/scrollspy.js',
'web/static/lib/bootstrap/js/tab.js',
'web/static/lib/bootstrap/js/toast.js',
'web/static/lib/tempusdominus/tempusdominus.js',
'web/static/lib/select2/select2.js',
'web/static/lib/clipboard/clipboard.js',
'web/static/lib/jSignature/jSignatureCustom.js',
'web/static/lib/qweb/qweb2.js',
'web/static/src/legacy/js/libs/autocomplete.js',
'web/static/src/legacy/js/libs/bootstrap.js',
'web/static/src/legacy/js/libs/content-disposition.js',
'web/static/src/legacy/js/libs/download.js',
'web/static/src/legacy/js/libs/fullcalendar.js',
'web/static/src/legacy/js/libs/jquery.js',
'web/static/src/legacy/js/libs/moment.js',
'web/static/src/legacy/js/libs/underscore.js',
'web/static/src/legacy/js/libs/pdfjs.js',
'web/static/src/legacy/js/libs/popper.js',
'web/static/src/legacy/js/libs/zoomodoo.js',
'web/static/src/legacy/js/libs/jSignatureCustom.js',
'web/static/src/legacy/js/core/abstract_service.js',
'web/static/src/legacy/js/core/abstract_storage_service.js',
'web/static/src/legacy/js/core/ajax.js',
'web/static/src/legacy/js/core/browser_detection.js',
'web/static/src/legacy/js/core/bus.js',
'web/static/src/legacy/js/core/custom_hooks.js',
'web/static/src/legacy/js/core/class.js',
'web/static/src/legacy/js/core/collections.js',
'web/static/src/legacy/js/core/concurrency.js',
'web/static/src/legacy/js/core/dialog.js',
'web/static/src/legacy/js/core/owl_dialog.js',
'web/static/src/legacy/js/core/popover.js',
'web/static/src/legacy/js/core/dom.js',
'web/static/src/legacy/js/core/local_storage.js',
'web/static/src/legacy/js/core/mixins.js',
'web/static/src/legacy/js/core/qweb.js',
'web/static/src/legacy/js/core/ram_storage.js',
'web/static/src/legacy/js/core/registry.js',
'web/static/src/legacy/js/core/rpc.js',
'web/static/src/legacy/js/core/service_mixins.js',
'web/static/src/legacy/js/core/session.js',
'web/static/src/legacy/js/core/session_storage.js',
'web/static/src/legacy/js/core/time.js',
'web/static/src/legacy/js/core/translation.js',
'web/static/src/legacy/js/core/utils.js',
'web/static/src/legacy/js/core/widget.js',
'web/static/src/legacy/js/services/ajax_service.js',
'web/static/src/legacy/js/services/config.js',
'web/static/src/legacy/js/services/core.js',
'web/static/src/legacy/js/services/local_storage_service.js',
'web/static/src/legacy/js/core/error_utils.js',
'web/static/src/legacy/js/services/session_storage_service.js',
'web/static/src/legacy/js/common_env.js',
'web/static/src/legacy/js/widgets/name_and_signature.js',
'web/static/src/legacy/js/core/smooth_scroll_on_drag.js',
'web/static/src/legacy/js/widgets/colorpicker.js',
'web/static/src/legacy/js/widgets/translation_dialog.js',
],
# Used during the transition of the web architecture
'web.frontend_legacy': [
'web/static/src/legacy/frontend/**/*',
],
# ---------------------------------------------------------------------
# TESTS BUNDLES
# ---------------------------------------------------------------------
'web.assets_tests': [
            # No tours are defined in web, but the "assets_tests" bundle is
            # first declared in web.
'web/static/tests/legacy/helpers/test_utils_file.js'
],
'web.tests_assets': [
'web/static/lib/qunit/qunit-2.9.1.css',
'web/static/lib/qunit/qunit-2.9.1.js',
'web/static/tests/legacy/helpers/**/*',
('remove', 'web/static/tests/legacy/helpers/test_utils_tests.js'),
'web/static/tests/legacy/legacy_setup.js',
'web/static/lib/fullcalendar/core/main.css',
'web/static/lib/fullcalendar/daygrid/main.css',
'web/static/lib/fullcalendar/timegrid/main.css',
'web/static/lib/fullcalendar/list/main.css',
'web/static/lib/fullcalendar/core/main.js',
'web/static/lib/fullcalendar/moment/main.js',
'web/static/lib/fullcalendar/interaction/main.js',
'web/static/lib/fullcalendar/daygrid/main.js',
'web/static/lib/fullcalendar/timegrid/main.js',
'web/static/lib/fullcalendar/list/main.js',
'web/static/lib/ace/ace.js',
'web/static/lib/ace/javascript_highlight_rules.js',
'web/static/lib/ace/mode-python.js',
'web/static/lib/ace/mode-xml.js',
'web/static/lib/ace/mode-js.js',
'web/static/lib/nearest/jquery.nearest.js',
'web/static/lib/daterangepicker/daterangepicker.js',
'web/static/lib/stacktracejs/stacktrace.js',
# 'web/static/tests/legacy/main_tests.js',
'web/static/tests/helpers/**/*.js',
'web/static/tests/search/helpers.js',
'web/static/tests/webclient/**/helpers.js',
'web/static/tests/qunit.js',
'web/static/tests/main.js',
'web/static/tests/mock_server_tests.js',
'web/static/tests/setup.js',
# These 2 lines below are taken from web.assets_frontend
# They're required for the web.frontend_legacy to work properly
            # More lines coming from web.assets_frontend are expected to be added here
            # if further legacy code requires other scss or js files.
('include', 'web._assets_helpers'),
'web/static/lib/bootstrap/scss/_variables.scss',
('include', 'web.frontend_legacy'),
("include", "web.assets_backend_legacy_lazy"),
],
'web.qunit_suite_tests': [
'base/static/tests/base_settings_tests.js',
'web/static/tests/env_tests.js',
'web/static/tests/core/**/*.js',
'web/static/tests/fields/**/*.js',
'web/static/tests/search/**/*.js',
('remove', 'web/static/tests/search/helpers.js'),
'web/static/tests/views/**/*.js',
'web/static/tests/webclient/**/*.js',
('remove', 'web/static/tests/webclient/**/helpers.js'),
'web/static/tests/legacy/**/*.js',
('remove', 'web/static/tests/legacy/**/*_mobile_tests.js'),
('remove', 'web/static/tests/legacy/**/*_benchmarks.js'),
('remove', 'web/static/tests/legacy/helpers/**/*.js'),
('remove', 'web/static/tests/legacy/legacy_setup.js'),
('include', 'web.frontend_legacy_tests'),
],
'web.qunit_mobile_suite_tests': [
'web/static/lib/jquery.touchSwipe/jquery.touchSwipe.js',
'web/static/tests/legacy/fields/basic_fields_mobile_tests.js',
'web/static/tests/legacy/fields/relational_fields_mobile_tests.js',
'web/static/tests/legacy/components/dropdown_menu_mobile_tests.js',
],
# Used during the transition of the web architecture
'web.frontend_legacy_tests': [
'web/static/tests/legacy/frontend/*.js',
],
},
    'bootstrap': True,  # load translations for the login screen
'license': 'LGPL-3',
}
| 49.009416 | 26,024 |
2,729 |
py
|
PYTHON
|
15.0
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import time
import odoo
import odoo.tests
from odoo.modules.module import read_manifest
from odoo.tools import mute_logger
_logger = logging.getLogger(__name__)
class TestAssetsGenerateTimeCommon(odoo.tests.TransactionCase):
def generate_bundles(self):
bundles = set()
installed_module_names = self.env['ir.module.module'].search([('state', '=', 'installed')]).mapped('name')
for addon_path in odoo.addons.__path__:
for addon in installed_module_names:
manifest = read_manifest(addon_path, addon) or {}
assets = manifest.get('assets')
if assets:
bundles |= set(assets.keys())
for bundle in bundles:
with mute_logger('odoo.addons.base.models.assetsbundle'):
for assets_type in 'css', 'js':
try:
start_t = time.time()
css = assets_type == 'css'
js = assets_type == 'js'
self.env['ir.qweb']._generate_asset_nodes(bundle, css=css, js=js)
yield (f'{bundle}.{assets_type}', time.time() - start_t)
except ValueError:
_logger.info('Error detected while generating bundle %r %s', bundle, assets_type)
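    # Note (added for clarity, not part of the original file): generate_bundles()
    # yields one ('<bundle>.css' or '<bundle>.js', duration_in_seconds) tuple per
    # successfully generated asset; the two test classes below consume it either
    # to log the timings or to assert them against per-bundle thresholds.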
@odoo.tests.tagged('post_install', '-at_install')
class TestLogsAssetsGenerateTime(TestAssetsGenerateTimeCommon):
def test_logs_assets_generate_time(self):
"""
        The purpose of this test is to monitor the time taken to generate asset bundles.
        It is not meant to test generation failures, hence the try/except and the muted
        logger in generate_bundles(); 'web.assets_qweb', for example, contains only static XML.
"""
for bundle, duration in self.generate_bundles():
_logger.info('Bundle %r generated in %.2fs', bundle, duration)
@odoo.tests.tagged('post_install', '-at_install', '-standard', 'bundle_generation')
class TestAssetsGenerateTime(TestAssetsGenerateTimeCommon):
"""
This test is meant to be run nightly to ensure bundle generation does not exceed
a low threshold
"""
def test_assets_generate_time(self):
thresholds = {
'web.qunit_suite_tests.js': 3.6,
'project.webclient.js': 2.5,
'point_of_sale.pos_assets_backend.js': 2.5,
'web.assets_backend.js': 2.5,
}
for bundle, duration in self.generate_bundles():
threshold = thresholds.get(bundle, 2)
self.assertLess(duration, threshold, "Bundle %r took more than %s sec" % (bundle, threshold))
| 39.550725 | 2,729 |
41,844 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import random
import re
from unittest.mock import patch
import textwrap
from datetime import datetime, timedelta
from lxml import etree
import logging
from odoo.tests.common import BaseCase, HttpCase, tagged
from odoo.tools import topological_sort
from odoo.addons.web.controllers.main import HomeStaticTemplateHelpers
_logger = logging.getLogger(__name__)
def sample(population):
return random.sample(
population,
random.randint(0, min(len(population), 5)))
class TestModulesLoading(BaseCase):
def setUp(self):
self.mods = [str(i) for i in range(1000)]
def test_topological_sort(self):
random.shuffle(self.mods)
modules = [
(k, sample(self.mods[:i]))
for i, k in enumerate(self.mods)]
random.shuffle(modules)
ms = dict(modules)
seen = set()
sorted_modules = topological_sort(ms)
for module in sorted_modules:
deps = ms[module]
self.assertGreaterEqual(
seen, set(deps),
'Module %s (index %d), ' \
'missing dependencies %s from loaded modules %s' % (
module, sorted_modules.index(module), deps, seen
))
seen.add(module)
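    # Tiny concrete example (added for illustration, not part of the original
    # test): topological_sort maps each element to its dependencies and returns
    # an order in which dependencies always come first, e.g.
    #     topological_sort({'base': [], 'web': ['base'], 'mail': ['web']})
    # may return ['base', 'web', 'mail']; any order compatible with the
    # dependencies is acceptable, which is exactly what the assertion above checks.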
class TestStaticInheritanceCommon(BaseCase):
def setUp(self):
super(TestStaticInheritanceCommon, self).setUp()
        # "asset_paths" mimics the return format of "manifest_glob"
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
('module_2_file_1', 'module_2', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<t t-name="template_1_2">
<div>And I grew strong</div>
</t>
</templates>
""",
'module_2_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_2_1" t-inherit="module_1.template_1_1" t-inherit-mode="primary">
<xpath expr="//div[1]" position="after">
<div>I was petrified</div>
</xpath>
<xpath expr="//div[2]" position="after">
<div>But then I spent so many nights thinking how you did me wrong</div>
</xpath>
</form>
<div t-name="template_2_2">
<div>And I learned how to get along</div>
</div>
<form t-inherit="module_1.template_1_2" t-inherit-mode="extension">
<xpath expr="//div[1]" position="after">
<div>And I learned how to get along</div>
</xpath>
</form>
</templates>
""",
}
self._set_patchers()
self._toggle_patchers('start')
self._reg_replace_ws = r"\s|\t"
def tearDown(self):
super(TestStaticInheritanceCommon, self).tearDown()
self._toggle_patchers('stop')
# Custom Assert
def assertXMLEqual(self, output, expected):
self.assertTrue(output)
self.assertTrue(expected)
output = textwrap.dedent(output.decode('UTF-8')).strip()
output = re.sub(self._reg_replace_ws, '', output)
expected = textwrap.dedent(expected.decode('UTF-8')).strip()
expected = re.sub(self._reg_replace_ws, '', expected)
self.assertEqual(output, expected)
# Private methods
def _get_module_names(self):
return ','.join([asset_path[1] for asset_path in self.asset_paths])
def _set_patchers(self):
def _patched_for_get_asset_paths(*args, **kwargs):
# Ordered by module
return self.asset_paths
def _patch_for_read_addon_file(*args, **kwargs):
return self.template_files[args[1]]
self.patchers = [
patch.object(HomeStaticTemplateHelpers, '_get_asset_paths', _patched_for_get_asset_paths),
patch.object(HomeStaticTemplateHelpers, '_read_addon_file', _patch_for_read_addon_file),
]
def _toggle_patchers(self, mode):
self.assertTrue(mode in ('start', 'stop'))
for p in self.patchers:
getattr(p, mode)()
@tagged('static_templates')
class TestStaticInheritance(TestStaticInheritanceCommon):
# Actual test cases
def test_static_inheritance_01(self):
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<t t-name="template_1_2">
<div>And I grew strong</div>
<div>And I learned how to get along</div>
</t>
<form t-name="template_2_1" random-attr="gloria">
<div>At first I was afraid</div>
<div>I was petrified</div>
<div>But then I spent so many nights thinking how you did me wrong</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<div t-name="template_2_2">
<div>And I learned how to get along</div>
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_static_inheritance_02(self):
self.template_files = {
'module_1_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2" t-inherit="template_1_1" added="true">
<xpath expr="//div[1]" position="after">
<div>I was petrified</div>
</xpath>
</form>
</templates>
'''
}
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2" random-attr="gloria" added="true">
<div>At first I was afraid</div>
<div>I was petrified</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_static_inheritance_03(self):
self.maxDiff = None
self.template_files = {
'module_1_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2" t-inherit="template_1_1" added="true">
<xpath expr="//div[1]" position="after">
<div>I was petrified</div>
</xpath>
</form>
<form t-name="template_1_3" t-inherit="template_1_2" added="false" other="here">
<xpath expr="//div[2]" position="replace"/>
</form>
</templates>
'''
}
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2" added="true">
<div>At first I was afraid</div>
<div>I was petrified</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_3" added="false" other="here">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_static_inheritance_in_same_module(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
('module_1_file_2', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
</templates>
''',
'module_1_file_2': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="primary">
<xpath expr="//div[1]" position="after">
<div>I was petrified</div>
</xpath>
</form>
</templates>
'''
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2">
<div>At first I was afraid</div>
<div>I was petrified</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_static_inheritance_in_same_file(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="primary">
<xpath expr="//div[1]" position="after">
<div>I was petrified</div>
</xpath>
</form>
</templates>
''',
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2">
<div>At first I was afraid</div>
<div>I was petrified</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_static_inherit_extended_template(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="extension">
<xpath expr="//div[1]" position="after">
<div>I was petrified</div>
</xpath>
</form>
<form t-name="template_1_3" t-inherit="template_1_1" t-inherit-mode="primary">
<xpath expr="//div[3]" position="after">
<div>But then I spent so many nights thinking how you did me wrong</div>
</xpath>
</form>
</templates>
''',
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1">
<div>At first I was afraid</div>
<div>I was petrified</div>
<div>Kept thinking I could never live without you by my side</div>
</form>
<form t-name="template_1_3">
<div>At first I was afraid</div>
<div>I was petrified</div>
<div>Kept thinking I could never live without you by my side</div>
<div>But then I spent so many nights thinking how you did me wrong</div>
</form>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_sibling_extension(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
('module_2_file_1', 'module_2', 'bundle_1'),
('module_3_file_1', 'module_3', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_1_1">
<div>I am a man of constant sorrow</div>
<div>I've seen trouble all my days</div>
</form>
</templates>
''',
'module_2_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_2_1" t-inherit="module_1.template_1_1" t-inherit-mode="extension">
<xpath expr="//div[1]" position="after">
<div>In constant sorrow all through his days</div>
</xpath>
</form>
</templates>
''',
'module_3_file_1': b'''
<templates id="template" xml:space="preserve">
<form t-name="template_3_1" t-inherit="module_1.template_1_1" t-inherit-mode="extension">
<xpath expr="//div[2]" position="after">
<div>Oh Brother !</div>
</xpath>
</form>
</templates>
'''
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1">
<div>I am a man of constant sorrow</div>
<div>In constant sorrow all through his days</div>
<div>Oh Brother !</div>
<div>I've seen trouble all my days</div>
</form>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_static_misordered_modules(self):
self.asset_paths.reverse()
with self.assertRaises(ValueError) as ve:
HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
self.assertEqual(
str(ve.exception),
'Module module_1 not loaded or inexistent, or templates of addon being loaded (module_2) are misordered'
)
def test_static_misordered_templates(self):
self.template_files['module_2_file_1'] = b"""
<templates id="template" xml:space="preserve">
<form t-name="template_2_1" t-inherit="module_2.template_2_2" t-inherit-mode="primary">
<xpath expr="//div[1]" position="after">
<div>I was petrified</div>
</xpath>
</form>
<div t-name="template_2_2">
<div>And I learned how to get along</div>
</div>
</templates>
"""
with self.assertRaises(ValueError) as ve:
HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
self.assertEqual(
str(ve.exception),
'No template found to inherit from. Module module_2 and template name template_2_2'
)
def test_replace_in_debug_mode(self):
"""
Replacing a template's meta definition in place doesn't keep the original attrs of the template
"""
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="extension">
<xpath expr="." position="replace">
<div overriden-attr="overriden">And I grew strong</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<div overriden-attr="overriden" t-name="template_1_1">
And I grew strong
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_replace_in_debug_mode2(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="extension">
<xpath expr="." position="replace">
<div>
And I grew strong
<p>And I learned how to get along</p>
And so you're back
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<div t-name="template_1_1">
And I grew strong
<p>And I learned how to get along</p>
And so you're back
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_replace_in_debug_mode3(self):
"""Text outside of a div which will replace a whole template
becomes outside of the template
This doesn't mean anything in terms of the business of template inheritance
But it is in the XPATH specs"""
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="extension">
<xpath expr="." position="replace">
<div>
And I grew strong
<p>And I learned how to get along</p>
</div>
And so you're back
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<div t-name="template_1_1">
And I grew strong
<p>And I learned how to get along</p>
</div>
And so you're back
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_replace_root_node_tag(self):
"""
Root node IS targeted by //NODE_TAG in xpath
"""
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
<form>Inner Form</form>
</form>
<t t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="extension">
<xpath expr="//form" position="replace">
<div>
Form replacer
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<div t-name="template_1_1">
Form replacer
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_replace_root_node_tag_in_primary(self):
"""
Root node IS targeted by //NODE_TAG in xpath
"""
self.maxDiff = None
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
<form>Inner Form</form>
</form>
<form t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="primary">
<xpath expr="//form" position="replace">
<div>Form replacer</div>
</xpath>
</form>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
<form>Inner Form</form>
</form>
<div t-name="template_1_2">
Form replacer
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_inherit_primary_replace_debug(self):
"""
The inheriting template has got both its own defining attrs
and new ones if one is to replace its defining root node
"""
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="primary">
<xpath expr="." position="replace">
<div overriden-attr="overriden">
And I grew strong
<p>And I learned how to get along</p>
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<div overriden-attr="overriden" t-name="template_1_2">
And I grew strong
<p>And I learned how to get along</p>
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_replace_in_nodebug_mode1(self):
"""Comments already in the arch are ignored"""
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="template_1_1" t-inherit-mode="extension">
<xpath expr="." position="replace">
<div>
<!-- Random Comment -->
And I grew strong
<p>And I learned how to get along</p>
And so you're back
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=False)
expected = b"""
<templates>
<div t-name="template_1_1">
And I grew strong
<p>And I learned how to get along</p>
And so you're back
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_inherit_from_dotted_tname_1(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="module_1.template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="template_1_1.dot" t-inherit-mode="primary">
<xpath expr="." position="replace">
<div overriden-attr="overriden">
And I grew strong
<p>And I learned how to get along</p>
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="module_1.template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<div overriden-attr="overriden" t-name="template_1_2">
And I grew strong
<p>And I learned how to get along</p>
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_inherit_from_dotted_tname_2(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="template_1_1.dot" t-inherit-mode="primary">
<xpath expr="." position="replace">
<div overriden-attr="overriden">
And I grew strong
<p>And I learned how to get along</p>
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<div overriden-attr="overriden" t-name="template_1_2">
And I grew strong
<p>And I learned how to get along</p>
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_inherit_from_dotted_tname_2bis(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="module_1.template_1_1.dot" t-inherit-mode="primary">
<xpath expr="." position="replace">
<div overriden-attr="overriden">
And I grew strong
<p>And I learned how to get along</p>
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<div overriden-attr="overriden" t-name="template_1_2">
And I grew strong
<p>And I learned how to get along</p>
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_inherit_from_dotted_tname_2ter(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="module_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<t t-name="template_1_2" t-inherit="module_1" t-inherit-mode="primary">
<xpath expr="." position="replace">
<div overriden-attr="overriden">
And I grew strong
<p>And I learned how to get along</p>
</div>
</xpath>
</t>
</templates>
""",
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="module_1" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<div overriden-attr="overriden" t-name="template_1_2">
And I grew strong
<p>And I learned how to get along</p>
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
def test_inherit_from_dotted_tname_3(self):
self.asset_paths = [
('module_1_file_1', 'module_1', 'bundle_1'),
('module_2_file_1', 'module_2', 'bundle_1'),
]
self.template_files = {
'module_1_file_1': b"""
<templates id="template" xml:space="preserve">
<form t-name="module_1.template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
</templates>
""",
'module_2_file_1': b"""
<templates id="template" xml:space="preserve">
<t t-name="template_2_1" t-inherit="module_1.template_1_1.dot" t-inherit-mode="primary">
<xpath expr="." position="replace">
<div overriden-attr="overriden">
And I grew strong
<p>And I learned how to get along</p>
</div>
</xpath>
</t>
</templates>
"""
}
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
expected = b"""
<templates>
<form t-name="module_1.template_1_1.dot" random-attr="gloria">
<div>At first I was afraid</div>
</form>
<div overriden-attr="overriden" t-name="template_2_1">
And I grew strong
<p>And I learned how to get along</p>
</div>
</templates>
"""
self.assertXMLEqual(contents, expected)
@tagged('static_templates')
class TestHttpStaticInheritance(HttpCase):
def test_static_attachments(self):
url = '/test_module/test_file.xml'
self.env['ir.attachment'].create({
'name': 'test_attachment',
'url': url,
'res_model': 'ir.ui.view',
'type': 'binary',
'raw': b"""
<templates>
<t t-name="test_template">
<div class="test_div" />
</t>
</templates>
"""
})
self.env['ir.asset'].create({
'name': 'test_asset',
'path': url,
'bundle': 'test.bundle',
})
res = self.url_open('/web/webclient/qweb/HASH_BIDON?bundle=test.bundle')
[template] = etree.fromstring(res.text)
self.assertEqual(template.get('t-name'), 'test_template')
self.assertEqual(template[0].get('class'), 'test_div')
@tagged('-standard', 'static_templates_performance')
class TestStaticInheritancePerformance(TestStaticInheritanceCommon):
def _sick_script(self, nMod, nFilePerMod, nTemplatePerFile, stepInheritInModule=2, stepInheritPreviousModule=3):
"""
Make a sick amount of templates to test perf
nMod modules
each module has nFilePerMod files, each of which contains nTemplatePerFile templates
"""
self.asset_paths = []
self.template_files = {}
number_templates = 0
for m in range(nMod):
for f in range(nFilePerMod):
mname = 'mod_%s' % m
fname = 'mod_%s_file_%s' % (m, f)
self.asset_paths.append((fname, mname, 'bundle_1'))
_file = '<templates id="template" xml:space="preserve">'
for t in range(nTemplatePerFile):
_template = ''
if t % stepInheritInModule or t % stepInheritPreviousModule or t == 0:
_template += """
<div t-name="template_%(t_number)s_mod_%(m_number)s">
<div>Parent</div>
</div>
"""
elif not t % stepInheritInModule and t >= 1:
_template += """
<div t-name="template_%(t_number)s_mod_%(m_number)s"
t-inherit="template_%(t_inherit)s_mod_%(m_number)s"
t-inherit-mode="primary">
<xpath expr="/div/div[1]" position="before">
<div>Sick XPath</div>
</xpath>
</div>
"""
elif not t % stepInheritPreviousModule and m >= 1:
_template += """
<div t-name="template_%(t_number)s_mod_%(m_number)s"
t-inherit="mod_%(m_module_inherit)s.template_%(t_module_inherit)s_mod_%(m_module_inherit)s"
t-inherit-mode="primary">
<xpath expr="/div/div[1]" position="inside">
<div>Mental XPath</div>
</xpath>
</div>
"""
if _template:
number_templates += 1
_template_number = 1000 * f + t
_file += _template % {
't_number': _template_number,
'm_number': m,
't_inherit': _template_number - 1,
't_module_inherit': _template_number,
'm_module_inherit': m - 1,
}
_file += '</templates>'
self.template_files[fname] = _file.encode()
self.assertEqual(number_templates, nMod * nFilePerMod * nTemplatePerFile)
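# Illustrative note (not part of the original test, hypothetical parameter values):
# calling self._sick_script(nMod=2, nFilePerMod=2, nTemplatePerFile=3) is expected to
# register 2 * 2 = 4 asset files and 2 * 2 * 3 = 12 templates in total, some of them
# inheriting from a template in the same module or in the previous module depending on
# stepInheritInModule / stepInheritPreviousModule.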
def test_static_templates_treatment_linearity(self):
# With 2500 templates for starters
nMod, nFilePerMod, nTemplatePerFile = 50, 5, 10
self._sick_script(nMod, nFilePerMod, nTemplatePerFile)
before = datetime.now()
contents = HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
after = datetime.now()
delta2500 = after - before
_logger.runbot('Static Templates Inheritance: 2500 templates treated in %s seconds' % delta2500.total_seconds())
whole_tree = etree.fromstring(contents)
self.assertEqual(len(whole_tree), nMod * nFilePerMod * nTemplatePerFile)
# With 25000 templates next
nMod, nFilePerMod, nTemplatePerFile = 50, 5, 100
self._sick_script(nMod, nFilePerMod, nTemplatePerFile)
before = datetime.now()
HomeStaticTemplateHelpers.get_qweb_templates(addons=self._get_module_names(), debug=True)
after = datetime.now()
delta25000 = after - before
time_ratio = delta25000.total_seconds() / delta2500.total_seconds()
_logger.runbot('Static Templates Inheritance: 25000 templates treated in %s seconds' % delta25000.total_seconds())
_logger.runbot('Static Templates Inheritance: Computed linearity ratio: %s' % time_ratio)
self.assertLessEqual(time_ratio, 14)
| 40.863281 | 41,844 |
3,561 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import io
import base64
from PIL import Image
from odoo.http import content_disposition
from odoo.tests.common import HttpCase, tagged
@tagged('-at_install', 'post_install')
class TestImage(HttpCase):
def test_01_content_image_resize_placeholder(self):
"""The goal of this test is to make sure the placeholder image is
resized appropriately depending on the given URL parameters."""
# CASE: resize placeholder, given size but original ratio is always kept
response = self.url_open('/web/image/0/200x150')
image = Image.open(io.BytesIO(response.content))
self.assertEqual(image.size, (150, 150))
# CASE: resize placeholder to 128
response = self.url_open('/web/image/fake/0/image_128')
image = Image.open(io.BytesIO(response.content))
self.assertEqual(image.size, (128, 128))
# CASE: resize placeholder to 256
response = self.url_open('/web/image/fake/0/image_256')
image = Image.open(io.BytesIO(response.content))
self.assertEqual(image.size, (256, 256))
# CASE: resize placeholder to 1024 (but placeholder image is too small)
response = self.url_open('/web/image/fake/0/image_1024')
image = Image.open(io.BytesIO(response.content))
self.assertEqual(image.size, (256, 256))
# CASE: no size found, use placeholder original size
response = self.url_open('/web/image/fake/0/image_no_size')
image = Image.open(io.BytesIO(response.content))
self.assertEqual(image.size, (256, 256))
def test_02_content_image_Etag_304(self):
"""This test makes sure that the 304 response is properly returned if the ETag is properly set"""
attachment = self.env['ir.attachment'].create({
'datas': b"R0lGODdhAQABAIAAAP///////ywAAAAAAQABAAACAkQBADs=",
'name': 'testEtag.gif',
'public': True,
'mimetype': 'image/gif',
})
response = self.url_open('/web/image/%s' % attachment.id, timeout=None)
self.assertEqual(response.status_code, 200)
self.assertEqual(base64.b64encode(response.content), attachment.datas)
etag = response.headers.get('ETag')
response2 = self.url_open('/web/image/%s' % attachment.id, headers={"If-None-Match": etag})
self.assertEqual(response2.status_code, 304)
self.assertEqual(len(response2.content), 0)
def test_03_web_content_filename(self):
"""This test makes sure the Content-Disposition header matches the given filename"""
att = self.env['ir.attachment'].create({
'datas': b'R0lGODdhAQABAIAAAP///////ywAAAAAAQABAAACAkQBADs=',
'name': 'testFilename.gif',
'public': True,
'mimetype': 'image/gif'
})
# CASE: no filename given
res = self.url_open('/web/image/%s/0x0/?download=true' % att.id)
self.assertEqual(res.headers['Content-Disposition'], content_disposition('testFilename.gif'))
# CASE: given filename without extension
res = self.url_open('/web/image/%s/0x0/custom?download=true' % att.id)
self.assertEqual(res.headers['Content-Disposition'], content_disposition('custom.gif'))
# CASE: given filename and extension
res = self.url_open('/web/image/%s/0x0/custom.png?download=true' % att.id)
self.assertEqual(res.headers['Content-Disposition'], content_disposition('custom.png'))
| 42.903614 | 3,561 |
1,801 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
import odoo.tests
RE_ONLY = re.compile(r'QUnit\.(only|debug)\(')
@odoo.tests.tagged('post_install', '-at_install')
class WebSuite(odoo.tests.HttpCase):
def test_js(self):
# webclient desktop test suite
self.browser_js('/web/tests?mod=web&failfast', "", "", login='admin', timeout=1800)
def test_check_suite(self):
# verify no js test is using `QUnit.only` as it forbids any other test from being executed
self._check_only_call('web.qunit_suite_tests')
self._check_only_call('web.qunit_mobile_suite_tests')
def _check_only_call(self, suite):
# As we currently aren't in a request context, we can't render `web.layout`.
# Redefine it as a minimal proxy template.
self.env.ref('web.layout').write({'arch_db': '<t t-name="web.layout"><head><meta charset="utf-8"/><t t-esc="head"/></head></t>'})
assets = self.env['ir.qweb']._get_asset_content(suite)[0]
if len(assets) == 0:
self.fail("No assets found in the given test suite")
for asset in assets:
filename = asset['filename']
if not filename or asset['atype'] != 'text/javascript':
continue
with open(filename, 'rb') as fp:
if RE_ONLY.search(fp.read().decode('utf-8')):
self.fail("`QUnit.only()` or `QUnit.debug()` used in file %r" % asset['url'])
@odoo.tests.tagged('post_install', '-at_install')
class MobileWebSuite(odoo.tests.HttpCase):
browser_size = '375x667'
def test_mobile_js(self):
# webclient mobile test suite
self.browser_js('/web/tests/mobile?mod=web&failfast', "", "", login='admin', timeout=1800)
| 39.152174 | 1,801 |
2,433 |
py
|
PYTHON
|
15.0
|
# -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
from uuid import uuid4
from odoo import Command
from odoo.tests import common
class TestSessionInfo(common.HttpCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.company_a = cls.env['res.company'].create({'name': "A"})
cls.company_b = cls.env['res.company'].create({'name': "B"})
cls.company_c = cls.env['res.company'].create({'name': "C"})
cls.companies = [cls.company_a, cls.company_b, cls.company_c]
cls.user_password = "info"
cls.user = common.new_test_user(
cls.env,
"session",
email="[email protected]",
password=cls.user_password,
tz="UTC")
cls.user.write({
'company_id': cls.company_a.id,
'company_ids': [Command.set([company.id for company in cls.companies])],
})
cls.payload = json.dumps(dict(jsonrpc="2.0", method="call", id=str(uuid4())))
cls.headers = {
"Content-Type": "application/json",
}
def test_session_info(self):
""" Checks that the session_info['user_companies'] structure correspond to what is expected """
self.authenticate(self.user.login, self.user_password)
response = self.url_open("/web/session/get_session_info", data=self.payload, headers=self.headers)
self.assertEqual(response.status_code, 200)
data = response.json()
result = data["result"]
expected_allowed_companies = {
str(company.id): {
'id': company.id,
'name': company.name,
'sequence': company.sequence,
} for company in self.companies
}
expected_user_companies = {
'current_company': self.company_a.id,
'allowed_companies': expected_allowed_companies,
}
self.assertEqual(
result['user_companies'],
expected_user_companies,
"The session_info['user_companies'] does not have the expected structure")
def test_session_modules(self):
self.authenticate(self.user.login, self.user_password)
response = self.url_open("/web/session/modules", data=self.payload, headers=self.headers)
data = response.json()
self.assertTrue(isinstance(data['result'], list))
| 37.430769 | 2,433 |
1,503 |
py
|
PYTHON
|
15.0
|
from odoo.tests.common import HttpCase
class LoadMenusTests(HttpCase):
def setUp(self):
super().setUp()
self.menu = self.env["ir.ui.menu"].create({
"name": "test_menu",
"parent_id": False,
})
def search(*args, **kwargs):
return self.menu
self.patch(type(self.env["ir.ui.menu"]), "search", search)
self.authenticate("admin", "admin")
def test_load_menus(self):
menu_loaded = self.url_open("/web/webclient/load_menus/1234")
expected = {
str(self.menu.id): {
"actionID": False,
"actionModel": False,
"appID": self.menu.id,
"children": [],
"id": self.menu.id,
"name": "test_menu",
"webIcon": False,
"webIconData": False,
"xmlid": ""
},
"root": {
"actionID": False,
"actionModel": False,
"appID": False,
"children": [
self.menu.id,
],
"id": "root",
"name": "root",
"webIcon": None,
"webIconData": None,
"xmlid": "",
"backgroundImage": None,
}
}
self.assertDictEqual(
menu_loaded.json(),
expected,
"load_menus didn't return the expected value"
)
| 28.358491 | 1,503 |
5,381 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import datetime
import json
from unittest.mock import patch
from odoo.tools import mute_logger
from odoo.tests.common import HttpCase, tagged
class ProfilingHttpCase(HttpCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
# Trick: we patch db_connect() to make it return the registry; when the
# profiler calls cursor() on it, it gets a test cursor (with cls.cr as
# its actual cursor), which prevents the profiling data from being
# committed for real.
cls.patcher = patch('odoo.sql_db.db_connect', return_value=cls.registry)
cls.patcher.start()
cls.addClassCleanup(cls.patcher.stop)
def profile_rpc(self, params=None):
params = params or {}
return self.url_open(
'/web/dataset/call_kw/ir.profile/set_profiling', # use model and method in the route, as the web client does
headers={'Content-Type': 'application/json'},
data=json.dumps({'params':{
'model': 'ir.profile',
'method': 'set_profiling',
'args': [],
'kwargs': params,
}})
).json()
@tagged('post_install', '-at_install', 'profiling')
class TestProfilingWeb(ProfilingHttpCase):
@mute_logger('odoo.http')
def test_profiling_enabled(self):
# since profiling uses a direct connection to the database, 'db_connect' is patched to ensure we are using the test cursor
self.authenticate('admin', 'admin')
last_profile = self.env['ir.profile'].search([], limit=1, order='id desc')
# Trying to start profiling when not enabled
self.env['ir.config_parameter'].set_param('base.profiling_enabled_until', '')
res = self.profile_rpc({'profile': 1})
self.assertEqual(res['result']['res_model'], 'base.enable.profiling.wizard')
self.assertEqual(last_profile, self.env['ir.profile'].search([], limit=1, order='id desc'))
# Enable profiling and start blank profiling
expiration = datetime.datetime.now() + datetime.timedelta(seconds=50)
self.env['ir.config_parameter'].set_param('base.profiling_enabled_until', expiration)
res = self.profile_rpc({'profile': 1})
self.assertTrue(res['result']['session'])
self.assertEqual(last_profile, self.env['ir.profile'].search([], limit=1, order='id desc'), "profiling route shouldn't have been profiled")
# Profile a page
res = self.url_open('/web/speedscope') # profile a light route
new_profile = self.env['ir.profile'].search([], limit=1, order='id desc')
self.assertNotEqual(last_profile, new_profile, "A new profile should have been created")
self.assertEqual(new_profile.name, '/web/speedscope?')
@tagged('post_install', '-at_install', 'profiling')
class TestProfilingModes(ProfilingHttpCase):
@mute_logger('odoo.http')
def test_profile_collectors(self):
expiration = datetime.datetime.now() + datetime.timedelta(seconds=50)
self.env['ir.config_parameter'].set_param('base.profiling_enabled_until', expiration)
self.authenticate('admin', 'admin')
res = self.profile_rpc({})
self.assertEqual(res['result']['collectors'], None)
res = self.profile_rpc({'profile': 1, 'collectors': ['sql', 'traces_async']})
self.assertEqual(sorted(res['result']['collectors']), ['sql', 'traces_async'])
res = self.profile_rpc({'collectors': ['sql']})
self.assertEqual(res['result']['collectors'], ['sql'],)
res = self.profile_rpc({'profile': 0})
res = self.profile_rpc({'profile': 1})
self.assertEqual(res['result']['collectors'], ['sql'],
"Enabling and disabling profiling shouldn't have change existing preferences")
@tagged('post_install', '-at_install', 'profiling')
class TestProfilingPublic(ProfilingHttpCase):
def test_public_user_profiling(self):
last_profile = self.env['ir.profile'].search([], limit=1, order='id desc')
self.env['ir.config_parameter'].set_param('base.profiling_enabled_until', '')
self.authenticate(None, None)
res = self.url_open('/web/set_profiling?profile=1')
self.assertEqual(res.status_code, 500)
self.assertEqual(res.text, 'error: Profiling is not enabled on this database. Please contact an administrator.')
expiration = datetime.datetime.now() + datetime.timedelta(seconds=50)
self.env['ir.config_parameter'].set_param('base.profiling_enabled_until', expiration)
res = self.url_open('/web/set_profiling?profile=1')
self.assertEqual(res.status_code, 200)
res = res.json()
self.assertTrue(res.pop('session'))
self.assertEqual(res, {"collectors": ["sql", "traces_async"], "params": {}})
self.assertEqual(last_profile, self.env['ir.profile'].search([], limit=1, order='id desc'), "profiling route shouldn't have been profiled")
res = self.url_open('/web/login') # profile /web/login to avoid redirections of /
new_profile = self.env['ir.profile'].search([], limit=1, order='id desc')
self.assertNotEqual(last_profile, new_profile, "A route should have been profiled")
self.assertEqual(new_profile.name, '/web/login?')
| 49.366972 | 5,381 |
6,942 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import common
@common.tagged('post_install', '-at_install')
class TestReadProgressBar(common.TransactionCase):
"""Test for read_progress_bar"""
def setUp(self):
super(TestReadProgressBar, self).setUp()
self.Model = self.env['res.partner']
def test_read_progress_bar_m2m(self):
""" Test that read_progress_bar works with m2m field grouping """
progressbar = {
'field': 'type',
'colors': {
'contact': 'success', 'private': 'danger', 'other': 'muted',
}
}
result = self.env['res.partner'].read_progress_bar([], 'category_id', progressbar)
# check that it works when grouping by m2m field
self.assertTrue(result)
# check the null group
self.assertIn('False', result)
def test_week_grouping(self):
"""The labels associated to each record in read_progress_bar should match
the ones from read_group, even in edge cases like the en_US locale on Sundays
"""
context = {"lang": "en_US"}
groupby = "date:week"
self.Model.create({'date': '2021-05-02', 'name': "testWeekGrouping_first"}) # Sunday
self.Model.create({'date': '2021-05-09', 'name': "testWeekGrouping_second"}) # Sunday
progress_bar = {
'field': 'name',
'colors': {
"testWeekGrouping_first": 'success',
"testWeekGrouping_second": 'danger',
}
}
groups = self.Model.with_context(context).read_group(
[('name', "like", "testWeekGrouping%")], fields=['date', 'name'], groupby=[groupby])
progressbars = self.Model.with_context(context).read_progress_bar(
[('name', "like", "testWeekGrouping%")], group_by=groupby, progress_bar=progress_bar)
self.assertEqual(len(groups), 2)
self.assertEqual(len(progressbars), 2)
# format the read_progress_bar result to get a dictionary under this format : {record_name: group_name}
# original format (after read_progress_bar) is : {group_name: {record_name: count}}
pg_groups = {
next(record_name for record_name, count in data.items() if count): group_name
for group_name, data in progressbars.items()
}
self.assertEqual(groups[0][groupby], pg_groups["testWeekGrouping_first"])
self.assertEqual(groups[1][groupby], pg_groups["testWeekGrouping_second"])
def test_simple(self):
model = self.env['ir.model'].create({
'model': 'x_progressbar',
'name': 'progress_bar',
'field_id': [
(0, 0, {
'field_description': 'Country',
'name': 'x_country_id',
'ttype': 'many2one',
'relation': 'res.country',
}),
(0, 0, {
'field_description': 'Date',
'name': 'x_date',
'ttype': 'date',
}),
(0, 0, {
'field_description': 'State',
'name': 'x_state',
'ttype': 'selection',
'selection': "[('foo', 'Foo'), ('bar', 'Bar'), ('baz', 'Baz')]",
}),
],
})
c1, c2, c3 = self.env['res.country'].search([], limit=3)
self.env['x_progressbar'].create([
# week 21
{'x_country_id': c1.id, 'x_date': '2021-05-20', 'x_state': 'foo'},
{'x_country_id': c1.id, 'x_date': '2021-05-21', 'x_state': 'foo'},
{'x_country_id': c1.id, 'x_date': '2021-05-22', 'x_state': 'foo'},
{'x_country_id': c1.id, 'x_date': '2021-05-23', 'x_state': 'bar'},
# week 22
{'x_country_id': c1.id, 'x_date': '2021-05-24', 'x_state': 'baz'},
{'x_country_id': c2.id, 'x_date': '2021-05-25', 'x_state': 'foo'},
{'x_country_id': c2.id, 'x_date': '2021-05-26', 'x_state': 'bar'},
{'x_country_id': c2.id, 'x_date': '2021-05-27', 'x_state': 'bar'},
{'x_country_id': c2.id, 'x_date': '2021-05-28', 'x_state': 'baz'},
{'x_country_id': c2.id, 'x_date': '2021-05-29', 'x_state': 'baz'},
{'x_country_id': c3.id, 'x_date': '2021-05-30', 'x_state': 'foo'},
# week 23
{'x_country_id': c3.id, 'x_date': '2021-05-31', 'x_state': 'foo'},
{'x_country_id': c3.id, 'x_date': '2021-06-01', 'x_state': 'baz'},
{'x_country_id': c3.id, 'x_date': '2021-06-02', 'x_state': 'baz'},
{'x_country_id': c3.id, 'x_date': '2021-06-03', 'x_state': 'baz'},
])
progress_bar = {
'field': 'x_state',
'colors': {'foo': 'success', 'bar': 'warning', 'baz': 'danger'},
}
result = self.env['x_progressbar'].read_progress_bar([], 'x_country_id', progress_bar)
self.assertEqual(result, {
c1.display_name: {'foo': 3, 'bar': 1, 'baz': 1},
c2.display_name: {'foo': 1, 'bar': 2, 'baz': 2},
c3.display_name: {'foo': 2, 'bar': 0, 'baz': 3},
})
# check date aggregation and format
result = self.env['x_progressbar'].read_progress_bar([], 'x_date:week', progress_bar)
self.assertEqual(result, {
'W21 2021': {'foo': 3, 'bar': 1, 'baz': 0},
'W22 2021': {'foo': 2, 'bar': 2, 'baz': 3},
'W23 2021': {'foo': 1, 'bar': 0, 'baz': 3},
})
# add a computed field on model
model.write({'field_id': [
(0, 0, {
'field_description': 'Related State',
'name': 'x_state_computed',
'ttype': 'selection',
'selection': "[('foo', 'Foo'), ('bar', 'Bar'), ('baz', 'Baz')]",
'compute': "for rec in self: rec['x_state_computed'] = rec.x_state",
'depends': 'x_state',
'readonly': True,
'store': False,
}),
]})
progress_bar = {
'field': 'x_state_computed',
'colors': {'foo': 'success', 'bar': 'warning', 'baz': 'danger'},
}
result = self.env['x_progressbar'].read_progress_bar([], 'x_country_id', progress_bar)
self.assertEqual(result, {
c1.display_name: {'foo': 3, 'bar': 1, 'baz': 1},
c2.display_name: {'foo': 1, 'bar': 2, 'baz': 2},
c3.display_name: {'foo': 2, 'bar': 0, 'baz': 3},
})
result = self.env['x_progressbar'].read_progress_bar([], 'x_date:week', progress_bar)
self.assertEqual(result, {
'W21 2021': {'foo': 3, 'bar': 1, 'baz': 0},
'W22 2021': {'foo': 2, 'bar': 2, 'baz': 3},
'W23 2021': {'foo': 1, 'bar': 0, 'baz': 3},
})
| 43.936709 | 6,942 |
1,714 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests.common import BaseCase
from ..controllers import main
class ActionMungerTest(BaseCase):
def test_actual_treeview(self):
action = {
"views": [[False, "tree"], [False, "form"],
[False, "calendar"]],
"view_type": "tree",
"view_id": False,
"view_mode": "tree,form,calendar"
}
changed = action.copy()
del action['view_type']
main.fix_view_modes(changed)
self.assertEqual(changed, action)
def test_list_view(self):
action = {
"views": [[False, "tree"], [False, "form"],
[False, "calendar"]],
"view_type": "form",
"view_id": False,
"view_mode": "tree,form,calendar"
}
main.fix_view_modes(action)
self.assertEqual(action, {
"views": [[False, "list"], [False, "form"],
[False, "calendar"]],
"view_id": False,
"view_mode": "list,form,calendar"
})
def test_redundant_views(self):
action = {
"views": [[False, "tree"], [False, "form"],
[False, "calendar"], [42, "tree"]],
"view_type": "form",
"view_id": False,
"view_mode": "tree,form,calendar"
}
main.fix_view_modes(action)
self.assertEqual(action, {
"views": [[False, "list"], [False, "form"],
[False, "calendar"], [42, "list"]],
"view_id": False,
"view_mode": "list,form,calendar"
})
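# Note: main.fix_view_modes is not shown in this file. A minimal sketch consistent with
# the tests above (an assumption, not necessarily the actual implementation): the legacy
# 'view_type' key is dropped and, only when it was 'form', the deprecated "tree" view mode
# is renamed to "list" in both 'views' and 'view_mode'.
# def fix_view_modes(action):
#     if action.pop('view_type', 'form') != 'form':
#         return action
#     action['views'] = [[vid, 'list' if vmode == 'tree' else vmode]
#                        for vid, vmode in action['views']]
#     action['view_mode'] = ','.join(
#         'list' if m == 'tree' else m for m in action['view_mode'].split(','))
#     return action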
| 31.163636 | 1,714 |
415 |
py
|
PYTHON
|
15.0
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import HttpCase
class TestWebController(HttpCase):
def test_health(self):
response = self.url_open('/web/health')
self.assertEqual(response.status_code, 200)
payload = response.json()
self.assertEqual(payload['status'], 'pass')
self.assertNotIn('session_id', response.cookies)
| 34.583333 | 415 |
2,060 |
py
|
PYTHON
|
15.0
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import odoo.tests
_logger = logging.getLogger(__name__)
@odoo.tests.tagged('click_all', 'post_install', '-at_install', '-standard')
class TestMenusAdmin(odoo.tests.HttpCase):
def test_01_click_everywhere_as_admin(self):
menus = self.env['ir.ui.menu'].load_menus(False)
for app_id in menus['root']['children']:
with self.subTest(app=menus[app_id]['name']):
_logger.runbot('Testing %s', menus[app_id]['name'])
self.browser_js("/web", "odoo.__DEBUG__.services['web.clickEverywhere']('%s');" % menus[app_id]['xmlid'], "odoo.isReady === true", login="admin", timeout=600)
self.terminate_browser()
@odoo.tests.tagged('click_all', 'post_install', '-at_install', '-standard')
class TestMenusDemo(odoo.tests.HttpCase):
def test_01_click_everywhere_as_demo(self):
user_demo = self.env.ref("base.user_demo")
menus = self.env['ir.ui.menu'].with_user(user_demo.id).load_menus(False)
for app_id in menus['root']['children']:
with self.subTest(app=menus[app_id]['name']):
_logger.runbot('Testing %s', menus[app_id]['name'])
self.browser_js("/web", "odoo.__DEBUG__.services['web.clickEverywhere']('%s');" % menus[app_id]['xmlid'], "odoo.isReady === true", login="demo", timeout=600)
self.terminate_browser()
@odoo.tests.tagged('post_install', '-at_install')
class TestMenusAdminLight(odoo.tests.HttpCase):
def test_01_click_apps_menus_as_admin(self):
self.browser_js("/web", "odoo.__DEBUG__.services['web.clickEverywhere'](undefined, true);", "odoo.isReady === true", login="admin", timeout=120)
@odoo.tests.tagged('post_install', '-at_install',)
class TestMenusDemoLight(odoo.tests.HttpCase):
def test_01_click_apps_menus_as_demo(self):
self.browser_js("/web", "odoo.__DEBUG__.services['web.clickEverywhere'](undefined, true);", "odoo.isReady === true", login="demo", timeout=120)
| 47.906977 | 2,060 |
10,352 |
py
|
PYTHON
|
15.0
|
import os
from PIL import Image
from functools import partial
from odoo.tests import TransactionCase, tagged, Form
from odoo.tools import frozendict, image_to_base64, hex_to_rgb
dir_path = os.path.dirname(os.path.realpath(__file__))
_file_cache = {}
class TestBaseDocumentLayoutHelpers(TransactionCase):
#
# Public
#
def setUp(self):
super(TestBaseDocumentLayoutHelpers, self).setUp()
self.color_fields = ['primary_color', 'secondary_color']
self.company = self.env.company
self.css_color_error = 0
self._set_templates_and_layouts()
self._set_images()
def assertColors(self, checked_obj, expected):
_expected_getter = expected.get if isinstance(expected, dict) else partial(getattr, expected)
for fname in self.color_fields:
color1 = getattr(checked_obj, fname)
color2 = _expected_getter(fname)
if self.css_color_error:
self._compare_colors_rgb(color1, color2)
else:
self.assertEqual(color1, color2)
#
# Private
#
def _compare_colors_rgb(self, color1, color2):
self.assertEqual(bool(color1), bool(color2))
if not color1:
return
color1 = hex_to_rgb(color1)
color2 = hex_to_rgb(color2)
self.assertEqual(len(color1), len(color2))
for i in range(len(color1)):
self.assertAlmostEqual(color1[i], color2[i], delta=self.css_color_error)
def _get_images_for_test(self):
return ['sweden.png', 'odoo.png']
def _set_images(self):
for fname in self._get_images_for_test():
fname_split = fname.split('.')
if not fname_split[0] in _file_cache:
with Image.open(os.path.join(dir_path, fname), 'r') as img:
base64_img = image_to_base64(img, 'PNG')
primary, secondary = self.env['base.document.layout'].extract_image_primary_secondary_colors(base64_img)
_img = frozendict({
'img': base64_img,
'colors': {
'primary_color': primary,
'secondary_color': secondary,
},
})
_file_cache[fname_split[0]] = _img
self.company_imgs = frozendict(_file_cache)
def _set_templates_and_layouts(self):
self.layout_template1 = self.env['ir.ui.view'].create({
'name': 'layout_template1',
'key': 'web.layout_template1',
'type': 'qweb',
'arch': '''<div></div>''',
})
self.env['ir.model.data'].create({
'name': self.layout_template1.name,
'model': 'ir.ui.view',
'module': 'web',
'res_id': self.layout_template1.id,
})
self.default_colors = {
'primary_color': '#000000',
'secondary_color': '#000000',
}
self.report_layout1 = self.env['report.layout'].create({
'view_id': self.layout_template1.id,
'name': 'report_%s' % self.layout_template1.name,
})
self.layout_template2 = self.env['ir.ui.view'].create({
'name': 'layout_template2',
'key': 'web.layout_template2',
'type': 'qweb',
'arch': '''<div></div>''',
})
self.env['ir.model.data'].create({
'name': self.layout_template2.name,
'model': 'ir.ui.view',
'module': 'web',
'res_id': self.layout_template2.id,
})
self.report_layout2 = self.env['report.layout'].create({
'view_id': self.layout_template2.id,
'name': 'report_%s' % self.layout_template2.name,
})
@tagged('document_layout', "post_install", "-at_install")
class TestBaseDocumentLayout(TestBaseDocumentLayoutHelpers):
# Logo change Tests
def test_company_no_color_change_logo(self):
"""When neither a logo nor the colors are set
The wizard displays the colors of the report layout
Changing logo means the colors on the wizard change too
Emptying the logo works and doesn't change the colors"""
self.company.write({
'primary_color': False,
'secondary_color': False,
'logo': False,
'external_report_layout_id': self.env.ref('web.layout_template1').id,
'paperformat_id': self.env.ref('base.paperformat_us').id,
})
default_colors = self.default_colors
with Form(self.env['base.document.layout']) as doc_layout:
self.assertColors(doc_layout, default_colors)
self.assertEqual(doc_layout.company_id, self.company)
doc_layout.logo = self.company_imgs['sweden']['img']
self.assertColors(doc_layout, self.company_imgs['sweden']['colors'])
doc_layout.logo = ''
self.assertColors(doc_layout, self.company_imgs['sweden']['colors'])
self.assertEqual(doc_layout.logo, '')
def test_company_no_color_but_logo_change_logo(self):
"""When company colors are not set, but a logo is,
the wizard displays the computed colors from the logo"""
self.company.write({
'primary_color': '#ff0080',
'secondary_color': '#00ff00',
'logo': self.company_imgs['sweden']['img'],
'paperformat_id': self.env.ref('base.paperformat_us').id,
})
with Form(self.env['base.document.layout']) as doc_layout:
self.assertColors(doc_layout, self.company)
doc_layout.logo = self.company_imgs['odoo']['img']
self.assertColors(doc_layout, self.company_imgs['odoo']['colors'])
def test_company_colors_change_logo(self):
"""changes of the logo implies displaying the new computed colors"""
self.company.write({
'primary_color': '#ff0080',
'secondary_color': '#00ff00',
'logo': False,
'paperformat_id': self.env.ref('base.paperformat_us').id,
})
with Form(self.env['base.document.layout']) as doc_layout:
self.assertColors(doc_layout, self.company)
doc_layout.logo = self.company_imgs['odoo']['img']
self.assertColors(doc_layout, self.company_imgs['odoo']['colors'])
def test_company_colors_and_logo_change_logo(self):
"""The colors of the company may differ from the one the logo computes
Opening the wizard in these condition displays the company's colors
When the logo changes, colors must change according to the logo"""
self.company.write({
'primary_color': '#ff0080',
'secondary_color': '#00ff00',
'logo': self.company_imgs['sweden']['img'],
'paperformat_id': self.env.ref('base.paperformat_us').id,
})
with Form(self.env['base.document.layout']) as doc_layout:
self.assertColors(doc_layout, self.company)
doc_layout.logo = self.company_imgs['odoo']['img']
self.assertColors(doc_layout, self.company_imgs['odoo']['colors'])
# Layout change tests
def test_company_colors_reset_colors(self):
"""Reset the colors when they differ from the ones originally
computed from the company logo"""
self.company.write({
'primary_color': '#ff0080',
'secondary_color': '#00ff00',
'logo': self.company_imgs['sweden']['img'],
'paperformat_id': self.env.ref('base.paperformat_us').id,
})
with Form(self.env['base.document.layout']) as doc_layout:
self.assertColors(doc_layout, self.company)
doc_layout.primary_color = doc_layout.logo_primary_color
doc_layout.secondary_color = doc_layout.logo_secondary_color
self.assertColors(doc_layout, self.company_imgs['sweden']['colors'])
def test_parse_company_colors_grayscale(self):
"""Grayscale images with transparency - make sure the color extraction does not crash"""
self.company.write({
'primary_color': '#ff0080',
'secondary_color': '#00ff00',
'paperformat_id': self.env.ref('base.paperformat_us').id,
})
with Form(self.env['base.document.layout']) as doc_layout:
with Image.open(os.path.join(dir_path, 'logo_ci.png'), 'r') as img:
base64_img = image_to_base64(img, 'PNG')
doc_layout.logo = base64_img
self.assertNotEqual(None, doc_layout.primary_color)
# /!\ This case is NOT supported, and probably not supportable
# res.partner forcibly resizes the image it is given
# so res.company._get_logo differs from res.partner.[default image]
# def test_company_no_colors_default_logo_and_layout_change_layout(self):
# """When the default YourCompany logo is set, and no colors are set on company:
# change wizard's color according to template"""
# self.company.write({
# 'primary_color': False,
# 'secondary_color': False,
# 'external_report_layout_id': self.layout_template1.id,
# })
# default_colors = self.default_colors
# with Form(self.env['base.document.layout']) as doc_layout:
# self.assertColors(doc_layout, default_colors)
# doc_layout.report_layout_id = self.report_layout2
# self.assertColors(doc_layout, self.report_layout2)
def test_company_details_blank_lines(self):
"""Test that the company address is generated dynamically using only the fields that are defined,
without leaving any blank lines."""
# Make sure there is no blank line in the company details.
doc_layout_1 = self.env['base.document.layout'].create({'company_id': self.company.id})
self.assertFalse('\n<br>\n' in doc_layout_1.company_details)
# Make sure that 'street2' (an optional field, initially blank),
# appears in the company details when it is defined.
self.company.write({'street2': 'street_2_detail'})
doc_layout_2 = self.env['base.document.layout'].create({'company_id': self.company.id})
self.assertTrue('street_2_detail' in doc_layout_2.company_details)
| 43.495798 | 10,352 |
9,541 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import hashlib
import json
import odoo
from odoo import api, http, models
from odoo.http import request
from odoo.tools import file_open, image_process, ustr
from odoo.addons.web.controllers.main import HomeStaticTemplateHelpers
class Http(models.AbstractModel):
_inherit = 'ir.http'
def webclient_rendering_context(self):
return {
'menu_data': request.env['ir.ui.menu'].load_menus(request.session.debug),
'session_info': self.session_info(),
}
def session_info(self):
user = request.env.user
version_info = odoo.service.common.exp_version()
session_uid = request.session.uid
user_context = request.session.get_context() if session_uid else {}
IrConfigSudo = self.env['ir.config_parameter'].sudo()
max_file_upload_size = int(IrConfigSudo.get_param(
'web.max_file_upload_size',
default=128 * 1024 * 1024, # 128MiB
))
mods = odoo.conf.server_wide_modules or []
if request.db:
mods = list(request.registry._init_modules) + mods
lang = user_context.get("lang")
translation_hash = request.env['ir.translation'].sudo().get_web_translations_hash(mods, lang)
session_info = {
"uid": session_uid,
"is_system": user._is_system() if session_uid else False,
"is_admin": user._is_admin() if session_uid else False,
"user_context": user_context,
"db": request.session.db,
"server_version": version_info.get('server_version'),
"server_version_info": version_info.get('server_version_info'),
"support_url": "https://www.odoo.com/buy",
"name": user.name,
"username": user.login,
"partner_display_name": user.partner_id.display_name,
"company_id": user.company_id.id if session_uid else None, # YTI TODO: Remove this from the user context
"partner_id": user.partner_id.id if session_uid and user.partner_id else None,
"web.base.url": IrConfigSudo.get_param('web.base.url', default=''),
"active_ids_limit": int(IrConfigSudo.get_param('web.active_ids_limit', default='20000')),
'profile_session': request.session.profile_session,
'profile_collectors': request.session.profile_collectors,
'profile_params': request.session.profile_params,
"max_file_upload_size": max_file_upload_size,
"home_action_id": user.action_id.id,
"cache_hashes": {
"translations": translation_hash,
},
"currencies": self.sudo().get_currencies(),
}
if self.env.user.has_group('base.group_user'):
# the following is only useful in the context of webclient bootstrapping,
# but is still included in some other calls (e.g. '/web/session/authenticate');
# to avoid access errors and unnecessary information, it is only included for users
# with access to the backend ('internal'-type users)
qweb_checksum = HomeStaticTemplateHelpers.get_qweb_templates_checksum(debug=request.session.debug, bundle="web.assets_qweb")
menus = request.env['ir.ui.menu'].load_menus(request.session.debug)
ordered_menus = {str(k): v for k, v in menus.items()}
menu_json_utf8 = json.dumps(ordered_menus, default=ustr, sort_keys=True).encode()
session_info['cache_hashes'].update({
"load_menus": hashlib.sha512(menu_json_utf8).hexdigest()[:64], # sha512/256
"qweb": qweb_checksum,
})
session_info.update({
# current_company should be default_company
"user_companies": {
'current_company': user.company_id.id,
'allowed_companies': {
comp.id: {
'id': comp.id,
'name': comp.name,
'sequence': comp.sequence,
} for comp in user.company_ids
},
},
"show_effect": True,
"display_switch_company_menu": user.has_group('base.group_multi_company') and len(user.company_ids) > 1,
})
return session_info
@api.model
def get_frontend_session_info(self):
session_info = {
'is_admin': request.session.uid and self.env.user._is_admin() or False,
'is_system': request.session.uid and self.env.user._is_system() or False,
'is_website_user': request.session.uid and self.env.user._is_public() or False,
'user_id': request.session.uid and self.env.user.id or False,
'is_frontend': True,
'profile_session': request.session.profile_session,
'profile_collectors': request.session.profile_collectors,
'profile_params': request.session.profile_params,
'show_effect': request.env['ir.config_parameter'].sudo().get_param('base_setup.show_effect'),
}
if request.session.uid:
version_info = odoo.service.common.exp_version()
session_info.update({
'server_version': version_info.get('server_version'),
'server_version_info': version_info.get('server_version_info')
})
return session_info
def get_currencies(self):
Currency = request.env['res.currency']
currencies = Currency.search([]).read(['symbol', 'position', 'decimal_places'])
return {c['id']: {'symbol': c['symbol'], 'position': c['position'], 'digits': [69,c['decimal_places']]} for c in currencies}
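# Sketch of the shape returned by get_currencies() (illustrative, with made-up ids and values):
# {
#     1: {'symbol': '€', 'position': 'after', 'digits': [69, 2]},
#     2: {'symbol': '$', 'position': 'before', 'digits': [69, 2]},
# }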
@api.model
def _get_content_common(self, xmlid=None, model='ir.attachment', res_id=None, field='datas',
unique=None, filename=None, filename_field='name', download=None, mimetype=None,
access_token=None, token=None):
status, headers, content = self.binary_content(
xmlid=xmlid, model=model, id=res_id, field=field, unique=unique, filename=filename,
filename_field=filename_field, download=download, mimetype=mimetype, access_token=access_token
)
if status != 200:
return self._response_by_status(status, headers, content)
else:
content_base64 = base64.b64decode(content)
headers.append(('Content-Length', len(content_base64)))
response = request.make_response(content_base64, headers)
return response
@api.model
def _content_image(self, xmlid=None, model='ir.attachment', res_id=None, field='datas',
filename_field='name', unique=None, filename=None, mimetype=None, download=None,
width=0, height=0, crop=False, quality=0, access_token=None, **kwargs):
status, headers, image_base64 = self.binary_content(
xmlid=xmlid, model=model, id=res_id, field=field, unique=unique, filename=filename,
filename_field=filename_field, download=download, mimetype=mimetype,
default_mimetype='image/png', access_token=access_token
)
return self._content_image_get_response(
status, headers, image_base64, model=model, field=field, download=download,
width=width, height=height, crop=crop, quality=quality)
@api.model
def _content_image_get_response(self, status, headers, image_base64, model='ir.attachment',
field='datas', download=None, width=0, height=0, crop=False, quality=0):
if status in [301, 304] or (status != 200 and download):
return self._response_by_status(status, headers, image_base64)
if not image_base64:
placeholder_filename = False
if model in self.env:
placeholder_filename = self.env[model]._get_placeholder_filename(field)
placeholder_content = self._placeholder(image=placeholder_filename)
# Since we set a placeholder for any missing image, the status must be 200. In case one
# wants to configure a specific 404 page (e.g. through nginx), a 404 status would cause
# trouble.
status = 200
image_base64 = base64.b64encode(placeholder_content)
if not (width or height):
width, height = odoo.tools.image_guess_size_from_field_name(field)
try:
image_base64 = image_process(image_base64, size=(int(width), int(height)), crop=crop, quality=int(quality))
except Exception:
return request.not_found()
content = base64.b64decode(image_base64)
headers = http.set_safe_image_headers(headers, content)
response = request.make_response(content, headers)
response.status_code = status
return response
@api.model
def _placeholder_image_get_response(self, placeholder_base64):
content = base64.b64decode(placeholder_base64)
headers = http.set_safe_image_headers([], content)
response = request.make_response(content, headers)
response.status_code = 200
return response
@api.model
def _placeholder(self, image=False):
if not image:
image = 'web/static/img/placeholder.png'
with file_open(image, 'rb', filter_ext=('.png', '.jpg')) as fd:
return fd.read()
| 49.692708 | 9,541 |
35,081 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
import babel.dates
import pytz
from lxml import etree
import base64
import json
from odoo import _, _lt, api, fields, models
from odoo.osv.expression import AND, TRUE_DOMAIN, normalize_domain
from odoo.tools import date_utils, lazy
from odoo.tools.misc import get_lang
from odoo.exceptions import UserError
from collections import defaultdict
SEARCH_PANEL_ERROR_MESSAGE = _lt("Too many items to display.")
def is_true_domain(domain):
return normalize_domain(domain) == TRUE_DOMAIN
class lazymapping(defaultdict):
def __missing__(self, key):
value = self.default_factory(key)
self[key] = value
return value
DISPLAY_DATE_FORMATS = {
'day': 'dd MMM yyyy',
'week': "'W'w YYYY",
'month': 'MMMM yyyy',
'quarter': 'QQQ yyyy',
'year': 'yyyy',
}
class IrActionsActWindowView(models.Model):
_inherit = 'ir.actions.act_window.view'
view_mode = fields.Selection(selection_add=[
('qweb', 'QWeb')
], ondelete={'qweb': 'cascade'})
class Base(models.AbstractModel):
_inherit = 'base'
@api.model
def web_search_read(self, domain=None, fields=None, offset=0, limit=None, order=None):
"""
Performs a search_read and a search_count.
:param domain: search domain
:param fields: list of fields to read
:param limit: maximum number of records to read
:param offset: number of records to skip
:param order: columns to sort results
:return: {
'records': array of read records (result of a call to 'search_read')
'length': number of records matching the domain (result of a call to 'search_count')
}
"""
records = self.search_read(domain, fields, offset=offset, limit=limit, order=order)
if not records:
return {
'length': 0,
'records': []
}
if limit and (len(records) == limit or self.env.context.get('force_search_count')):
length = self.search_count(domain)
else:
length = len(records) + offset
return {
'length': length,
'records': records
}
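# Illustrative example (hypothetical model, domain and values, not from the original source):
# self.env['res.partner'].web_search_read(
#     domain=[('is_company', '=', True)], fields=['name'], offset=0, limit=80)
# could return something like
# {'length': 142, 'records': [{'id': 7, 'name': 'Acme'}, ...]}
# 'length' comes from search_count() only when a limit is given and the page is full
# (len(records) == limit) or 'force_search_count' is set in the context;
# otherwise it is simply len(records) + offset.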
@api.model
def web_read_group(self, domain, fields, groupby, limit=None, offset=0, orderby=False,
lazy=True, expand=False, expand_limit=None, expand_orderby=False):
"""
Returns the result of a read_group (and optionally search for and read records inside each
group), and the total number of groups matching the search domain.
:param domain: search domain
:param fields: list of fields to read (see ``fields`` param of ``read_group``)
:param groupby: list of fields to group on (see ``groupby`` param of ``read_group``)
:param limit: see ``limit`` param of ``read_group``
:param offset: see ``offset`` param of ``read_group``
:param orderby: see ``orderby`` param of ``read_group``
:param lazy: see ``lazy`` param of ``read_group``
:param expand: if true, and groupby only contains one field, read records inside each group
:param expand_limit: maximum number of records to read in each group
:param expand_orderby: order to apply when reading records in each group
:return: {
'groups': array of read groups
'length': total number of groups
}
"""
groups = self._web_read_group(domain, fields, groupby, limit, offset, orderby, lazy, expand,
expand_limit, expand_orderby)
if not groups:
length = 0
elif limit and len(groups) == limit:
# We need to fetch all groups to know the total number
# this cannot be done all at once to avoid MemoryError
length = limit
chunk_size = 100000
while True:
more = len(self.read_group(domain, ['display_name'], groupby, offset=length, limit=chunk_size, lazy=True))
length += more
if more < chunk_size:
break
else:
length = len(groups) + offset
return {
'groups': groups,
'length': length
}
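# Illustrative note on the 'length' computation above (hypothetical numbers): if limit=80
# and read_group returns exactly 80 groups, the total is recomputed by re-reading groups
# in chunks of 100000 starting at offset=80 until a chunk comes back short; e.g. 35
# remaining groups would give {'groups': <80 groups>, 'length': 115}.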
@api.model
def _web_read_group(self, domain, fields, groupby, limit=None, offset=0, orderby=False,
lazy=True, expand=False, expand_limit=None, expand_orderby=False):
"""
Performs a read_group and optionally a web_search_read for each group.
See ``web_read_group`` for params description.
:returns: array of groups
"""
groups = self.read_group(domain, fields, groupby, offset=offset, limit=limit,
orderby=orderby, lazy=lazy)
if expand and len(groupby) == 1:
for group in groups:
group['__data'] = self.web_search_read(domain=group['__domain'], fields=fields,
offset=0, limit=expand_limit,
order=expand_orderby)
return groups
@api.model
def read_progress_bar(self, domain, group_by, progress_bar):
"""
Gets the data needed for all the kanban column progressbars.
These are fetched alongside read_group operation.
:param domain: the domain used in the kanban view to filter records
:param group_by: the name of the field used to group records into
kanban columns
:param progress_bar: the <progressbar/> declaration attributes
(field, colors, sum)
:return: a dictionary mapping group_by values to dictionaries mapping
progress bar field values to the related number of records
"""
group_by_fname = group_by.partition(':')[0]
field_type = self._fields[group_by_fname].type
if field_type == 'selection':
selection_labels = dict(self.fields_get()[group_by]['selection'])
def adapt(value):
if field_type == 'selection':
value = selection_labels.get(value, False)
if isinstance(value, tuple):
value = value[1] # FIXME should use technical value (0)
return value
result = {}
for group in self._read_progress_bar(domain, group_by, progress_bar):
group_by_value = str(adapt(group[group_by]))
field_value = group[progress_bar['field']]
if group_by_value not in result:
result[group_by_value] = dict.fromkeys(progress_bar['colors'], 0)
if field_value in result[group_by_value]:
result[group_by_value][field_value] += group['__count']
return result
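# Illustrative example (hypothetical field names and counts, not from the original source):
# read_progress_bar([], 'stage_id', {'field': 'kanban_state', 'colors':
#     {'normal': 'muted', 'done': 'success', 'blocked': 'danger'}})
# could return {'New': {'normal': 3, 'done': 1, 'blocked': 0}, ...}, i.e. one entry per
# kanban column (stringified group value) mapping each color key to a record count.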
def _read_progress_bar(self, domain, group_by, progress_bar):
""" Implementation of read_progress_bar() that returns results in the
format of read_group().
"""
try:
fname = progress_bar['field']
return self.read_group(domain, [fname], [group_by, fname], lazy=False)
except UserError:
# possibly failed because of grouping on or aggregating non-stored
# field; fallback on alternative implementation
pass
# Workaround to match read_group's infrastructure
# TODO in master: harmonize this function and read_group to allow factorization
group_by_name = group_by.partition(':')[0]
group_by_modifier = group_by.partition(':')[2] or 'month'
records_values = self.search_read(domain or [], [progress_bar['field'], group_by_name])
field_type = self._fields[group_by_name].type
for record_values in records_values:
group_by_value = record_values.pop(group_by_name)
# Again, imitating what _read_group_format_result and _read_group_prepare_data do
if group_by_value and field_type in ['date', 'datetime']:
locale = get_lang(self.env).code
group_by_value = date_utils.start_of(fields.Datetime.to_datetime(group_by_value), group_by_modifier)
group_by_value = pytz.timezone('UTC').localize(group_by_value)
tz_info = None
if field_type == 'datetime' and self._context.get('tz') in pytz.all_timezones:
tz_info = self._context.get('tz')
group_by_value = babel.dates.format_datetime(
group_by_value, format=DISPLAY_DATE_FORMATS[group_by_modifier],
tzinfo=tz_info, locale=locale)
else:
group_by_value = babel.dates.format_date(
group_by_value, format=DISPLAY_DATE_FORMATS[group_by_modifier],
locale=locale)
if field_type == 'many2many' and isinstance(group_by_value, list):
group_by_value = str(tuple(group_by_value)) or False
record_values[group_by] = group_by_value
record_values['__count'] = 1
return records_values
##### qweb view hooks #####
@api.model
def qweb_render_view(self, view_id, domain):
assert view_id
return self.env['ir.qweb']._render(
view_id, {
**self.env['ir.ui.view']._prepare_qcontext(),
**self._qweb_prepare_qcontext(view_id, domain),
})
def _qweb_prepare_qcontext(self, view_id, domain):
"""
Base qcontext for rendering qweb views bound to this model
"""
return {
'model': self,
'domain': domain,
# not strictly necessary as env is already part of the
# non-minimal qcontext
'context': self.env.context,
'records': lazy(self.search, domain),
}
@api.model
def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
r = super().fields_view_get(view_id, view_type, toolbar, submenu)
# avoid leaking the raw (un-rendered) template, also avoids bloating
# the response payload for no reason. Only send the root node,
# to send attributes such as `js_class`.
if r['type'] == 'qweb':
root = etree.fromstring(r['arch'])
r['arch'] = etree.tostring(etree.Element('qweb', root.attrib))
return r
@api.model
def _search_panel_field_image(self, field_name, **kwargs):
"""
Return the values in the image of the provided domain by field_name.
:param model_domain: domain whose image is returned
:param extra_domain: extra domain to use when counting records associated with field values
:param field_name: the name of a field (type many2one or selection)
:param enable_counters: whether to set the key '__count' in image values
:param only_counters: whether to retrieve information on the model_domain image or only
counts based on model_domain and extra_domain. In the latter case,
the counts are set regardless of enable_counters.
:param limit: integer, maximal number of values to fetch
:param set_limit: boolean, whether to use the provided limit (if any)
:return: a dict of the form
{
id: { 'id': id, 'display_name': display_name, ('__count': c,) },
...
}
"""
enable_counters = kwargs.get('enable_counters')
only_counters = kwargs.get('only_counters')
extra_domain = kwargs.get('extra_domain', [])
no_extra = is_true_domain(extra_domain)
model_domain = kwargs.get('model_domain', [])
count_domain = AND([model_domain, extra_domain])
limit = kwargs.get('limit')
set_limit = kwargs.get('set_limit')
if only_counters:
return self._search_panel_domain_image(field_name, count_domain, True)
model_domain_image = self._search_panel_domain_image(field_name, model_domain,
enable_counters and no_extra,
set_limit and limit,
)
if enable_counters and not no_extra:
count_domain_image = self._search_panel_domain_image(field_name, count_domain, True)
for id, values in model_domain_image.items():
element = count_domain_image.get(id)
values['__count'] = element['__count'] if element else 0
return model_domain_image
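# Illustrative note (hypothetical values): with enable_counters=True and a non-trivial
# extra_domain, the image of model_domain is computed first, then '__count' is taken from
# the image of model_domain AND extra_domain, defaulting to 0 for values that only appear
# in the first image, e.g. {3: {'id': 3, 'display_name': 'Services', '__count': 0}}.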
@api.model
def _search_panel_domain_image(self, field_name, domain, set_count=False, limit=False):
"""
Return the values in the image of the provided domain by field_name.
:param domain: domain whose image is returned
:param field_name: the name of a field (type many2one or selection)
:param set_count: whether to set the key '__count' in image values. Default is False.
:param limit: integer, maximal number of values to fetch. Default is False.
:return: a dict of the form
{
id: { 'id': id, 'display_name': display_name, ('__count': c,) },
...
}
"""
field = self._fields[field_name]
if field.type == 'many2one':
def group_id_name(value):
return value
else:
# field type is selection: see doc above
desc = self.fields_get([field_name])[field_name]
field_name_selection = dict(desc['selection'])
def group_id_name(value):
return value, field_name_selection[value]
domain = AND([
domain,
[(field_name, '!=', False)],
])
groups = self.read_group(domain, [field_name], [field_name], limit=limit)
domain_image = {}
for group in groups:
id, display_name = group_id_name(group[field_name])
values = {
'id': id,
'display_name': display_name,
}
if set_count:
values['__count'] = group[field_name + '_count']
domain_image[id] = values
return domain_image
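# Sketch of the structure returned for a many2one field (made-up ids and names):
# {
#     4: {'id': 4, 'display_name': 'Consulting', '__count': 12},
#     9: {'id': 9, 'display_name': 'Support', '__count': 3},
# }
# where '__count' is only present when set_count is True.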
@api.model
def _search_panel_global_counters(self, values_range, parent_name):
"""
Modify in place values_range to transform the (local) counts
into global counts (local count + children local counts)
in case a parent field parent_name has been set on the range values.
Note that we save the initial (local) counts into an auxiliary dict
before they could be changed in the for loop below.
:param values_range: dict of the form
{
id: { 'id': id, '__count': c, parent_name: parent_id, ... }
...
}
:param parent_name: string, indicates which key determines the parent
"""
local_counters = lazymapping(lambda id: values_range[id]['__count'])
for id in values_range:
values = values_range[id]
# here count is the initial value = local count set on values
count = local_counters[id]
if count:
parent_id = values[parent_name]
while parent_id:
values = values_range[parent_id]
local_counters[parent_id]
values['__count'] += count
parent_id = values[parent_name]
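# Worked example (hypothetical ids, assuming parent_name == 'parent_id'): given values_range
# {1: {'id': 1, '__count': 2, 'parent_id': False},
#  2: {'id': 2, '__count': 3, 'parent_id': 1},
#  3: {'id': 3, '__count': 1, 'parent_id': 2}},
# the local counts 2/3/1 become global counts 6/4/1: each value's local count is added
# to the counts of all of its ancestors.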
@api.model
def _search_panel_sanitized_parent_hierarchy(self, records, parent_name, ids):
"""
Filter the provided list of records to ensure the following properties of
the resulting sublist:
1) it is closed for the parent relation
2) every record in it is an ancestor of a record with id in ids
(if ids = records.ids, that condition is automatically satisfied)
3) it is maximal among other sublists with properties 1 and 2.
:param records: the list of records to filter; the records must have the form
{ 'id': id, parent_name: False or (id, display_name),... }
:param parent_name: string, indicates which key determines the parent
:param ids: list of record ids
:return: the sublist of records with the above properties
"""
def get_parent_id(record):
value = record[parent_name]
return value and value[0]
allowed_records = { record['id']: record for record in records }
records_to_keep = {}
for id in ids:
record_id = id
ancestor_chain = {}
chain_is_fully_included = True
while chain_is_fully_included and record_id:
known_status = records_to_keep.get(record_id)
if known_status is not None:
# the record and its known ancestors have already been considered
chain_is_fully_included = known_status
break
record = allowed_records.get(record_id)
if record:
ancestor_chain[record_id] = record
record_id = get_parent_id(record)
else:
chain_is_fully_included = False
for id, record in ancestor_chain.items():
records_to_keep[id] = chain_is_fully_included
# we keep initial order
return [rec for rec in records if records_to_keep.get(rec['id'])]
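# Illustrative example (hypothetical records): with records A(id=1, parent=False),
# B(id=2, parent=1) and C(id=3, parent=7) where id 7 is not among the records, calling
# this helper with ids=[2, 3] keeps A and B (B's ancestor chain is fully included) and
# drops C (its parent is missing), preserving the initial record order.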
@api.model
def _search_panel_selection_range(self, field_name, **kwargs):
"""
Return the values of a field of type selection possibly enriched
with counts of associated records in domain.
:param enable_counters: whether to set the key '__count' on values returned.
Default is False.
:param expand: whether to return the full range of values for the selection
field or only the field image values. Default is False.
:param field_name: the name of a field of type selection
:param model_domain: domain used to determine the field image values and counts.
Default is [].
:return: a list of dicts of the form
{ 'id': id, 'display_name': display_name, ('__count': c,) }
with key '__count' set if enable_counters is True
"""
enable_counters = kwargs.get('enable_counters')
expand = kwargs.get('expand')
if enable_counters or not expand:
domain_image = self._search_panel_field_image(field_name, only_counters=expand, **kwargs)
if not expand:
return list(domain_image.values())
selection = self.fields_get([field_name])[field_name]['selection']
selection_range = []
for value, label in selection:
values = {
'id': value,
'display_name': label,
}
if enable_counters:
image_element = domain_image.get(value)
values['__count'] = image_element['__count'] if image_element else 0
selection_range.append(values)
return selection_range
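    # Illustrative sketch (not part of the original source, hypothetical 'state'
    # selection field): with enable_counters=True and expand=True the method returns
    # the full selection, zero-filling values absent from the domain image:
    #   [{'id': 'draft', 'display_name': 'Draft', '__count': 4},
    #    {'id': 'done', 'display_name': 'Done', '__count': 0}]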
@api.model
def search_panel_select_range(self, field_name, **kwargs):
"""
Return possible values of the field field_name (case select="one"),
possibly with counters, and the parent field (if any and required)
used to hierarchize them.
:param field_name: the name of a field;
of type many2one or selection.
:param category_domain: domain generated by categories. Default is [].
:param comodel_domain: domain of field values (if relational). Default is [].
:param enable_counters: whether to count records by value. Default is False.
:param expand: whether to return the full range of field values in comodel_domain
or only the field image values (possibly filtered and/or completed
with parents if hierarchize is set). Default is False.
:param filter_domain: domain generated by filters. Default is [].
:param hierarchize: determines if the categories must be displayed hierarchically
(if possible). If set to true and _parent_name is set on the
comodel field, the information necessary for the hierarchization will
be returned. Default is True.
:param limit: integer, maximal number of values to fetch. Default is None.
:param search_domain: base domain of search. Default is [].
:return: {
'parent_field': parent field on the comodel of field, or False
'values': array of dictionaries containing some info on the records
available on the comodel of the field 'field_name'.
The display name, the __count (how many records with that value)
and possibly parent_field are fetched.
}
or an object with an error message when limit is defined and is reached.
"""
field = self._fields[field_name]
supported_types = ['many2one', 'selection']
if field.type not in supported_types:
types = dict(self.env["ir.model.fields"]._fields["ttype"]._description_selection(self.env))
raise UserError(_(
'Only types %(supported_types)s are supported for category (found type %(field_type)s)',
supported_types=", ".join(types[t] for t in supported_types),
field_type=types[field.type],
))
model_domain = kwargs.get('search_domain', [])
extra_domain = AND([
kwargs.get('category_domain', []),
kwargs.get('filter_domain', []),
])
if field.type == 'selection':
return {
'parent_field': False,
'values': self._search_panel_selection_range(field_name, model_domain=model_domain,
extra_domain=extra_domain, **kwargs
),
}
Comodel = self.env[field.comodel_name].with_context(hierarchical_naming=False)
field_names = ['display_name']
hierarchize = kwargs.get('hierarchize', True)
parent_name = False
if hierarchize and Comodel._parent_name in Comodel._fields:
parent_name = Comodel._parent_name
field_names.append(parent_name)
def get_parent_id(record):
value = record[parent_name]
return value and value[0]
else:
hierarchize = False
comodel_domain = kwargs.get('comodel_domain', [])
enable_counters = kwargs.get('enable_counters')
expand = kwargs.get('expand')
limit = kwargs.get('limit')
if enable_counters or not expand:
domain_image = self._search_panel_field_image(field_name,
model_domain=model_domain, extra_domain=extra_domain,
only_counters=expand,
set_limit= limit and not (expand or hierarchize or comodel_domain), **kwargs
)
if not (expand or hierarchize or comodel_domain):
values = list(domain_image.values())
if limit and len(values) == limit:
return {'error_msg': str(SEARCH_PANEL_ERROR_MESSAGE)}
return {
'parent_field': parent_name,
'values': values,
}
if not expand:
image_element_ids = list(domain_image.keys())
if hierarchize:
condition = [('id', 'parent_of', image_element_ids)]
else:
condition = [('id', 'in', image_element_ids)]
comodel_domain = AND([comodel_domain, condition])
comodel_records = Comodel.search_read(comodel_domain, field_names, limit=limit)
if hierarchize:
ids = [rec['id'] for rec in comodel_records] if expand else image_element_ids
comodel_records = self._search_panel_sanitized_parent_hierarchy(comodel_records, parent_name, ids)
if limit and len(comodel_records) == limit:
return {'error_msg': str(SEARCH_PANEL_ERROR_MESSAGE)}
field_range = {}
for record in comodel_records:
record_id = record['id']
values = {
'id': record_id,
'display_name': record['display_name'],
}
if hierarchize:
values[parent_name] = get_parent_id(record)
if enable_counters:
image_element = domain_image.get(record_id)
values['__count'] = image_element['__count'] if image_element else 0
field_range[record_id] = values
if hierarchize and enable_counters:
self._search_panel_global_counters(field_range, parent_name)
return {
'parent_field': parent_name,
'values': list(field_range.values()),
}
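    # Illustrative sketch (not part of the original source, hypothetical 'categ_id'
    # many2one): a category search panel typically calls
    #   self.search_panel_select_range('categ_id', enable_counters=True)
    # and receives something like
    #   {'parent_field': 'parent_id',
    #    'values': [{'id': 7, 'display_name': 'All', 'parent_id': False,
    #                '__count': 12}, ...]}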
@api.model
def search_panel_select_multi_range(self, field_name, **kwargs):
"""
Return possible values of the field field_name (case select="multi"),
possibly with counters and groups.
:param field_name: the name of a filter field;
possible types are many2one, many2many, selection.
:param category_domain: domain generated by categories. Default is [].
:param comodel_domain: domain of field values (if relational)
(this parameter is used in _search_panel_range). Default is [].
:param enable_counters: whether to count records by value. Default is False.
:param expand: whether to return the full range of field values in comodel_domain
or only the field image values. Default is False.
:param filter_domain: domain generated by filters. Default is [].
:param group_by: extra field to read on comodel, to group comodel records
:param group_domain: dict, one domain for each activated group
for the group_by (if any). Those domains are
                            used to fetch accurate counters for values in each group.
Default is [] (many2one case) or None.
:param limit: integer, maximal number of values to fetch. Default is None.
:param search_domain: base domain of search. Default is [].
:return: {
'values': a list of possible values, each being a dict with keys
'id' (value),
'name' (value label),
'__count' (how many records with that value),
'group_id' (value of group), set if a group_by has been provided,
'group_name' (label of group), set if a group_by has been provided
}
or an object with an error message when limit is defined and reached.
"""
field = self._fields[field_name]
supported_types = ['many2one', 'many2many', 'selection']
if field.type not in supported_types:
raise UserError(_('Only types %(supported_types)s are supported for filter (found type %(field_type)s)',
supported_types=supported_types, field_type=field.type))
model_domain = kwargs.get('search_domain', [])
extra_domain = AND([
kwargs.get('category_domain', []),
kwargs.get('filter_domain', []),
])
if field.type == 'selection':
return {
'values': self._search_panel_selection_range(field_name, model_domain=model_domain,
extra_domain=extra_domain, **kwargs
)
}
Comodel = self.env.get(field.comodel_name).with_context(hierarchical_naming=False)
field_names = ['display_name']
group_by = kwargs.get('group_by')
limit = kwargs.get('limit')
if group_by:
group_by_field = Comodel._fields[group_by]
field_names.append(group_by)
if group_by_field.type == 'many2one':
def group_id_name(value):
return value or (False, _("Not Set"))
elif group_by_field.type == 'selection':
desc = Comodel.fields_get([group_by])[group_by]
group_by_selection = dict(desc['selection'])
group_by_selection[False] = _("Not Set")
def group_id_name(value):
return value, group_by_selection[value]
else:
def group_id_name(value):
return (value, value) if value else (False, _("Not Set"))
comodel_domain = kwargs.get('comodel_domain', [])
enable_counters = kwargs.get('enable_counters')
expand = kwargs.get('expand')
if field.type == 'many2many':
comodel_records = Comodel.search_read(comodel_domain, field_names, limit=limit)
if expand and limit and len(comodel_records) == limit:
return {'error_msg': str(SEARCH_PANEL_ERROR_MESSAGE)}
group_domain = kwargs.get('group_domain')
field_range = []
for record in comodel_records:
record_id = record['id']
                values = {
'id': record_id,
'display_name': record['display_name'],
}
if group_by:
group_id, group_name = group_id_name(record[group_by])
values['group_id'] = group_id
values['group_name'] = group_name
if enable_counters or not expand:
search_domain = AND([
model_domain,
[(field_name, 'in', record_id)],
])
local_extra_domain = extra_domain
if group_by and group_domain:
local_extra_domain = AND([
local_extra_domain,
group_domain.get(json.dumps(group_id), []),
])
search_count_domain = AND([
search_domain,
local_extra_domain
])
if enable_counters:
count = self.search_count(search_count_domain)
if not expand:
if enable_counters and is_true_domain(local_extra_domain):
inImage = count
else:
inImage = self.search(search_domain, limit=1)
if expand or inImage:
if enable_counters:
values['__count'] = count
field_range.append(values)
if not expand and limit and len(field_range) == limit:
return {'error_msg': str(SEARCH_PANEL_ERROR_MESSAGE)}
return { 'values': field_range, }
if field.type == 'many2one':
if enable_counters or not expand:
extra_domain = AND([
extra_domain,
kwargs.get('group_domain', []),
])
domain_image = self._search_panel_field_image(field_name,
model_domain=model_domain, extra_domain=extra_domain,
only_counters=expand,
set_limit=limit and not (expand or group_by or comodel_domain), **kwargs
)
if not (expand or group_by or comodel_domain):
values = list(domain_image.values())
if limit and len(values) == limit:
return {'error_msg': str(SEARCH_PANEL_ERROR_MESSAGE)}
return {'values': values, }
if not expand:
image_element_ids = list(domain_image.keys())
comodel_domain = AND([
comodel_domain,
[('id', 'in', image_element_ids)],
])
comodel_records = Comodel.search_read(comodel_domain, field_names, limit=limit)
if limit and len(comodel_records) == limit:
return {'error_msg': str(SEARCH_PANEL_ERROR_MESSAGE)}
field_range = []
for record in comodel_records:
record_id = record['id']
            values = {
'id': record_id,
'display_name': record['display_name'],
}
if group_by:
group_id, group_name = group_id_name(record[group_by])
values['group_id'] = group_id
values['group_name'] = group_name
if enable_counters:
image_element = domain_image.get(record_id)
values['__count'] = image_element['__count'] if image_element else 0
field_range.append(values)
return { 'values': field_range, }
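    # Illustrative sketch (not part of the original source, hypothetical 'tag_ids'
    # many2many grouped by a hypothetical selection field 'priority' on the comodel):
    #   self.search_panel_select_multi_range('tag_ids', group_by='priority',
    #                                        enable_counters=True)
    # could yield
    #   {'values': [{'id': 5, 'display_name': 'Urgent', 'group_id': 'high',
    #                'group_name': 'High', '__count': 3}, ...]}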
class ResCompany(models.Model):
_inherit = 'res.company'
@api.model_create_multi
def create(self, vals_list):
companies = super().create(vals_list)
style_fields = {'external_report_layout_id', 'font', 'primary_color', 'secondary_color'}
if any(not style_fields.isdisjoint(values) for values in vals_list):
self._update_asset_style()
return companies
def write(self, values):
res = super().write(values)
style_fields = {'external_report_layout_id', 'font', 'primary_color', 'secondary_color'}
if not style_fields.isdisjoint(values):
self._update_asset_style()
return res
def _get_asset_style_b64(self):
template_style = self.env.ref('web.styles_company_report', raise_if_not_found=False)
if not template_style:
return b''
# One bundle for everyone, so this method
# necessarily updates the style for every company at once
company_ids = self.sudo().search([])
company_styles = template_style._render({
'company_ids': company_ids,
})
return base64.b64encode(company_styles.encode())
def _update_asset_style(self):
asset_attachment = self.env.ref('web.asset_styles_company_report', raise_if_not_found=False)
if not asset_attachment:
return
asset_attachment = asset_attachment.sudo()
b64_val = self._get_asset_style_b64()
if b64_val != asset_attachment.datas:
asset_attachment.write({'datas': b64_val})
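    # Illustrative sketch (not part of the original source): writing any of the style
    # fields on a company triggers a rebuild of the shared report style attachment:
    #   env.company.write({'primary_color': '#875A7B'})
    #   # -> _update_asset_style() re-renders 'web.styles_company_report' for all
    #   #    companies and stores the base64-encoded result on
    #   #    'web.asset_styles_company_report'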
| 42.729598 | 35,081 |
| 14,255 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
import markupsafe
from markupsafe import Markup
from odoo import api, fields, models, tools
from odoo.addons.base.models.ir_qweb_fields import nl2br
from odoo.modules import get_resource_path
try:
import sass as libsass
except ImportError:
# If the `sass` python library isn't found, we fallback on the
# `sassc` executable in the path.
libsass = None
try:
from PIL.Image import Resampling
except ImportError:
from PIL import Image as Resampling
DEFAULT_PRIMARY = '#000000'
DEFAULT_SECONDARY = '#000000'
class BaseDocumentLayout(models.TransientModel):
"""
Customise the company document layout and display a live preview
"""
_name = 'base.document.layout'
_description = 'Company Document Layout'
@api.model
def _default_report_footer(self):
company = self.env.company
footer_fields = [field for field in [company.phone, company.email, company.website, company.vat] if isinstance(field, str) and len(field) > 0]
return Markup(' ').join(footer_fields)
@api.model
def _default_company_details(self):
company = self.env.company
address_format, company_data = company.partner_id._prepare_display_address()
address_format = self._clean_address_format(address_format, company_data)
# company_name may *still* be missing from prepared address in case commercial_company_name is falsy
if 'company_name' not in address_format:
address_format = '%(company_name)s\n' + address_format
company_data['company_name'] = company_data['company_name'] or company.name
return Markup(nl2br(address_format)) % company_data
def _clean_address_format(self, address_format, company_data):
missing_company_data = [k for k, v in company_data.items() if not v]
for key in missing_company_data:
if key in address_format:
address_format = address_format.replace(f'%({key})s\n', '')
return address_format
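    # Illustrative sketch (not part of the original source): keys with no value are
    # stripped from the address layout, e.g.
    #   self._clean_address_format('%(company_name)s\n%(street)s\n%(vat)s\n',
    #                              {'company_name': 'X', 'street': '', 'vat': 'BE123'})
    #   # -> '%(company_name)s\n%(vat)s\n'   (the empty street line is removed)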
company_id = fields.Many2one(
'res.company', default=lambda self: self.env.company, required=True)
logo = fields.Binary(related='company_id.logo', readonly=False)
preview_logo = fields.Binary(related='logo', string="Preview logo")
report_header = fields.Html(related='company_id.report_header', readonly=False)
report_footer = fields.Html(related='company_id.report_footer', readonly=False, default=_default_report_footer)
company_details = fields.Html(related='company_id.company_details', readonly=False, default=_default_company_details)
# The paper format changes won't be reflected in the preview.
paperformat_id = fields.Many2one(related='company_id.paperformat_id', readonly=False)
external_report_layout_id = fields.Many2one(related='company_id.external_report_layout_id', readonly=False)
font = fields.Selection(related='company_id.font', readonly=False)
primary_color = fields.Char(related='company_id.primary_color', readonly=False)
secondary_color = fields.Char(related='company_id.secondary_color', readonly=False)
custom_colors = fields.Boolean(compute="_compute_custom_colors", readonly=False)
logo_primary_color = fields.Char(compute="_compute_logo_colors")
logo_secondary_color = fields.Char(compute="_compute_logo_colors")
layout_background = fields.Selection(related='company_id.layout_background', readonly=False)
layout_background_image = fields.Binary(related='company_id.layout_background_image', readonly=False)
report_layout_id = fields.Many2one('report.layout')
    # All sanitization is disabled because we want true raw html to be passed to the preview iframe.
preview = fields.Html(compute='_compute_preview', sanitize=False)
    # The following company-related fields are required to render the invoice report preview
partner_id = fields.Many2one(related='company_id.partner_id', readonly=True)
phone = fields.Char(related='company_id.phone', readonly=True)
email = fields.Char(related='company_id.email', readonly=True)
website = fields.Char(related='company_id.website', readonly=True)
vat = fields.Char(related='company_id.vat', readonly=True)
name = fields.Char(related='company_id.name', readonly=True)
country_id = fields.Many2one(related="company_id.country_id", readonly=True)
@api.depends('logo_primary_color', 'logo_secondary_color', 'primary_color', 'secondary_color',)
def _compute_custom_colors(self):
for wizard in self:
logo_primary = wizard.logo_primary_color or ''
logo_secondary = wizard.logo_secondary_color or ''
# Force lower case on color to ensure that FF01AA == ff01aa
wizard.custom_colors = (
wizard.logo and wizard.primary_color and wizard.secondary_color
and not(
wizard.primary_color.lower() == logo_primary.lower()
and wizard.secondary_color.lower() == logo_secondary.lower()
)
)
@api.depends('logo')
def _compute_logo_colors(self):
for wizard in self:
if wizard._context.get('bin_size'):
wizard_for_image = wizard.with_context(bin_size=False)
else:
wizard_for_image = wizard
wizard.logo_primary_color, wizard.logo_secondary_color = wizard.extract_image_primary_secondary_colors(wizard_for_image.logo)
@api.depends('report_layout_id', 'logo', 'font', 'primary_color', 'secondary_color', 'report_header', 'report_footer', 'layout_background', 'layout_background_image', 'company_details')
def _compute_preview(self):
""" compute a qweb based preview to display on the wizard """
styles = self._get_asset_style()
for wizard in self:
if wizard.report_layout_id:
# guarantees that bin_size is always set to False,
# so the logo always contains the bin data instead of the binary size
if wizard.env.context.get('bin_size'):
wizard_with_logo = wizard.with_context(bin_size=False)
else:
wizard_with_logo = wizard
preview_css = markupsafe.Markup(self._get_css_for_preview(styles, wizard_with_logo.id))
ir_ui_view = wizard_with_logo.env['ir.ui.view']
wizard.preview = ir_ui_view._render_template('web.report_invoice_wizard_preview', {'company': wizard_with_logo, 'preview_css': preview_css})
else:
wizard.preview = False
@api.onchange('company_id')
def _onchange_company_id(self):
for wizard in self:
wizard.logo = wizard.company_id.logo
wizard.report_header = wizard.company_id.report_header
# company_details and report_footer can store empty strings (set by the user) or false (meaning the user didn't set a value). Since both are falsy values, we use isinstance of string to differentiate them
wizard.report_footer = wizard.company_id.report_footer if isinstance(wizard.company_id.report_footer, str) else wizard.report_footer
wizard.company_details = wizard.company_id.company_details if isinstance(wizard.company_id.company_details, str) else wizard.company_details
wizard.paperformat_id = wizard.company_id.paperformat_id
wizard.external_report_layout_id = wizard.company_id.external_report_layout_id
wizard.font = wizard.company_id.font
wizard.primary_color = wizard.company_id.primary_color
wizard.secondary_color = wizard.company_id.secondary_color
wizard_layout = wizard.env["report.layout"].search([
('view_id.key', '=', wizard.company_id.external_report_layout_id.key)
])
wizard.report_layout_id = wizard_layout or wizard_layout.search([], limit=1)
if not wizard.primary_color:
wizard.primary_color = wizard.logo_primary_color or DEFAULT_PRIMARY
if not wizard.secondary_color:
wizard.secondary_color = wizard.logo_secondary_color or DEFAULT_SECONDARY
@api.onchange('custom_colors')
def _onchange_custom_colors(self):
for wizard in self:
if wizard.logo and not wizard.custom_colors:
wizard.primary_color = wizard.logo_primary_color or DEFAULT_PRIMARY
wizard.secondary_color = wizard.logo_secondary_color or DEFAULT_SECONDARY
@api.onchange('report_layout_id')
def _onchange_report_layout_id(self):
for wizard in self:
wizard.external_report_layout_id = wizard.report_layout_id.view_id
@api.onchange('logo')
def _onchange_logo(self):
for wizard in self:
            # By design, putting the original image back does not change the colors
company = wizard.company_id
# at that point wizard.logo has been assigned the value present in DB
if wizard.logo == company.logo and company.primary_color and company.secondary_color:
continue
if wizard.logo_primary_color:
wizard.primary_color = wizard.logo_primary_color
if wizard.logo_secondary_color:
wizard.secondary_color = wizard.logo_secondary_color
@api.model
def extract_image_primary_secondary_colors(self, logo, white_threshold=225, mitigate=175):
"""
        Identifies dominant colors.
First resizes the original image to improve performance, then discards
transparent colors and white-ish colors, then calls the averaging
method twice to evaluate both primary and secondary colors.
:param logo: logo to process
:param white_threshold: arbitrary value defining the maximum value a color can reach
:param mitigate: arbitrary value defining the maximum value a band can reach
:return colors: hex values of primary and secondary colors
"""
if not logo:
return False, False
        # Appending '===' guarantees correct padding for any base64-encoded value
logo += b'===' if type(logo) == bytes else '==='
try:
# Catches exceptions caused by logo not being an image
image = tools.image_fix_orientation(tools.base64_to_image(logo))
except Exception:
return False, False
base_w, base_h = image.size
w = int(50 * base_w / base_h)
h = 50
# Converts to RGBA (if already RGBA, this is a noop)
image_converted = image.convert('RGBA')
image_resized = image_converted.resize((w, h), resample=Resampling.NEAREST)
colors = []
for color in image_resized.getcolors(w * h):
if not(color[1][0] > white_threshold and
color[1][1] > white_threshold and
color[1][2] > white_threshold) and color[1][3] > 0:
colors.append(color)
if not colors: # May happen when the whole image is white
return False, False
primary, remaining = tools.average_dominant_color(colors, mitigate=mitigate)
secondary = tools.average_dominant_color(remaining, mitigate=mitigate)[0] if remaining else primary
# Lightness and saturation are calculated here.
# - If both colors have a similar lightness, the most colorful becomes primary
# - When the difference in lightness is too great, the brightest color becomes primary
l_primary = tools.get_lightness(primary)
l_secondary = tools.get_lightness(secondary)
if (l_primary < 0.2 and l_secondary < 0.2) or (l_primary >= 0.2 and l_secondary >= 0.2):
s_primary = tools.get_saturation(primary)
s_secondary = tools.get_saturation(secondary)
if s_primary < s_secondary:
primary, secondary = secondary, primary
elif l_secondary > l_primary:
primary, secondary = secondary, primary
return tools.rgb_to_hex(primary), tools.rgb_to_hex(secondary)
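    # Illustrative sketch (not part of the original source): the wizard feeds the
    # base64-encoded company logo to this method and falls back to the defaults when
    # nothing usable is found:
    #   primary, secondary = self.extract_image_primary_secondary_colors(company.logo)
    #   # e.g. ('#1f3a5f', '#d0a215'), or (False, False) for an unreadable or all-white image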
@api.model
def action_open_base_document_layout(self, action_ref=None):
if not action_ref:
action_ref = 'web.action_base_document_layout_configurator'
res = self.env["ir.actions.actions"]._for_xml_id(action_ref)
self.env[res["res_model"]].check_access_rights('write')
return res
def document_layout_save(self):
# meant to be overridden
return self.env.context.get('report_action') or {'type': 'ir.actions.act_window_close'}
def _get_asset_style(self):
"""
Compile the style template. It is a qweb template expecting company ids to generate all the code in one batch.
We give a useless company_ids arg, but provide the PREVIEW_ID arg that will prepare the template for
'_get_css_for_preview' processing later.
:return:
"""
template_style = self.env.ref('web.styles_company_report', raise_if_not_found=False)
if not template_style:
return b''
company_styles = template_style._render({
'company_ids': self,
})
return company_styles
@api.model
def _get_css_for_preview(self, scss, new_id):
"""
Compile the scss into css.
"""
css_code = self._compile_scss(scss)
return css_code
@api.model
def _compile_scss(self, scss_source):
"""
This code will compile valid scss into css.
Parameters are the same from odoo/addons/base/models/assetsbundle.py
Simply copied and adapted slightly
"""
# No scss ? still valid, returns empty css
if not scss_source.strip():
return ""
precision = 8
output_style = 'expanded'
bootstrap_path = get_resource_path('web', 'static', 'lib', 'bootstrap', 'scss')
try:
return libsass.compile(
string=scss_source,
include_paths=[
bootstrap_path,
],
output_style=output_style,
precision=precision,
)
except libsass.CompileError as e:
raise libsass.CompileError(e.args[0])
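    # Illustrative sketch (not part of the original source, assumes the `sass` python
    # package is importable as libsass): plain scss compiles to expanded css, e.g.
    #   self._compile_scss('div { .inner { color: #875A7B; } }')
    #   # -> roughly 'div .inner {\n  color: #875A7B;\n}\n' ("" for blank input)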
| 45.983871 | 14,255 |
| 2,380 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class IrUiMenu(models.Model):
_inherit = "ir.ui.menu"
def load_web_menus(self, debug):
""" Loads all menu items (all applications and their sub-menus) and
processes them to be used by the webclient. Mainly, it associates with
each application (top level menu) the action of its first child menu
that is associated with an action (recursively), i.e. with the action
        to execute when opening the app.
:return: the menus (including the images in Base64)
"""
menus = self.load_menus(debug)
web_menus = {}
for menu in menus.values():
if not menu['id']:
# special root menu case
web_menus['root'] = {
"id": 'root',
"name": menu['name'],
"children": menu['children'],
"appID": False,
"xmlid": "",
"actionID": False,
"actionModel": False,
"webIcon": None,
"webIconData": None,
"backgroundImage": menu.get('backgroundImage'),
}
else:
action = menu['action']
if menu['id'] == menu['app_id']:
# if it's an app take action of first (sub)child having one defined
child = menu
while child and not action:
action = child['action']
child = menus[child['children'][0]] if child['children'] else False
action_model, action_id = action.split(',') if action else (False, False)
action_id = int(action_id) if action_id else False
web_menus[menu['id']] = {
"id": menu['id'],
"name": menu['name'],
"children": menu['children'],
"appID": menu['app_id'],
"xmlid": menu['xmlid'],
"actionID": action_id,
"actionModel": action_model,
"webIcon": menu['web_icon'],
"webIconData": menu['web_icon_data'],
}
return web_menus
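    # Illustrative sketch (not part of the original source): the webclient consumes
    # the result as a dict keyed by menu id plus the special 'root' entry, e.g.
    #   menus = env['ir.ui.menu'].load_web_menus(debug=False)
    #   menus['root']['children']       # ids of the top-level apps
    #   menus[app_menu_id]['actionID']  # action opened when clicking that app
    # (app_menu_id is a placeholder for an actual menu id)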
| 38.387097 | 2,380 |
| 4,369 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import hashlib
from collections import OrderedDict
from werkzeug.urls import url_quote
from markupsafe import Markup
from odoo import api, models
from odoo.tools import pycompat
from odoo.tools import html_escape as escape
class Image(models.AbstractModel):
"""
Widget options:
``class``
set as attribute on the generated <img> tag
"""
_name = 'ir.qweb.field.image'
_description = 'Qweb Field Image'
_inherit = 'ir.qweb.field.image'
def _get_src_urls(self, record, field_name, options):
"""Considering the rendering options, returns the src and data-zoom-image urls.
:return: src, src_zoom urls
:rtype: tuple
"""
max_size = None
if options.get('resize'):
max_size = options.get('resize')
else:
max_width, max_height = options.get('max_width', 0), options.get('max_height', 0)
if max_width or max_height:
max_size = '%sx%s' % (max_width, max_height)
sha = hashlib.sha512(str(getattr(record, '__last_update')).encode('utf-8')).hexdigest()[:7]
max_size = '' if max_size is None else '/%s' % max_size
if options.get('filename-field') and getattr(record, options['filename-field'], None):
filename = record[options['filename-field']]
elif options.get('filename'):
filename = options['filename']
else:
filename = record.display_name
filename = filename.replace('/', '-').replace('\\', '-').replace('..', '--')
src = '/web/image/%s/%s/%s%s/%s?unique=%s' % (record._name, record.id, options.get('preview_image', field_name), max_size, url_quote(filename), sha)
src_zoom = None
if options.get('zoom') and getattr(record, options['zoom'], None):
src_zoom = '/web/image/%s/%s/%s%s/%s?unique=%s' % (record._name, record.id, options['zoom'], max_size, url_quote(filename), sha)
elif options.get('zoom'):
src_zoom = options['zoom']
return src, src_zoom
@api.model
def record_to_html(self, record, field_name, options):
assert options['tagName'] != 'img',\
"Oddly enough, the root tag of an image field can not be img. " \
"That is because the image goes into the tag, or it gets the " \
"hose again."
if options.get('qweb_img_raw_data', False):
return super(Image, self).record_to_html(record, field_name, options)
aclasses = ['img', 'img-fluid'] if options.get('qweb_img_responsive', True) else ['img']
aclasses += options.get('class', '').split()
classes = ' '.join(map(escape, aclasses))
src, src_zoom = self._get_src_urls(record, field_name, options)
if options.get('alt-field') and getattr(record, options['alt-field'], None):
alt = escape(record[options['alt-field']])
elif options.get('alt'):
alt = options['alt']
else:
alt = escape(record.display_name)
itemprop = None
if options.get('itemprop'):
itemprop = options['itemprop']
atts = OrderedDict()
atts["src"] = src
atts["itemprop"] = itemprop
atts["class"] = classes
atts["style"] = options.get('style')
atts["alt"] = alt
atts["data-zoom"] = src_zoom and u'1' or None
atts["data-zoom-image"] = src_zoom
atts["data-no-post-process"] = options.get('data-no-post-process')
atts = self.env['ir.qweb']._post_processing_att('img', atts, options.get('template_options'))
img = ['<img']
for name, value in atts.items():
if value:
img.append(' ')
img.append(escape(pycompat.to_text(name)))
img.append('="')
img.append(escape(pycompat.to_text(value)))
img.append('"')
img.append('/>')
return Markup(''.join(img))
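    # Illustrative sketch (not part of the original source, image_128 is a
    # hypothetical field name): rendering a binary image field through this widget,
    # e.g. in a qweb view
    #   <span t-field="record.image_128" t-options="{'widget': 'image', 'class': 'logo'}"/>
    # produces an <img> tag whose src points at
    # /web/image/<model>/<id>/image_128/<filename>?unique=<checksum>, the checksum
    # being derived from __last_update for cache busting.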
class ImageUrlConverter(models.AbstractModel):
_description = 'Qweb Field Image'
_inherit = 'ir.qweb.field.image_url'
def _get_src_urls(self, record, field_name, options):
image_url = record[options.get('preview_image', field_name)]
return image_url, options.get("zoom", None)
| 37.34188 | 4,369 |
| 7,829 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
#
# py.js documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 9 19:36:23 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'py.js'
copyright = u'2012, Xavier Morel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.6'
# The full version, including alpha/beta/rc tags.
release = '0.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# Default sphinx domain
default_domain = 'js'
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# default code-block highlighting
highlight_language = 'javascript'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyjsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'pyjs.tex', u'py.js Documentation',
u'Xavier Morel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyjs', u'py.js Documentation',
[u'Xavier Morel'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyjs', u'py.js Documentation',
u'Xavier Morel', 'pyjs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| 31.696356 | 7,829 |
| 89,434 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import copy
import datetime
import functools
import hashlib
import io
import itertools
import json
import logging
import operator
import os
import re
import sys
import tempfile
import unicodedata
from collections import OrderedDict, defaultdict
import babel.messages.pofile
import werkzeug
import werkzeug.exceptions
import werkzeug.utils
import werkzeug.wrappers
import werkzeug.wsgi
from lxml import etree, html
from markupsafe import Markup
from werkzeug.urls import url_encode, url_decode, iri_to_uri
import odoo
import odoo.modules.registry
from odoo.api import call_kw
from odoo.addons.base.models.ir_qweb import render as qweb_render
from odoo.modules import get_resource_path, module
from odoo.tools import html_escape, pycompat, ustr, apply_inheritance_specs, lazy_property, osutil
from odoo.tools.mimetypes import guess_mimetype
from odoo.tools.translate import _
from odoo.tools.misc import str2bool, xlsxwriter, file_open, file_path
from odoo.tools.safe_eval import safe_eval, time
from odoo import http
from odoo.http import content_disposition, dispatch_rpc, request, serialize_exception as _serialize_exception
from odoo.exceptions import AccessError, UserError, AccessDenied
from odoo.models import check_method_name
from odoo.service import db, security
_logger = logging.getLogger(__name__)
CONTENT_MAXAGE = http.STATIC_CACHE_LONG # menus, translations, static qweb
DBNAME_PATTERN = '^[a-zA-Z0-9][a-zA-Z0-9_.-]+$'
COMMENT_PATTERN = r'Modified by [\s\w\-.]+ from [\s\w\-.]+'
def none_values_filtered(func):
@functools.wraps(func)
def wrap(iterable):
return func(v for v in iterable if v is not None)
return wrap
def allow_empty_iterable(func):
"""
Some functions do not accept empty iterables (e.g. max, min with no default value)
This returns the function `func` such that it returns None if the iterable
is empty instead of raising a ValueError.
"""
@functools.wraps(func)
def wrap(iterable):
iterator = iter(iterable)
try:
value = next(iterator)
return func(itertools.chain([value], iterator))
except StopIteration:
return None
return wrap
OPERATOR_MAPPING = {
'max': none_values_filtered(allow_empty_iterable(max)),
'min': none_values_filtered(allow_empty_iterable(min)),
'sum': sum,
'bool_and': all,
'bool_or': any,
}
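# Illustrative sketch (not part of the original source): the wrapped operators
# tolerate empty iterables and None values, which plain max()/min() would not:
#   OPERATOR_MAPPING['max']([3, None, 7])   # -> 7
#   OPERATOR_MAPPING['min']([])             # -> None instead of raising ValueError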
#----------------------------------------------------------
# Odoo Web helpers
#----------------------------------------------------------
db_list = http.db_list
db_monodb = http.db_monodb
def clean(name): return name.replace('\x3c', '')
def serialize_exception(f):
@functools.wraps(f)
def wrap(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as e:
_logger.exception("An exception occurred during an http request")
se = _serialize_exception(e)
error = {
'code': 200,
'message': "Odoo Server Error",
'data': se
}
return werkzeug.exceptions.InternalServerError(json.dumps(error))
return wrap
def abort_and_redirect(url):
response = request.redirect(url, 302)
response = http.root.get_response(request.httprequest, response, explicit_session=False)
werkzeug.exceptions.abort(response)
def ensure_db(redirect='/web/database/selector'):
# This helper should be used in web client auth="none" routes
    # if those routes need a db to work with.
    # If the heuristic does not find any database, then the user will be
    # redirected to the db selector or any url specified by the `redirect` argument.
    # If the db is taken out of a query parameter, it will be checked against
    # `http.db_filter()` in order to ensure it's legit and thus avoid db
    # forgery that could lead to XSS attacks.
db = request.params.get('db') and request.params.get('db').strip()
# Ensure db is legit
if db and db not in http.db_filter([db]):
db = None
if db and not request.session.db:
# User asked a specific database on a new session.
        # That means the nodb router has been used to find the route
# Depending on installed module in the database, the rendering of the page
# may depend on data injected by the database route dispatcher.
# Thus, we redirect the user to the same page but with the session cookie set.
# This will force using the database route dispatcher...
r = request.httprequest
url_redirect = werkzeug.urls.url_parse(r.base_url)
if r.query_string:
# in P3, request.query_string is bytes, the rest is text, can't mix them
query_string = iri_to_uri(r.query_string)
url_redirect = url_redirect.replace(query=query_string)
request.session.db = db
abort_and_redirect(url_redirect.to_url())
# if db not provided, use the session one
if not db and request.session.db and http.db_filter([request.session.db]):
db = request.session.db
# if no database provided and no database in session, use monodb
if not db:
db = db_monodb(request.httprequest)
    # if no db can be found by this point, send to the database selector
# the database selector will redirect to database manager if needed
if not db:
werkzeug.exceptions.abort(request.redirect(redirect, 303))
# always switch the session to the computed db
if db != request.session.db:
request.session.logout()
abort_and_redirect(request.httprequest.url)
request.session.db = db
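# Illustrative sketch (not part of the original source, '/some/page' and the method
# are placeholders inside an http.Controller): a typical auth="none" route guards
# itself with ensure_db() before touching the registry:
#   @http.route('/some/page', type='http', auth='none')
#   def some_page(self):
#       ensure_db()
#       ...  # request.session.db is now set, or the user has been redirected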
def fs2web(path):
"""convert FS path into web path"""
return '/'.join(path.split(os.path.sep))
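# Illustrative sketch (not part of the original source): converts an OS-specific
# path into a URL path, e.g.
#   fs2web(os.path.join('web', 'static', 'src'))   # -> 'web/static/src' on any OS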
def get_last_modified(files):
""" Returns the modification time of the most recently modified
file provided
:param list(str) files: names of files to check
:return: most recent modification time amongst the fileset
:rtype: datetime.datetime
"""
files = list(files)
if files:
return max(datetime.datetime.fromtimestamp(os.path.getmtime(f))
for f in files)
return datetime.datetime(1970, 1, 1)
def make_conditional(response, last_modified=None, etag=None, max_age=0):
""" Makes the provided response conditional based upon the request,
and mandates revalidation from clients
Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
setting ``last_modified`` and ``etag`` correctly on the response object
:param response: Werkzeug response
:type response: werkzeug.wrappers.Response
:param datetime.datetime last_modified: last modification date of the response content
:param str etag: some sort of checksum of the content (deep etag)
:return: the response object provided
:rtype: werkzeug.wrappers.Response
"""
response.cache_control.must_revalidate = True
response.cache_control.max_age = max_age
if last_modified:
response.last_modified = last_modified
if etag:
response.set_etag(etag)
return response.make_conditional(request.httprequest)
def _get_login_redirect_url(uid, redirect=None):
""" Decide if user requires a specific post-login redirect, e.g. for 2FA, or if they are
        fully logged in and can proceed to the requested URL
"""
if request.session.uid: # fully logged
return redirect or '/web'
# partial session (MFA)
url = request.env(user=uid)['res.users'].browse(uid)._mfa_url()
if not redirect:
return url
parsed = werkzeug.urls.url_parse(url)
qs = parsed.decode_query()
qs['redirect'] = redirect
return parsed.replace(query=werkzeug.urls.url_encode(qs)).to_url()
def login_and_redirect(db, login, key, redirect_url='/web'):
uid = request.session.authenticate(db, login, key)
redirect_url = _get_login_redirect_url(uid, redirect_url)
return set_cookie_and_redirect(redirect_url)
def set_cookie_and_redirect(redirect_url):
redirect = request.redirect(redirect_url, 303)
redirect.autocorrect_location_header = False
return redirect
def clean_action(action, env):
action_type = action.setdefault('type', 'ir.actions.act_window_close')
if action_type == 'ir.actions.act_window':
action = fix_view_modes(action)
# When returning an action, keep only relevant fields/properties
readable_fields = env[action['type']]._get_readable_fields()
action_type_fields = env[action['type']]._fields.keys()
cleaned_action = {
field: value
for field, value in action.items()
# keep allowed fields and custom properties fields
if field in readable_fields or field not in action_type_fields
}
# Warn about custom properties fields, because use is discouraged
action_name = action.get('name') or action
custom_properties = action.keys() - readable_fields - action_type_fields
if custom_properties:
_logger.warning("Action %r contains custom properties %s. Passing them "
"via the `params` or `context` properties is recommended instead",
action_name, ', '.join(map(repr, custom_properties)))
return cleaned_action
# I think generate_views,fix_view_modes should go into js ActionManager
def generate_views(action):
"""
While the server generates a sequence called "views" computing dependencies
between a bunch of stuff for views coming directly from the database
(the ``ir.actions.act_window model``), it's also possible for e.g. buttons
to return custom view dictionaries generated on the fly.
In that case, there is no ``views`` key available on the action.
Since the web client relies on ``action['views']``, generate it here from
``view_mode`` and ``view_id``.
Currently handles two different cases:
* no view_id, multiple view_mode
* single view_id, single view_mode
:param dict action: action descriptor dictionary to generate a views key for
"""
view_id = action.get('view_id') or False
if isinstance(view_id, (list, tuple)):
view_id = view_id[0]
# providing at least one view mode is a requirement, not an option
view_modes = action['view_mode'].split(',')
if len(view_modes) > 1:
if view_id:
raise ValueError('Non-db action dictionaries should provide '
'either multiple view modes or a single view '
'mode and an optional view id.\n\n Got view '
'modes %r and view id %r for action %r' % (
view_modes, view_id, action))
action['views'] = [(False, mode) for mode in view_modes]
return
action['views'] = [(view_id, view_modes[0])]
def fix_view_modes(action):
""" For historical reasons, Odoo has weird dealings in relation to
view_mode and the view_type attribute (on window actions):
* one of the view modes is ``tree``, which stands for both list views
and tree views
* the choice is made by checking ``view_type``, which is either
``form`` for a list view or ``tree`` for an actual tree view
This methods simply folds the view_type into view_mode by adding a
new view mode ``list`` which is the result of the ``tree`` view_mode
in conjunction with the ``form`` view_type.
TODO: this should go into the doc, some kind of "peculiarities" section
:param dict action: an action descriptor
:returns: nothing, the action is modified in place
"""
if not action.get('views'):
generate_views(action)
if action.pop('view_type', 'form') != 'form':
return action
if 'view_mode' in action:
action['view_mode'] = ','.join(
mode if mode != 'tree' else 'list'
for mode in action['view_mode'].split(','))
action['views'] = [
[id, mode if mode != 'tree' else 'list']
for id, mode in action['views']
]
return action
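# Illustrative sketch (not part of the original source): a legacy action dict gets
# its 'tree' modes folded into 'list' when view_type is 'form':
#   fix_view_modes({'view_type': 'form', 'view_mode': 'tree,form',
#                   'views': [(False, 'tree'), (False, 'form')]})
#   # -> view_mode == 'list,form' and views == [[False, 'list'], [False, 'form']]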
def _local_web_translations(trans_file):
messages = []
try:
with open(trans_file) as t_file:
po = babel.messages.pofile.read_po(t_file)
except Exception:
return
for x in po:
if x.id and x.string and "openerp-web" in x.auto_comments:
messages.append({'id': x.id, 'string': x.string})
return messages
def xml2json_from_elementtree(el, preserve_whitespaces=False):
""" xml2json-direct
Simple and straightforward XML-to-JSON converter in Python
New BSD Licensed
http://code.google.com/p/xml2json-direct/
"""
res = {}
if el.tag[0] == "{":
ns, name = el.tag.rsplit("}", 1)
res["tag"] = name
res["namespace"] = ns[1:]
else:
res["tag"] = el.tag
res["attrs"] = {}
for k, v in el.items():
res["attrs"][k] = v
kids = []
if el.text and (preserve_whitespaces or el.text.strip() != ''):
kids.append(el.text)
for kid in el:
kids.append(xml2json_from_elementtree(kid, preserve_whitespaces))
if kid.tail and (preserve_whitespaces or kid.tail.strip() != ''):
kids.append(kid.tail)
res["children"] = kids
return res
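# Illustrative sketch (not part of the original source):
#   xml2json_from_elementtree(etree.fromstring('<menu name="Apps"><item/></menu>'))
#   # -> {'tag': 'menu', 'attrs': {'name': 'Apps'}, 'children': [
#   #        {'tag': 'item', 'attrs': {}, 'children': []}]}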
class HomeStaticTemplateHelpers(object):
"""
    Helper class that wraps the reading of static qweb template files
and xpath inheritance applied to those templates
/!\ Template inheritance order is defined by ir.module.module natural order
which is "sequence, name"
Then a topological sort is applied, which just puts dependencies
of a module before that module
"""
NAME_TEMPLATE_DIRECTIVE = 't-name'
STATIC_INHERIT_DIRECTIVE = 't-inherit'
STATIC_INHERIT_MODE_DIRECTIVE = 't-inherit-mode'
PRIMARY_MODE = 'primary'
EXTENSION_MODE = 'extension'
DEFAULT_MODE = PRIMARY_MODE
def __init__(self, addons, db, checksum_only=False, debug=False):
'''
:param str|list addons: plain list or comma separated list of addons
:param str db: the current db we are working on
:param bool checksum_only: only computes the checksum of all files for addons
:param str debug: the debug mode of the session
'''
super(HomeStaticTemplateHelpers, self).__init__()
self.addons = addons.split(',') if isinstance(addons, str) else addons
self.db = db
self.debug = debug
self.checksum_only = checksum_only
self.template_dict = OrderedDict()
def _get_parent_template(self, addon, template):
"""Computes the real addon name and the template name
of the parent template (the one that is inherited from)
:param str addon: the addon the template is declared in
        :param etree template: the current template we are handling
:returns: (str, str)
"""
original_template_name = template.attrib[self.STATIC_INHERIT_DIRECTIVE]
split_name_attempt = original_template_name.split('.', 1)
parent_addon, parent_name = tuple(split_name_attempt) if len(split_name_attempt) == 2 else (addon, original_template_name)
if parent_addon not in self.template_dict:
if original_template_name in self.template_dict[addon]:
parent_addon = addon
parent_name = original_template_name
else:
raise ValueError(_('Module %s not loaded or inexistent, or templates of addon being loaded (%s) are misordered') % (parent_addon, addon))
if parent_name not in self.template_dict[parent_addon]:
raise ValueError(_("No template found to inherit from. Module %s and template name %s") % (parent_addon, parent_name))
return parent_addon, parent_name
def _compute_xml_tree(self, addon, file_name, source):
"""Computes the xml tree that 'source' contains
Applies inheritance specs in the process
:param str addon: the current addon we are reading files for
:param str file_name: the current name of the file we are reading
:param str source: the content of the file
:returns: etree
"""
try:
all_templates_tree = etree.parse(io.BytesIO(source), parser=etree.XMLParser(remove_comments=True)).getroot()
except etree.ParseError as e:
_logger.error("Could not parse file %s: %s" % (file_name, e.msg))
raise e
self.template_dict.setdefault(addon, OrderedDict())
for template_tree in list(all_templates_tree):
if self.NAME_TEMPLATE_DIRECTIVE in template_tree.attrib:
template_name = template_tree.attrib[self.NAME_TEMPLATE_DIRECTIVE]
dotted_names = template_name.split('.', 1)
if len(dotted_names) > 1 and dotted_names[0] == addon:
template_name = dotted_names[1]
else:
# self.template_dict[addon] grows after processing each template
template_name = 'anonymous_template_%s' % len(self.template_dict[addon])
if self.STATIC_INHERIT_DIRECTIVE in template_tree.attrib:
inherit_mode = template_tree.attrib.get(self.STATIC_INHERIT_MODE_DIRECTIVE, self.DEFAULT_MODE)
if inherit_mode not in [self.PRIMARY_MODE, self.EXTENSION_MODE]:
raise ValueError(_("Invalid inherit mode. Module %s and template name %s") % (addon, template_name))
parent_addon, parent_name = self._get_parent_template(addon, template_tree)
# After several performance tests, we found out that deepcopy is the most efficient
# solution in this case (compared with copy, xpath with '.' and stringifying).
parent_tree = copy.deepcopy(self.template_dict[parent_addon][parent_name])
xpaths = list(template_tree)
# owl chokes on comments, disable debug comments for now
# pylint: disable=W0125
if False: # self.debug and inherit_mode == self.EXTENSION_MODE:
for xpath in xpaths:
xpath.insert(0, etree.Comment(" Modified by %s from %s " % (template_name, addon)))
elif inherit_mode == self.PRIMARY_MODE:
parent_tree.tag = template_tree.tag
inherited_template = apply_inheritance_specs(parent_tree, xpaths)
if inherit_mode == self.PRIMARY_MODE: # New template_tree: A' = B(A)
for attr_name, attr_val in template_tree.attrib.items():
if attr_name not in ('t-inherit', 't-inherit-mode'):
inherited_template.set(attr_name, attr_val)
if self.debug:
self._remove_inheritance_comments(inherited_template)
self.template_dict[addon][template_name] = inherited_template
else: # Modifies original: A = B(A)
self.template_dict[parent_addon][parent_name] = inherited_template
else:
if template_name in self.template_dict[addon]:
raise ValueError(_("Template %s already exists in module %s") % (template_name, addon))
self.template_dict[addon][template_name] = template_tree
return all_templates_tree
def _remove_inheritance_comments(self, inherited_template):
        '''Remove the comments already added to the template; they come from other
        templates extending the base of this inheritance.
:param inherited_template:
'''
for comment in inherited_template.xpath('//comment()'):
if re.match(COMMENT_PATTERN, comment.text.strip()):
comment.getparent().remove(comment)
def _read_addon_file(self, path_or_url):
"""Read the content of a file or an ``ir.attachment`` record given by
``path_or_url``.
:param str path_or_url:
:returns: bytes
:raises FileNotFoundError: if the path does not match a module file
or an attachment
"""
try:
with file_open(path_or_url, 'rb') as fp:
contents = fp.read()
except FileNotFoundError as e:
attachment = request.env['ir.attachment'].sudo().search([
('url', '=', path_or_url),
('type', '=', 'binary'),
], limit=1)
if attachment:
contents = attachment.raw
else:
raise e
return contents
def _concat_xml(self, file_dict):
"""Concatenate xml files
:param dict(list) file_dict:
key: addon name
value: list of files for an addon
:returns: (concatenation_result, checksum)
:rtype: (bytes, str)
"""
checksum = hashlib.new('sha512') # sha512/256
if not file_dict:
return b'', checksum.hexdigest()
root = None
for addon, fnames in file_dict.items():
for fname in fnames:
contents = self._read_addon_file(fname)
checksum.update(contents)
if not self.checksum_only:
xml = self._compute_xml_tree(addon, fname, contents)
if root is None:
root = etree.Element('templates')
for addon in self.template_dict.values():
for template in addon.values():
root.append(template)
return etree.tostring(root, encoding='utf-8') if root is not None else b'', checksum.hexdigest()[:64]
def _get_asset_paths(self, bundle):
"""Proxy for ir_asset._get_asset_paths
Useful to make 'self' testable.
"""
return request.env['ir.asset']._get_asset_paths(addons=self.addons, bundle=bundle, xml=True)
def _get_qweb_templates(self, bundle):
"""One and only entry point that gets and evaluates static qweb templates
:rtype: (str, str)
"""
xml_paths = defaultdict(list)
# group paths by module, keeping them in order
for path, addon, _ in self._get_asset_paths(bundle):
addon_paths = xml_paths[addon]
if path not in addon_paths:
addon_paths.append(path)
content, checksum = self._concat_xml(xml_paths)
return content, checksum
@classmethod
def get_qweb_templates_checksum(cls, addons=None, db=None, debug=False, bundle=None):
return cls(addons, db, checksum_only=True, debug=debug)._get_qweb_templates(bundle)[1]
@classmethod
def get_qweb_templates(cls, addons=None, db=None, debug=False, bundle=None):
return cls(addons, db, debug=debug)._get_qweb_templates(bundle)[0]
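    # Illustrative sketch (not part of the original source, 'web.assets_qweb' used as
    # an example bundle name): the checksum variant lets callers build a cache key
    # without assembling the concatenated xml:
    #   HomeStaticTemplateHelpers.get_qweb_templates_checksum(
    #       addons=None, db=request.db, bundle='web.assets_qweb')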
# Shared parameters for all login/signup flows
SIGN_UP_REQUEST_PARAMS = {'db', 'login', 'debug', 'token', 'message', 'error', 'scope', 'mode',
'redirect', 'redirect_hostname', 'email', 'name', 'partner_id',
'password', 'confirm_password', 'city', 'country_id', 'lang'}
class GroupsTreeNode:
"""
This class builds an ordered tree of groups from the result of a `read_group(lazy=False)`.
    The `read_group` returns a list of dictionaries and each dictionary is used to
build a leaf. The entire tree is built by inserting all leaves.
"""
def __init__(self, model, fields, groupby, groupby_type, root=None):
self._model = model
self._export_field_names = fields # exported field names (e.g. 'journal_id', 'account_id/name', ...)
self._groupby = groupby
self._groupby_type = groupby_type
self.count = 0 # Total number of records in the subtree
self.children = OrderedDict()
self.data = [] # Only leaf nodes have data
if root:
self.insert_leaf(root)
def _get_aggregate(self, field_name, data, group_operator):
# When exporting one2many fields, multiple data lines might be exported for one record.
        # Blank cells of additional lines are filled with an empty string. This could lead to '' being
# aggregated with an integer or float.
data = (value for value in data if value != '')
if group_operator == 'avg':
return self._get_avg_aggregate(field_name, data)
aggregate_func = OPERATOR_MAPPING.get(group_operator)
if not aggregate_func:
_logger.warning("Unsupported export of group_operator '%s' for field %s on model %s" % (group_operator, field_name, self._model._name))
return
if self.data:
return aggregate_func(data)
return aggregate_func((child.aggregated_values.get(field_name) for child in self.children.values()))
def _get_avg_aggregate(self, field_name, data):
aggregate_func = OPERATOR_MAPPING.get('sum')
if self.data:
return aggregate_func(data) / self.count
children_sums = (child.aggregated_values.get(field_name) * child.count for child in self.children.values())
return aggregate_func(children_sums) / self.count
def _get_aggregated_field_names(self):
""" Return field names of exported field having a group operator """
aggregated_field_names = []
for field_name in self._export_field_names:
if field_name == '.id':
field_name = 'id'
if '/' in field_name:
# Currently no support of aggregated value for nested record fields
# e.g. line_ids/analytic_line_ids/amount
continue
field = self._model._fields[field_name]
if field.group_operator:
aggregated_field_names.append(field_name)
return aggregated_field_names
# Lazy property to memoize aggregated values of children nodes to avoid useless recomputations
@lazy_property
def aggregated_values(self):
aggregated_values = {}
# Transpose the data matrix to group all values of each field in one iterable
field_values = zip(*self.data)
for field_name in self._export_field_names:
field_data = self.data and next(field_values) or []
if field_name in self._get_aggregated_field_names():
field = self._model._fields[field_name]
aggregated_values[field_name] = self._get_aggregate(field_name, field_data, field.group_operator)
return aggregated_values
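    # Hedged example: for a leaf node whose data rows are [(100.0,), (250.0,)] for a single
    # exported float field with group_operator 'sum', aggregated_values maps that field name
    # to 350.0; non-leaf nodes aggregate the already-computed values of their children instead.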
def child(self, key):
"""
Return the child identified by `key`.
        If it doesn't exist, insert a default node and return it.
:param key: child key identifier (groupby value as returned by read_group,
usually (id, display_name))
:return: the child node
"""
if key not in self.children:
self.children[key] = GroupsTreeNode(self._model, self._export_field_names, self._groupby, self._groupby_type)
return self.children[key]
def insert_leaf(self, group):
"""
Build a leaf from `group` and insert it in the tree.
:param group: dict as returned by `read_group(lazy=False)`
"""
leaf_path = [group.get(groupby_field) for groupby_field in self._groupby]
domain = group.pop('__domain')
count = group.pop('__count')
records = self._model.search(domain, offset=0, limit=False, order=False)
# Follow the path from the top level group to the deepest
# group which actually contains the records' data.
node = self # root
node.count += count
for node_key in leaf_path:
# Go down to the next node or create one if it does not exist yet.
node = node.child(node_key)
# Update count value and aggregated value.
node.count += count
node.data = records.export_data(self._export_field_names).get('datas',[])
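# Typical construction (simplified sketch of what ExportFormat.base() does further below):
#     tree = GroupsTreeNode(Model, field_names, groupby, groupby_type)
#     for leaf in Model.read_group(domain, field_names, groupby, lazy=False):
#         tree.insert_leaf(leaf)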
class ExportXlsxWriter:
def __init__(self, field_names, row_count=0):
self.field_names = field_names
self.output = io.BytesIO()
self.workbook = xlsxwriter.Workbook(self.output, {'in_memory': True})
self.base_style = self.workbook.add_format({'text_wrap': True})
self.header_style = self.workbook.add_format({'bold': True})
self.header_bold_style = self.workbook.add_format({'text_wrap': True, 'bold': True, 'bg_color': '#e9ecef'})
self.date_style = self.workbook.add_format({'text_wrap': True, 'num_format': 'yyyy-mm-dd'})
self.datetime_style = self.workbook.add_format({'text_wrap': True, 'num_format': 'yyyy-mm-dd hh:mm:ss'})
self.worksheet = self.workbook.add_worksheet()
self.value = False
self.float_format = '#,##0.00'
decimal_places = [res['decimal_places'] for res in request.env['res.currency'].search_read([], ['decimal_places'])]
self.monetary_format = f'#,##0.{max(decimal_places or [2]) * "0"}'
if row_count > self.worksheet.xls_rowmax:
raise UserError(_('There are too many rows (%s rows, limit: %s) to export as Excel 2007-2013 (.xlsx) format. Consider splitting the export.') % (row_count, self.worksheet.xls_rowmax))
def __enter__(self):
self.write_header()
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
self.close()
def write_header(self):
# Write main header
for i, fieldname in enumerate(self.field_names):
self.write(0, i, fieldname, self.header_style)
self.worksheet.set_column(0, i, 30) # around 220 pixels
def close(self):
self.workbook.close()
with self.output:
self.value = self.output.getvalue()
def write(self, row, column, cell_value, style=None):
self.worksheet.write(row, column, cell_value, style)
def write_cell(self, row, column, cell_value):
cell_style = self.base_style
if isinstance(cell_value, bytes):
try:
# because xlsx uses raw export, we can get a bytes object
# here. xlsxwriter does not support bytes values in Python 3 ->
                # assume it is base64-encoded content and decode it to a string;
                # if that fails, tell the user the value cannot be exported
cell_value = pycompat.to_text(cell_value)
except UnicodeDecodeError:
raise UserError(_("Binary fields can not be exported to Excel unless their content is base64-encoded. That does not seem to be the case for %s.", self.field_names)[column])
if isinstance(cell_value, str):
if len(cell_value) > self.worksheet.xls_strmax:
cell_value = _("The content of this cell is too long for an XLSX file (more than %s characters). Please use the CSV format for this export.", self.worksheet.xls_strmax)
else:
cell_value = cell_value.replace("\r", " ")
elif isinstance(cell_value, datetime.datetime):
cell_style = self.datetime_style
elif isinstance(cell_value, datetime.date):
cell_style = self.date_style
elif isinstance(cell_value, float):
cell_style.set_num_format(self.float_format)
self.write(row, column, cell_value, cell_style)
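    # Summary of the dispatch above: bytes are decoded (or rejected with a UserError),
    # over-long strings are replaced by an explanatory message, datetimes and dates get
    # their dedicated styles, and floats switch the shared base style to the float format.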
class GroupExportXlsxWriter(ExportXlsxWriter):
def __init__(self, fields, row_count=0):
super().__init__([f['label'].strip() for f in fields], row_count)
self.fields = fields
def write_group(self, row, column, group_name, group, group_depth=0):
group_name = group_name[1] if isinstance(group_name, tuple) and len(group_name) > 1 else group_name
if group._groupby_type[group_depth] != 'boolean':
group_name = group_name or _("Undefined")
row, column = self._write_group_header(row, column, group_name, group, group_depth)
# Recursively write sub-groups
for child_group_name, child_group in group.children.items():
row, column = self.write_group(row, column, child_group_name, child_group, group_depth + 1)
for record in group.data:
row, column = self._write_row(row, column, record)
return row, column
def _write_row(self, row, column, data):
for value in data:
self.write_cell(row, column, value)
column += 1
return row + 1, 0
def _write_group_header(self, row, column, label, group, group_depth=0):
aggregates = group.aggregated_values
label = '%s%s (%s)' % (' ' * group_depth, label, group.count)
self.write(row, column, label, self.header_bold_style)
for field in self.fields[1:]: # No aggregates allowed in the first column because of the group title
column += 1
aggregated_value = aggregates.get(field['name'])
if field.get('type') == 'monetary':
self.header_bold_style.set_num_format(self.monetary_format)
elif field.get('type') == 'float':
self.header_bold_style.set_num_format(self.float_format)
else:
aggregated_value = str(aggregated_value if aggregated_value is not None else '')
self.write(row, column, aggregated_value, self.header_bold_style)
return row + 1, 0
#----------------------------------------------------------
# Odoo Web Controllers
#----------------------------------------------------------
class Home(http.Controller):
@http.route('/', type='http', auth="none")
def index(self, s_action=None, db=None, **kw):
return request.redirect_query('/web', query=request.params)
    # ideally, this route should be `auth="user"` but that doesn't work in non-monodb mode.
@http.route('/web', type='http', auth="none")
def web_client(self, s_action=None, **kw):
ensure_db()
if not request.session.uid:
return request.redirect('/web/login', 303)
if kw.get('redirect'):
return request.redirect(kw.get('redirect'), 303)
request.uid = request.session.uid
try:
context = request.env['ir.http'].webclient_rendering_context()
response = request.render('web.webclient_bootstrap', qcontext=context)
response.headers['X-Frame-Options'] = 'DENY'
return response
except AccessError:
return request.redirect('/web/login?error=access')
@http.route('/web/webclient/load_menus/<string:unique>', type='http', auth='user', methods=['GET'])
def web_load_menus(self, unique):
"""
Loads the menus for the webclient
        :param unique: this parameter is not used, but it is mandatory: it is used by the HTTP stack to make the request unique
:return: the menus (including the images in Base64)
"""
menus = request.env["ir.ui.menu"].load_web_menus(request.session.debug)
body = json.dumps(menus, default=ustr)
response = request.make_response(body, [
            # this method must specify a content-type of application/json instead of the default text/html,
            # because the route type is http but the RPC is made with a GET and expects JSON
('Content-Type', 'application/json'),
('Cache-Control', 'public, max-age=' + str(CONTENT_MAXAGE)),
])
return response
def _login_redirect(self, uid, redirect=None):
return _get_login_redirect_url(uid, redirect)
@http.route('/web/login', type='http', auth="none")
def web_login(self, redirect=None, **kw):
ensure_db()
request.params['login_success'] = False
if request.httprequest.method == 'GET' and redirect and request.session.uid:
return request.redirect(redirect)
if not request.uid:
request.uid = odoo.SUPERUSER_ID
values = {k: v for k, v in request.params.items() if k in SIGN_UP_REQUEST_PARAMS}
try:
values['databases'] = http.db_list()
except odoo.exceptions.AccessDenied:
values['databases'] = None
if request.httprequest.method == 'POST':
old_uid = request.uid
try:
uid = request.session.authenticate(request.session.db, request.params['login'], request.params['password'])
request.params['login_success'] = True
return request.redirect(self._login_redirect(uid, redirect=redirect))
except odoo.exceptions.AccessDenied as e:
request.uid = old_uid
if e.args == odoo.exceptions.AccessDenied().args:
values['error'] = _("Wrong login/password")
else:
values['error'] = e.args[0]
else:
if 'error' in request.params and request.params.get('error') == 'access':
values['error'] = _('Only employees can access this database. Please contact the administrator.')
if 'login' not in values and request.session.get('auth_login'):
values['login'] = request.session.get('auth_login')
if not odoo.tools.config['list_db']:
values['disable_database_manager'] = True
response = request.render('web.login', values)
response.headers['X-Frame-Options'] = 'DENY'
return response
@http.route('/web/become', type='http', auth='user', sitemap=False)
def switch_to_admin(self):
uid = request.env.user.id
if request.env.user._is_system():
uid = request.session.uid = odoo.SUPERUSER_ID
# invalidate session token cache as we've changed the uid
request.env['res.users'].clear_caches()
request.session.session_token = security.compute_session_token(request.session, request.env)
return request.redirect(self._login_redirect(uid))
@http.route('/web/health', type='http', auth='none', save_session=False)
def health(self):
data = json.dumps({
'status': 'pass',
})
headers = [('Content-Type', 'application/json'),
('Cache-Control', 'no-store')]
return request.make_response(data, headers)
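    # Illustrative check (no session is created thanks to save_session=False):
    #     GET /web/health  ->  200, body {"status": "pass"}, Cache-Control: no-store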
class WebClient(http.Controller):
@http.route('/web/webclient/locale/<string:lang>', type='http', auth="none")
def load_locale(self, lang):
magic_file_finding = [lang.replace("_", '-').lower(), lang.split('_')[0]]
for code in magic_file_finding:
try:
return http.Response(
werkzeug.wsgi.wrap_file(
request.httprequest.environ,
file_open('web/static/lib/moment/locale/%s.js' % code, 'rb')
),
content_type='application/javascript; charset=utf-8',
headers=[('Cache-Control', 'max-age=%s' % http.STATIC_CACHE)],
direct_passthrough=True,
)
except IOError:
_logger.debug("No moment locale for code %s", code)
return request.make_response("", headers=[
('Content-Type', 'application/javascript'),
('Cache-Control', 'max-age=%s' % http.STATIC_CACHE),
])
@http.route('/web/webclient/qweb/<string:unique>', type='http', auth="none", cors="*")
def qweb(self, unique, mods=None, db=None, bundle=None):
if not request.db and mods is None:
mods = odoo.conf.server_wide_modules or []
content = HomeStaticTemplateHelpers.get_qweb_templates(mods, db, debug=request.session.debug, bundle=bundle)
return request.make_response(content, [
('Content-Type', 'text/xml'),
('Cache-Control','public, max-age=' + str(CONTENT_MAXAGE))
])
@http.route('/web/webclient/bootstrap_translations', type='json', auth="none")
def bootstrap_translations(self, mods=None):
""" Load local translations from *.po files, as a temporary solution
until we have established a valid session. This is meant only
for translating the login page and db management chrome, using
the browser's language. """
# For performance reasons we only load a single translation, so for
# sub-languages (that should only be partially translated) we load the
# main language PO instead - that should be enough for the login screen.
context = dict(request.context)
request.session._fix_lang(context)
lang = context['lang'].split('_')[0]
if mods is None:
mods = odoo.conf.server_wide_modules or []
if request.db:
mods = request.env.registry._init_modules | set(mods)
translations_per_module = {}
for addon_name in mods:
manifest = http.addons_manifest.get(addon_name)
if manifest and manifest.get('bootstrap'):
addons_path = http.addons_manifest[addon_name]['addons_path']
f_name = os.path.join(addons_path, addon_name, "i18n", lang + ".po")
if not os.path.exists(f_name):
continue
translations_per_module[addon_name] = {'messages': _local_web_translations(f_name)}
return {"modules": translations_per_module,
"lang_parameters": None}
@http.route('/web/webclient/translations/<string:unique>', type='http', auth="public", cors="*")
def translations(self, unique, mods=None, lang=None):
"""
Load the translations for the specified language and modules
        :param unique: this parameter is not used, but it is mandatory: it is used by the HTTP stack to make the request unique
:param mods: the modules, a comma separated list
:param lang: the language of the user
:return:
"""
if mods:
mods = mods.split(',')
elif mods is None:
mods = list(request.env.registry._init_modules) + (odoo.conf.server_wide_modules or [])
translations_per_module, lang_params = request.env["ir.translation"].get_translations_for_webclient(mods, lang)
body = json.dumps({
'lang': lang_params and lang_params["code"],
'lang_parameters': lang_params,
'modules': translations_per_module,
'multi_lang': len(request.env['res.lang'].sudo().get_installed()) > 1,
})
response = request.make_response(body, [
            # this method must specify a content-type of application/json instead of the default text/html,
            # because the route type is http but the RPC is made with a GET and expects JSON
('Content-Type', 'application/json'),
('Cache-Control', 'public, max-age=' + str(CONTENT_MAXAGE)),
])
return response
@http.route('/web/webclient/version_info', type='json', auth="none")
def version_info(self):
return odoo.service.common.exp_version()
@http.route('/web/tests', type='http', auth="user")
def test_suite(self, mod=None, **kwargs):
return request.render('web.qunit_suite')
@http.route('/web/tests/mobile', type='http', auth="none")
def test_mobile_suite(self, mod=None, **kwargs):
return request.render('web.qunit_mobile_suite')
@http.route('/web/benchmarks', type='http', auth="none")
def benchmarks(self, mod=None, **kwargs):
return request.render('web.benchmark_suite')
class Database(http.Controller):
def _render_template(self, **d):
d.setdefault('manage',True)
d['insecure'] = odoo.tools.config.verify_admin_password('admin')
d['list_db'] = odoo.tools.config['list_db']
d['langs'] = odoo.service.db.exp_list_lang()
d['countries'] = odoo.service.db.exp_list_countries()
d['pattern'] = DBNAME_PATTERN
# databases list
d['databases'] = []
try:
d['databases'] = http.db_list()
d['incompatible_databases'] = odoo.service.db.list_db_incompatible(d['databases'])
except odoo.exceptions.AccessDenied:
monodb = db_monodb()
if monodb:
d['databases'] = [monodb]
templates = {}
with file_open("web/static/src/public/database_manager.qweb.html", "r") as fd:
template = fd.read()
with file_open("web/static/src/public/database_manager.master_input.qweb.html", "r") as fd:
templates['master_input'] = fd.read()
with file_open("web/static/src/public/database_manager.create_form.qweb.html", "r") as fd:
templates['create_form'] = fd.read()
def load(template_name, options):
return (html.fragment_fromstring(templates[template_name]), template_name)
return qweb_render(html.document_fromstring(template), d, load=load)
@http.route('/web/database/selector', type='http', auth="none")
def selector(self, **kw):
request._cr = None
return self._render_template(manage=False)
@http.route('/web/database/manager', type='http', auth="none")
def manager(self, **kw):
request._cr = None
return self._render_template()
@http.route('/web/database/create', type='http', auth="none", methods=['POST'], csrf=False)
def create(self, master_pwd, name, lang, password, **post):
insecure = odoo.tools.config.verify_admin_password('admin')
if insecure and master_pwd:
dispatch_rpc('db', 'change_admin_password', ["admin", master_pwd])
try:
if not re.match(DBNAME_PATTERN, name):
raise Exception(_('Invalid database name. Only alphanumerical characters, underscore, hyphen and dot are allowed.'))
            # country_code could be the string "False", which is truthy in Python
country_code = post.get('country_code') or False
dispatch_rpc('db', 'create_database', [master_pwd, name, bool(post.get('demo')), lang, password, post['login'], country_code, post['phone']])
request.session.authenticate(name, post['login'], password)
return request.redirect('/web')
except Exception as e:
error = "Database creation error: %s" % (str(e) or repr(e))
return self._render_template(error=error)
@http.route('/web/database/duplicate', type='http', auth="none", methods=['POST'], csrf=False)
def duplicate(self, master_pwd, name, new_name):
insecure = odoo.tools.config.verify_admin_password('admin')
if insecure and master_pwd:
dispatch_rpc('db', 'change_admin_password', ["admin", master_pwd])
try:
if not re.match(DBNAME_PATTERN, new_name):
raise Exception(_('Invalid database name. Only alphanumerical characters, underscore, hyphen and dot are allowed.'))
dispatch_rpc('db', 'duplicate_database', [master_pwd, name, new_name])
request._cr = None # duplicating a database leads to an unusable cursor
return request.redirect('/web/database/manager')
except Exception as e:
error = "Database duplication error: %s" % (str(e) or repr(e))
return self._render_template(error=error)
@http.route('/web/database/drop', type='http', auth="none", methods=['POST'], csrf=False)
def drop(self, master_pwd, name):
insecure = odoo.tools.config.verify_admin_password('admin')
if insecure and master_pwd:
dispatch_rpc('db', 'change_admin_password', ["admin", master_pwd])
try:
dispatch_rpc('db','drop', [master_pwd, name])
request._cr = None # dropping a database leads to an unusable cursor
return request.redirect('/web/database/manager')
except Exception as e:
error = "Database deletion error: %s" % (str(e) or repr(e))
return self._render_template(error=error)
@http.route('/web/database/backup', type='http', auth="none", methods=['POST'], csrf=False)
def backup(self, master_pwd, name, backup_format = 'zip'):
insecure = odoo.tools.config.verify_admin_password('admin')
if insecure and master_pwd:
dispatch_rpc('db', 'change_admin_password', ["admin", master_pwd])
try:
odoo.service.db.check_super(master_pwd)
ts = datetime.datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S")
filename = "%s_%s.%s" % (name, ts, backup_format)
headers = [
('Content-Type', 'application/octet-stream; charset=binary'),
('Content-Disposition', content_disposition(filename)),
]
dump_stream = odoo.service.db.dump_db(name, None, backup_format)
response = werkzeug.wrappers.Response(dump_stream, headers=headers, direct_passthrough=True)
return response
except Exception as e:
_logger.exception('Database.backup')
error = "Database backup error: %s" % (str(e) or repr(e))
return self._render_template(error=error)
@http.route('/web/database/restore', type='http', auth="none", methods=['POST'], csrf=False)
def restore(self, master_pwd, backup_file, name, copy=False):
insecure = odoo.tools.config.verify_admin_password('admin')
if insecure and master_pwd:
dispatch_rpc('db', 'change_admin_password', ["admin", master_pwd])
try:
data_file = None
db.check_super(master_pwd)
with tempfile.NamedTemporaryFile(delete=False) as data_file:
backup_file.save(data_file)
db.restore_db(name, data_file.name, str2bool(copy))
return request.redirect('/web/database/manager')
except Exception as e:
error = "Database restore error: %s" % (str(e) or repr(e))
return self._render_template(error=error)
finally:
if data_file:
os.unlink(data_file.name)
@http.route('/web/database/change_password', type='http', auth="none", methods=['POST'], csrf=False)
def change_password(self, master_pwd, master_pwd_new):
try:
dispatch_rpc('db', 'change_admin_password', [master_pwd, master_pwd_new])
return request.redirect('/web/database/manager')
except Exception as e:
error = "Master password update error: %s" % (str(e) or repr(e))
return self._render_template(error=error)
@http.route('/web/database/list', type='json', auth='none')
def list(self):
"""
        Used by the Mobile application for listing databases
:return: List of databases
:rtype: list
"""
return http.db_list()
class Session(http.Controller):
@http.route('/web/session/get_session_info', type='json', auth="none")
def get_session_info(self):
request.session.check_security()
request.uid = request.session.uid
return request.env['ir.http'].session_info()
@http.route('/web/session/authenticate', type='json', auth="none")
def authenticate(self, db, login, password, base_location=None):
request.session.authenticate(db, login, password)
return request.env['ir.http'].session_info()
@http.route('/web/session/change_password', type='json', auth="user")
def change_password(self, fields):
old_password, new_password,confirm_password = operator.itemgetter('old_pwd', 'new_password','confirm_pwd')(
{f['name']: f['value'] for f in fields})
if not (old_password.strip() and new_password.strip() and confirm_password.strip()):
return {'error': _('You cannot leave any password empty.')}
if new_password != confirm_password:
return {'error': _('The new password and its confirmation must be identical.')}
msg = _("Error, password not changed !")
try:
if request.env['res.users'].change_password(old_password, new_password):
return {'new_password': new_password}
except AccessDenied as e:
msg = e.args[0]
if msg == AccessDenied().args[0]:
msg = _('The old password you provided is incorrect, your password was not changed.')
except UserError as e:
msg = e.args[0]
return {'error': msg}
@http.route('/web/session/get_lang_list', type='json', auth="none")
def get_lang_list(self):
try:
return dispatch_rpc('db', 'list_lang', []) or []
except Exception as e:
return {"error": e, "title": _("Languages")}
@http.route('/web/session/modules', type='json', auth="user")
def modules(self):
# return all installed modules. Web client is smart enough to not load a module twice
return list(request.env.registry._init_modules | set([module.current_test] if module.current_test else []))
@http.route('/web/session/save_session_action', type='json', auth="user")
def save_session_action(self, the_action):
"""
        This method stores an action object in the session and returns an integer
identifying that action. The method get_session_action() can be used to get
back the action.
:param the_action: The action to save in the session.
:type the_action: anything
:return: A key identifying the saved action.
:rtype: integer
"""
return request.session.save_action(the_action)
@http.route('/web/session/get_session_action', type='json', auth="user")
def get_session_action(self, key):
"""
Gets back a previously saved action. This method can return None if the action
        was saved too long ago (this case should be handled gracefully).
:param key: The key given by save_session_action()
:type key: integer
:return: The saved action or None.
:rtype: anything
"""
return request.session.get_action(key)
@http.route('/web/session/check', type='json', auth="user")
def check(self):
request.session.check_security()
return None
@http.route('/web/session/account', type='json', auth="user")
def account(self):
ICP = request.env['ir.config_parameter'].sudo()
params = {
'response_type': 'token',
'client_id': ICP.get_param('database.uuid') or '',
'state': json.dumps({'d': request.db, 'u': ICP.get_param('web.base.url')}),
'scope': 'userinfo',
}
return 'https://accounts.odoo.com/oauth2/auth?' + url_encode(params)
@http.route('/web/session/destroy', type='json', auth="user")
def destroy(self):
request.session.logout()
@http.route('/web/session/logout', type='http', auth="none")
def logout(self, redirect='/web'):
request.session.logout(keep_db=True)
return request.redirect(redirect, 303)
class DataSet(http.Controller):
@http.route('/web/dataset/search_read', type='json', auth="user")
def search_read(self, model, fields=False, offset=0, limit=False, domain=None, sort=None):
return self.do_search_read(model, fields, offset, limit, domain, sort)
def do_search_read(self, model, fields=False, offset=0, limit=False, domain=None, sort=None):
""" Performs a search() followed by a read() (if needed) using the
provided search criteria
:param str model: the name of the model to search on
:param fields: a list of the fields to return in the result records
:type fields: [str]
:param int offset: from which index should the results start being returned
:param int limit: the maximum number of records to return
:param list domain: the search domain for the query
:param list sort: sorting directives
:returns: A structure (dict) with two keys: ids (all the ids matching
the (domain, context) pair) and records (paginated records
matching fields selection set)
        :rtype: dict
"""
Model = request.env[model]
return Model.web_search_read(domain, fields, offset=offset, limit=limit, order=sort)
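    # Illustrative JSON-RPC arguments for /web/dataset/search_read (parameter names come
    # from the route above, the values are made up):
    #     {"model": "res.partner", "fields": ["name"], "domain": [["active", "=", true]],
    #      "offset": 0, "limit": 80, "sort": "name asc"}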
@http.route('/web/dataset/load', type='json', auth="user")
def load(self, model, id, fields):
value = {}
r = request.env[model].browse([id]).read()
if r:
value = r[0]
return {'value': value}
def call_common(self, model, method, args, domain_id=None, context_id=None):
return self._call_kw(model, method, args, {})
def _call_kw(self, model, method, args, kwargs):
check_method_name(method)
return call_kw(request.env[model], method, args, kwargs)
@http.route('/web/dataset/call', type='json', auth="user")
def call(self, model, method, args, domain_id=None, context_id=None):
return self._call_kw(model, method, args, {})
@http.route(['/web/dataset/call_kw', '/web/dataset/call_kw/<path:path>'], type='json', auth="user")
def call_kw(self, model, method, args, kwargs, path=None):
return self._call_kw(model, method, args, kwargs)
@http.route('/web/dataset/call_button', type='json', auth="user")
def call_button(self, model, method, args, kwargs):
action = self._call_kw(model, method, args, kwargs)
if isinstance(action, dict) and action.get('type') != '':
return clean_action(action, env=request.env)
return False
@http.route('/web/dataset/resequence', type='json', auth="user")
def resequence(self, model, ids, field='sequence', offset=0):
""" Re-sequences a number of records in the model, by their ids
The re-sequencing starts at the first model of ``ids``, the sequence
number is incremented by one after each record and starts at ``offset``
:param ids: identifiers of the records to resequence, in the new sequence order
:type ids: list(id)
:param str field: field used for sequence specification, defaults to
"sequence"
:param int offset: sequence number for first record in ``ids``, allows
starting the resequencing from an arbitrary number,
defaults to ``0``
"""
m = request.env[model]
if not m.fields_get([field]):
return False
        # assign increasing sequence values starting at ``offset``
for i, record in enumerate(m.browse(ids)):
record.write({field: i + offset})
return True
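    # Worked example (values are illustrative): resequence(model, ids=[7, 3, 5], offset=10)
    # writes sequence=10 on record 7, sequence=11 on record 3 and sequence=12 on record 5.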
class View(http.Controller):
@http.route('/web/view/edit_custom', type='json', auth="user")
def edit_custom(self, custom_id, arch):
"""
Edit a custom view
:param int custom_id: the id of the edited custom view
:param str arch: the edited arch of the custom view
:returns: dict with acknowledged operation (result set to True)
"""
custom_view = request.env['ir.ui.view.custom'].browse(custom_id)
custom_view.write({ 'arch': arch })
return {'result': True}
class Binary(http.Controller):
@http.route(['/web/content',
'/web/content/<string:xmlid>',
'/web/content/<string:xmlid>/<string:filename>',
'/web/content/<int:id>',
'/web/content/<int:id>/<string:filename>',
'/web/content/<string:model>/<int:id>/<string:field>',
'/web/content/<string:model>/<int:id>/<string:field>/<string:filename>'], type='http', auth="public")
def content_common(self, xmlid=None, model='ir.attachment', id=None, field='datas',
filename=None, filename_field='name', unique=None, mimetype=None,
download=None, data=None, token=None, access_token=None, **kw):
return request.env['ir.http']._get_content_common(xmlid=xmlid, model=model, res_id=id, field=field, unique=unique, filename=filename,
filename_field=filename_field, download=download, mimetype=mimetype, access_token=access_token, token=token)
@http.route(['/web/assets/debug/<string:filename>',
'/web/assets/debug/<path:extra>/<string:filename>',
'/web/assets/<int:id>/<string:filename>',
'/web/assets/<int:id>-<string:unique>/<string:filename>',
'/web/assets/<int:id>-<string:unique>/<path:extra>/<string:filename>'], type='http', auth="public")
def content_assets(self, id=None, filename=None, unique=None, extra=None, **kw):
domain = [('url', '!=', False)]
if extra:
domain += [('url', '=like', f'/web/assets/%/{extra}/{filename}')]
else:
domain += [
('url', '=like', f'/web/assets/%/{filename}'),
('url', 'not like', f'/web/assets/%/%/{filename}')
]
id = id or request.env['ir.attachment'].sudo().search(domain, limit=1).id
return request.env['ir.http']._get_content_common(xmlid=None, model='ir.attachment', res_id=id, field='datas', unique=unique, filename=filename,
filename_field='name', download=None, mimetype=None, access_token=None, token=None)
@http.route(['/web/image',
'/web/image/<string:xmlid>',
'/web/image/<string:xmlid>/<string:filename>',
'/web/image/<string:xmlid>/<int:width>x<int:height>',
'/web/image/<string:xmlid>/<int:width>x<int:height>/<string:filename>',
'/web/image/<string:model>/<int:id>/<string:field>',
'/web/image/<string:model>/<int:id>/<string:field>/<string:filename>',
'/web/image/<string:model>/<int:id>/<string:field>/<int:width>x<int:height>',
'/web/image/<string:model>/<int:id>/<string:field>/<int:width>x<int:height>/<string:filename>',
'/web/image/<int:id>',
'/web/image/<int:id>/<string:filename>',
'/web/image/<int:id>/<int:width>x<int:height>',
'/web/image/<int:id>/<int:width>x<int:height>/<string:filename>',
'/web/image/<int:id>-<string:unique>',
'/web/image/<int:id>-<string:unique>/<string:filename>',
'/web/image/<int:id>-<string:unique>/<int:width>x<int:height>',
'/web/image/<int:id>-<string:unique>/<int:width>x<int:height>/<string:filename>'], type='http', auth="public")
def content_image(self, xmlid=None, model='ir.attachment', id=None, field='datas',
filename_field='name', unique=None, filename=None, mimetype=None,
download=None, width=0, height=0, crop=False, access_token=None,
**kwargs):
# other kwargs are ignored on purpose
return request.env['ir.http']._content_image(xmlid=xmlid, model=model, res_id=id, field=field,
filename_field=filename_field, unique=unique, filename=filename, mimetype=mimetype,
download=download, width=width, height=height, crop=crop,
quality=int(kwargs.get('quality', 0)), access_token=access_token)
# backward compatibility
@http.route(['/web/binary/image'], type='http', auth="public")
def content_image_backward_compatibility(self, model, id, field, resize=None, **kw):
width = None
height = None
if resize:
width, height = resize.split(",")
return request.env['ir.http']._content_image(model=model, res_id=id, field=field, width=width, height=height)
@http.route('/web/binary/upload', type='http', auth="user")
@serialize_exception
def upload(self, ufile, callback=None):
# TODO: might be useful to have a configuration flag for max-length file uploads
out = """<script language="javascript" type="text/javascript">
var win = window.top.window;
win.jQuery(win).trigger(%s, %s);
</script>"""
try:
data = ufile.read()
args = [len(data), ufile.filename,
ufile.content_type, pycompat.to_text(base64.b64encode(data))]
except Exception as e:
args = [False, str(e)]
return out % (json.dumps(clean(callback)), json.dumps(args)) if callback else json.dumps(args)
@http.route('/web/binary/upload_attachment', type='http', auth="user")
@serialize_exception
def upload_attachment(self, model, id, ufile, callback=None):
files = request.httprequest.files.getlist('ufile')
Model = request.env['ir.attachment']
out = """<script language="javascript" type="text/javascript">
var win = window.top.window;
win.jQuery(win).trigger(%s, %s);
</script>"""
args = []
for ufile in files:
filename = ufile.filename
if request.httprequest.user_agent.browser == 'safari':
                # Safari sends NFD UTF-8 (where é is composed of 'e' and a combining accent);
                # we need to send back the same form, otherwise it'll fail
filename = unicodedata.normalize('NFD', ufile.filename)
try:
attachment = Model.create({
'name': filename,
'datas': base64.encodebytes(ufile.read()),
'res_model': model,
'res_id': int(id)
})
attachment._post_add_create()
except AccessError:
args.append({'error': _("You are not allowed to upload an attachment here.")})
except Exception:
args.append({'error': _("Something horrible happened")})
_logger.exception("Fail to upload attachment %s" % ufile.filename)
else:
args.append({
'filename': clean(filename),
'mimetype': ufile.content_type,
'id': attachment.id,
'size': attachment.file_size
})
return out % (json.dumps(clean(callback)), json.dumps(args)) if callback else json.dumps(args)
@http.route([
'/web/binary/company_logo',
'/logo',
'/logo.png',
], type='http', auth="none", cors="*")
def company_logo(self, dbname=None, **kw):
imgname = 'logo'
imgext = '.png'
placeholder = functools.partial(get_resource_path, 'web', 'static', 'img')
uid = None
if request.session.db:
dbname = request.session.db
uid = request.session.uid
elif dbname is None:
dbname = db_monodb()
if not uid:
uid = odoo.SUPERUSER_ID
if not dbname:
response = http.send_file(placeholder(imgname + imgext))
else:
try:
# create an empty registry
registry = odoo.modules.registry.Registry(dbname)
with registry.cursor() as cr:
company = int(kw['company']) if kw and kw.get('company') else False
if company:
cr.execute("""SELECT logo_web, write_date
FROM res_company
WHERE id = %s
""", (company,))
else:
cr.execute("""SELECT c.logo_web, c.write_date
FROM res_users u
LEFT JOIN res_company c
ON c.id = u.company_id
WHERE u.id = %s
""", (uid,))
row = cr.fetchone()
if row and row[0]:
image_base64 = base64.b64decode(row[0])
image_data = io.BytesIO(image_base64)
mimetype = guess_mimetype(image_base64, default='image/png')
imgext = '.' + mimetype.split('/')[1]
if imgext == '.svg+xml':
imgext = '.svg'
response = http.send_file(image_data, filename=imgname + imgext, mimetype=mimetype, mtime=row[1])
else:
response = http.send_file(placeholder('nologo.png'))
except Exception:
response = http.send_file(placeholder(imgname + imgext))
return response
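    # Resolution order implemented above: explicit ?company= parameter, otherwise the logo
    # of the session user's company, falling back to the bundled placeholder images
    # (logo.png / nologo.png) when no database or no logo is available.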
@http.route(['/web/sign/get_fonts','/web/sign/get_fonts/<string:fontname>'], type='json', auth='public')
def get_fonts(self, fontname=None):
"""This route will return a list of base64 encoded fonts.
Those fonts will be proposed to the user when creating a signature
using mode 'auto'.
:return: base64 encoded fonts
:rtype: list
"""
supported_exts = ('.ttf', '.otf', '.woff', '.woff2')
fonts = []
fonts_directory = file_path(os.path.join('web', 'static', 'fonts', 'sign'))
if fontname:
font_path = os.path.join(fonts_directory, fontname)
with file_open(font_path, 'rb', filter_ext=supported_exts) as font_file:
font = base64.b64encode(font_file.read())
fonts.append(font)
else:
font_filenames = sorted([fn for fn in os.listdir(fonts_directory) if fn.endswith(supported_exts)])
            for filename in font_filenames:
                # use a context manager so the font file handle is closed after reading
                with file_open(os.path.join(fonts_directory, filename), 'rb', filter_ext=supported_exts) as font_file:
                    font = base64.b64encode(font_file.read())
                fonts.append(font)
return fonts
class Action(http.Controller):
@http.route('/web/action/load', type='json', auth="user")
def load(self, action_id, additional_context=None):
Actions = request.env['ir.actions.actions']
value = False
try:
action_id = int(action_id)
except ValueError:
try:
action = request.env.ref(action_id)
assert action._name.startswith('ir.actions.')
action_id = action.id
except Exception:
action_id = 0 # force failed read
base_action = Actions.browse([action_id]).sudo().read(['type'])
if base_action:
ctx = dict(request.context)
action_type = base_action[0]['type']
if action_type == 'ir.actions.report':
ctx.update({'bin_size': True})
if additional_context:
ctx.update(additional_context)
request.context = ctx
action = request.env[action_type].sudo().browse([action_id]).read()
if action:
value = clean_action(action[0], env=request.env)
return value
@http.route('/web/action/run', type='json', auth="user")
def run(self, action_id):
action = request.env['ir.actions.server'].browse([action_id])
result = action.run()
return clean_action(result, env=action.env) if result else False
class Export(http.Controller):
@http.route('/web/export/formats', type='json', auth="user")
def formats(self):
""" Returns all valid export formats
        :returns: for each export format, a dict with its identifier, printable label
                  and an optional error message when the format is unavailable
        :rtype: [dict]
"""
return [
{'tag': 'xlsx', 'label': 'XLSX', 'error': None if xlsxwriter else "XlsxWriter 0.9.3 required"},
{'tag': 'csv', 'label': 'CSV'},
]
def fields_get(self, model):
Model = request.env[model]
fields = Model.fields_get()
return fields
@http.route('/web/export/get_fields', type='json', auth="user")
def get_fields(self, model, prefix='', parent_name= '',
import_compat=True, parent_field_type=None,
parent_field=None, exclude=None):
fields = self.fields_get(model)
if import_compat:
if parent_field_type in ['many2one', 'many2many']:
rec_name = request.env[model]._rec_name_fallback()
fields = {'id': fields['id'], rec_name: fields[rec_name]}
else:
fields['.id'] = {**fields['id']}
fields['id']['string'] = _('External ID')
if parent_field:
parent_field['string'] = _('External ID')
fields['id'] = parent_field
fields_sequence = sorted(fields.items(),
key=lambda field: odoo.tools.ustr(field[1].get('string', '').lower()))
records = []
for field_name, field in fields_sequence:
if import_compat and not field_name == 'id':
if exclude and field_name in exclude:
continue
if field.get('readonly'):
# If none of the field's states unsets readonly, skip the field
if all(dict(attrs).get('readonly', True)
for attrs in field.get('states', {}).values()):
continue
if not field.get('exportable', True):
continue
            id = prefix + (prefix and '/' or '') + field_name
val = id
if field_name == 'name' and import_compat and parent_field_type in ['many2one', 'many2many']:
# Add name field when expand m2o and m2m fields in import-compatible mode
val = prefix
name = parent_name + (parent_name and '/' or '') + field['string']
record = {'id': id, 'string': name,
'value': val, 'children': False,
'field_type': field.get('type'),
'required': field.get('required'),
'relation_field': field.get('relation_field')}
records.append(record)
if len(id.split('/')) < 3 and 'relation' in field:
ref = field.pop('relation')
record['value'] += '/id'
record['params'] = {'model': ref, 'prefix': id, 'name': name, 'parent_field': field}
record['children'] = True
return records
@http.route('/web/export/namelist', type='json', auth="user")
def namelist(self, model, export_id):
# TODO: namelist really has no reason to be in Python (although itertools.groupby helps)
export = request.env['ir.exports'].browse([export_id]).read()[0]
export_fields_list = request.env['ir.exports.line'].browse(export['export_fields']).read()
fields_data = self.fields_info(
model, [f['name'] for f in export_fields_list])
return [
{'name': field['name'], 'label': fields_data[field['name']]}
for field in export_fields_list
]
def fields_info(self, model, export_fields):
info = {}
fields = self.fields_get(model)
if ".id" in export_fields:
fields['.id'] = fields.get('id', {'string': 'ID'})
# To make fields retrieval more efficient, fetch all sub-fields of a
# given field at the same time. Because the order in the export list is
# arbitrary, this requires ordering all sub-fields of a given field
# together so they can be fetched at the same time
#
# Works the following way:
# * sort the list of fields to export, the default sorting order will
# put the field itself (if present, for xmlid) and all of its
# sub-fields right after it
        # * then, group on: the first field of the path (which is the same for
        #   a field and for its subfields) and the length of splitting on the
        #   first '/', which basically means grouping the field on one side and
# all of the subfields on the other. This way, we have the field (for
# the xmlid) with length 1, and all of the subfields with the same
# base but a length "flag" of 2
# * if we have a normal field (length 1), just add it to the info
# mapping (with its string) as-is
# * otherwise, recursively call fields_info via graft_subfields.
# all graft_subfields does is take the result of fields_info (on the
# field's model) and prepend the current base (current field), which
# rebuilds the whole sub-tree for the field
#
# result: because we're not fetching the fields_get for half the
# database models, fetching a namelist with a dozen fields (including
# relational data) falls from ~6s to ~300ms (on the leads model).
# export lists with no sub-fields (e.g. import_compatible lists with
# no o2m) are even more efficient (from the same 6s to ~170ms, as
# there's a single fields_get to execute)
for (base, length), subfields in itertools.groupby(
sorted(export_fields),
lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))):
subfields = list(subfields)
if length == 2:
# subfields is a seq of $base/*rest, and not loaded yet
info.update(self.graft_subfields(
fields[base]['relation'], base, fields[base]['string'],
subfields
))
elif base in fields:
info[base] = fields[base]['string']
return info
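    # Worked example (field names are illustrative): for export_fields
    # ['name', 'partner_id', 'partner_id/city'] the grouping above yields
    # ('name', 1) -> ['name'], ('partner_id', 1) -> ['partner_id'] and
    # ('partner_id', 2) -> ['partner_id/city']; the length-2 group is resolved through
    # graft_subfields() with a single fields_get() on the related co-model.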
def graft_subfields(self, model, prefix, prefix_string, fields):
export_fields = [field.split('/', 1)[1] for field in fields]
return (
(prefix + '/' + k, prefix_string + '/' + v)
for k, v in self.fields_info(model, export_fields).items())
class ExportFormat(object):
@property
def content_type(self):
""" Provides the format's content type """
raise NotImplementedError()
@property
def extension(self):
raise NotImplementedError()
def filename(self, base):
""" Creates a filename *without extension* for the item / format of
model ``base``.
"""
if base not in request.env:
return base
model_description = request.env['ir.model']._get(base).name
return f"{model_description} ({base})"
def from_data(self, fields, rows):
""" Conversion method from Odoo's export data to whatever the
current export class outputs
        :param list fields: a list of fields to export
        :param list rows: a list of records to export
        :returns: the records exported in the target format
:rtype: bytes
"""
raise NotImplementedError()
def from_group_data(self, fields, groups):
raise NotImplementedError()
def base(self, data):
params = json.loads(data)
model, fields, ids, domain, import_compat = \
operator.itemgetter('model', 'fields', 'ids', 'domain', 'import_compat')(params)
Model = request.env[model].with_context(import_compat=import_compat, **params.get('context', {}))
if not Model._is_an_ordinary_table():
fields = [field for field in fields if field['name'] != 'id']
field_names = [f['name'] for f in fields]
if import_compat:
columns_headers = field_names
else:
columns_headers = [val['label'].strip() for val in fields]
groupby = params.get('groupby')
if not import_compat and groupby:
groupby_type = [Model._fields[x.split(':')[0]].type for x in groupby]
domain = [('id', 'in', ids)] if ids else domain
groups_data = Model.read_group(domain, [x if x != '.id' else 'id' for x in field_names], groupby, lazy=False)
# read_group(lazy=False) returns a dict only for final groups (with actual data),
# not for intermediary groups. The full group tree must be re-constructed.
tree = GroupsTreeNode(Model, field_names, groupby, groupby_type)
for leaf in groups_data:
tree.insert_leaf(leaf)
response_data = self.from_group_data(fields, tree)
else:
records = Model.browse(ids) if ids else Model.search(domain, offset=0, limit=False, order=False)
export_data = records.export_data(field_names).get('datas',[])
response_data = self.from_data(columns_headers, export_data)
# TODO: call `clean_filename` directly in `content_disposition`?
return request.make_response(response_data,
headers=[('Content-Disposition',
content_disposition(
osutil.clean_filename(self.filename(model) + self.extension))),
('Content-Type', self.content_type)],
)
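    # Flow of base(): grouped exports rebuild a GroupsTreeNode from read_group(lazy=False)
    # and go through from_group_data(), flat exports go through export_data() and from_data();
    # both end up in a response carrying a content_disposition() attachment header.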
class CSVExport(ExportFormat, http.Controller):
@http.route('/web/export/csv', type='http', auth="user")
@serialize_exception
def index(self, data):
return self.base(data)
@property
def content_type(self):
return 'text/csv;charset=utf8'
@property
def extension(self):
return '.csv'
def from_group_data(self, fields, groups):
raise UserError(_("Exporting grouped data to csv is not supported."))
def from_data(self, fields, rows):
fp = io.BytesIO()
writer = pycompat.csv_writer(fp, quoting=1)
writer.writerow(fields)
for data in rows:
row = []
for d in data:
# Spreadsheet apps tend to detect formulas on leading =, + and -
if isinstance(d, str) and d.startswith(('=', '-', '+')):
d = "'" + d
row.append(pycompat.to_text(d))
writer.writerow(row)
return fp.getvalue()
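    # Example of the formula guard above: the cell value "=SUM(A1:A9)" is written as
    # "'=SUM(A1:A9)" so spreadsheet applications treat it as text rather than a formula.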
class ExcelExport(ExportFormat, http.Controller):
@http.route('/web/export/xlsx', type='http', auth="user")
@serialize_exception
def index(self, data):
return self.base(data)
@property
def content_type(self):
return 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
@property
def extension(self):
return '.xlsx'
def from_group_data(self, fields, groups):
with GroupExportXlsxWriter(fields, groups.count) as xlsx_writer:
x, y = 1, 0
for group_name, group in groups.children.items():
x, y = xlsx_writer.write_group(x, y, group_name, group)
return xlsx_writer.value
def from_data(self, fields, rows):
with ExportXlsxWriter(fields, len(rows)) as xlsx_writer:
for row_index, row in enumerate(rows):
for cell_index, cell_value in enumerate(row):
if isinstance(cell_value, (list, tuple)):
cell_value = pycompat.to_text(cell_value)
xlsx_writer.write_cell(row_index + 1, cell_index, cell_value)
return xlsx_writer.value
class ReportController(http.Controller):
#------------------------------------------------------
# Report controllers
#------------------------------------------------------
@http.route([
'/report/<converter>/<reportname>',
'/report/<converter>/<reportname>/<docids>',
], type='http', auth='user', website=True)
def report_routes(self, reportname, docids=None, converter=None, **data):
report = request.env['ir.actions.report']._get_report_from_name(reportname)
context = dict(request.env.context)
if docids:
docids = [int(i) for i in docids.split(',')]
if data.get('options'):
data.update(json.loads(data.pop('options')))
if data.get('context'):
data['context'] = json.loads(data['context'])
context.update(data['context'])
if converter == 'html':
html = report.with_context(context)._render_qweb_html(docids, data=data)[0]
return request.make_response(html)
elif converter == 'pdf':
pdf = report.with_context(context)._render_qweb_pdf(docids, data=data)[0]
pdfhttpheaders = [('Content-Type', 'application/pdf'), ('Content-Length', len(pdf))]
return request.make_response(pdf, headers=pdfhttpheaders)
elif converter == 'text':
text = report.with_context(context)._render_qweb_text(docids, data=data)[0]
texthttpheaders = [('Content-Type', 'text/plain'), ('Content-Length', len(text))]
return request.make_response(text, headers=texthttpheaders)
else:
raise werkzeug.exceptions.HTTPException(description='Converter %s not implemented.' % converter)
#------------------------------------------------------
# Misc. route utils
#------------------------------------------------------
@http.route(['/report/barcode', '/report/barcode/<type>/<path:value>'], type='http', auth="public")
def report_barcode(self, type, value, **kwargs):
"""Contoller able to render barcode images thanks to reportlab.
Samples::
<img t-att-src="'/report/barcode/QR/%s' % o.name"/>
<img t-att-src="'/report/barcode/?type=%s&value=%s&width=%s&height=%s' %
('QR', o.name, 200, 200)"/>
:param type: Accepted types: 'Codabar', 'Code11', 'Code128', 'EAN13', 'EAN8', 'Extended39',
'Extended93', 'FIM', 'I2of5', 'MSI', 'POSTNET', 'QR', 'Standard39', 'Standard93',
'UPCA', 'USPS_4State'
:param width: Pixel width of the barcode
:param height: Pixel height of the barcode
:param humanreadable: Accepted values: 0 (default) or 1. 1 will insert the readable value
at the bottom of the output image
:param quiet: Accepted values: 0 (default) or 1. 1 will display white
margins on left and right.
:param mask: The mask code to be used when rendering this QR-code.
Masks allow adding elements on top of the generated image,
such as the Swiss cross in the center of QR-bill codes.
:param barLevel: QR code Error Correction Levels. Default is 'L'.
ref: https://hg.reportlab.com/hg-public/reportlab/file/830157489e00/src/reportlab/graphics/barcode/qr.py#l101
"""
try:
barcode = request.env['ir.actions.report'].barcode(type, value, **kwargs)
except (ValueError, AttributeError):
raise werkzeug.exceptions.HTTPException(description='Cannot convert into barcode.')
return request.make_response(barcode, headers=[('Content-Type', 'image/png')])
@http.route(['/report/download'], type='http', auth="user")
def report_download(self, data, context=None):
"""This function is used by 'action_manager_report.js' in order to trigger the download of
a pdf/controller report.
        :param data: a JavaScript array, JSON.stringified, containing the report internal url ([0]) and
type [1]
:returns: Response with an attachment header
"""
requestcontent = json.loads(data)
url, type = requestcontent[0], requestcontent[1]
reportname = '???'
try:
if type in ['qweb-pdf', 'qweb-text']:
converter = 'pdf' if type == 'qweb-pdf' else 'text'
extension = 'pdf' if type == 'qweb-pdf' else 'txt'
pattern = '/report/pdf/' if type == 'qweb-pdf' else '/report/text/'
reportname = url.split(pattern)[1].split('?')[0]
docids = None
if '/' in reportname:
reportname, docids = reportname.split('/')
if docids:
# Generic report:
response = self.report_routes(reportname, docids=docids, converter=converter, context=context)
else:
# Particular report:
data = dict(url_decode(url.split('?')[1]).items()) # decoding the args represented in JSON
if 'context' in data:
context, data_context = json.loads(context or '{}'), json.loads(data.pop('context'))
context = json.dumps({**context, **data_context})
response = self.report_routes(reportname, converter=converter, context=context, **data)
report = request.env['ir.actions.report']._get_report_from_name(reportname)
filename = "%s.%s" % (report.name, extension)
if docids:
ids = [int(x) for x in docids.split(",")]
obj = request.env[report.model].browse(ids)
if report.print_report_name and not len(obj) > 1:
report_name = safe_eval(report.print_report_name, {'object': obj, 'time': time})
filename = "%s.%s" % (report_name, extension)
response.headers.add('Content-Disposition', content_disposition(filename))
return response
else:
return
except Exception as e:
_logger.exception("Error while generating report %s", reportname)
se = _serialize_exception(e)
error = {
'code': 200,
'message': "Odoo Server Error",
'data': se
}
res = request.make_response(html_escape(json.dumps(error)))
raise werkzeug.exceptions.InternalServerError(response=res) from e
@http.route(['/report/check_wkhtmltopdf'], type='json', auth="user")
def check_wkhtmltopdf(self):
return request.env['ir.actions.report'].get_wkhtmltopdf_state()
| 43.94742 | 89,433 |
4,740 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from collections import deque
import io
import json
from odoo import http, _
from odoo.http import content_disposition, request
from odoo.tools import ustr, osutil
from odoo.tools.misc import xlsxwriter
class TableExporter(http.Controller):
@http.route('/web/pivot/check_xlsxwriter', type='json', auth='none')
def check_xlsxwriter(self):
return xlsxwriter is not None
@http.route('/web/pivot/export_xlsx', type='http', auth="user")
def export_xlsx(self, data, **kw):
jdata = json.loads(data)
output = io.BytesIO()
workbook = xlsxwriter.Workbook(output, {'in_memory': True})
worksheet = workbook.add_worksheet(jdata['title'])
header_bold = workbook.add_format({'bold': True, 'pattern': 1, 'bg_color': '#AAAAAA'})
header_plain = workbook.add_format({'pattern': 1, 'bg_color': '#AAAAAA'})
bold = workbook.add_format({'bold': True})
measure_count = jdata['measure_count']
origin_count = jdata['origin_count']
# Step 1: writing col group headers
col_group_headers = jdata['col_group_headers']
# x,y: current coordinates
        # carry: queue containing cell information when a cell has a height >= 2
# and the drawing code needs to add empty cells below
x, y, carry = 1, 0, deque()
for i, header_row in enumerate(col_group_headers):
worksheet.write(i, 0, '', header_plain)
for header in header_row:
while (carry and carry[0]['x'] == x):
cell = carry.popleft()
for j in range(measure_count * (2 * origin_count - 1)):
worksheet.write(y, x+j, '', header_plain)
if cell['height'] > 1:
carry.append({'x': x, 'height': cell['height'] - 1})
x = x + measure_count * (2 * origin_count - 1)
for j in range(header['width']):
worksheet.write(y, x + j, header['title'] if j == 0 else '', header_plain)
if header['height'] > 1:
carry.append({'x': x, 'height': header['height'] - 1})
x = x + header['width']
while (carry and carry[0]['x'] == x):
cell = carry.popleft()
for j in range(measure_count * (2 * origin_count - 1)):
worksheet.write(y, x+j, '', header_plain)
if cell['height'] > 1:
carry.append({'x': x, 'height': cell['height'] - 1})
x = x + measure_count * (2 * origin_count - 1)
x, y = 1, y + 1
# Step 2: writing measure headers
measure_headers = jdata['measure_headers']
if measure_headers:
worksheet.write(y, 0, '', header_plain)
for measure in measure_headers:
style = header_bold if measure['is_bold'] else header_plain
worksheet.write(y, x, measure['title'], style)
for i in range(1, 2 * origin_count - 1):
worksheet.write(y, x+i, '', header_plain)
x = x + (2 * origin_count - 1)
x, y = 1, y + 1
# set minimum width of cells to 16 which is around 88px
worksheet.set_column(0, len(measure_headers), 16)
# Step 3: writing origin headers
origin_headers = jdata['origin_headers']
if origin_headers:
worksheet.write(y, 0, '', header_plain)
for origin in origin_headers:
style = header_bold if origin['is_bold'] else header_plain
worksheet.write(y, x, origin['title'], style)
x = x + 1
y = y + 1
# Step 4: writing data
x = 0
for row in jdata['rows']:
worksheet.write(y, x, row['indent'] * ' ' + ustr(row['title']), header_plain)
for cell in row['values']:
x = x + 1
if cell.get('is_bold', False):
worksheet.write(y, x, cell['value'], bold)
else:
worksheet.write(y, x, cell['value'])
x, y = 0, y + 1
workbook.close()
xlsx_data = output.getvalue()
filename = osutil.clean_filename(_("Pivot %(title)s (%(model_name)s)", title=jdata['title'], model_name=jdata['model']))
response = request.make_response(xlsx_data,
headers=[('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
('Content-Disposition', content_disposition(filename + '.xlsx'))],
)
return response
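    # Sketch of the expected payload (keys are the ones read above, values are illustrative):
    #     {"title": "Pivot", "model": "sale.order", "measure_count": 1, "origin_count": 1,
    #      "col_group_headers": [[{"title": "Total", "width": 1, "height": 1}]],
    #      "measure_headers": [{"title": "Count", "is_bold": false}],
    #      "origin_headers": [], "rows": [{"title": "Total", "indent": 0, "values": [{"value": 42}]}]}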
| 42.702703 | 4,740 |
1,570 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
from odoo.exceptions import UserError
from odoo.http import Controller, request, Response, route
class Profiling(Controller):
@route('/web/set_profiling', type='http', auth='public', sitemap=False)
def profile(self, profile=None, collectors=None, **params):
if collectors is not None:
collectors = collectors.split(',')
else:
collectors = ['sql', 'traces_async']
profile = profile and profile != '0'
try:
state = request.env['ir.profile'].set_profiling(profile, collectors=collectors, params=params)
return Response(json.dumps(state), mimetype='application/json')
except UserError as e:
return Response(response='error: %s' % e, status=500, mimetype='text/plain')
@route(['/web/speedscope', '/web/speedscope/<model("ir.profile"):profile>'], type='http', sitemap=False, auth='user')
def speedscope(self, profile=None):
        # don't serve the speedscope index if profiling is not enabled
if not request.env['ir.profile']._enabled_until():
return request.not_found()
icp = request.env['ir.config_parameter']
context = {
'profile': profile,
'url_root': request.httprequest.url_root,
'cdn': icp.sudo().get_param('speedscope_cdn', "https://cdn.jsdelivr.net/npm/[email protected]/dist/release/")
}
return request.render('web.view_speedscope_index', context)
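
# --- Illustrative sketch, not part of the module above -----------------------
# Enabling the SQL and async-trace collectors through the endpoint defined
# above; the host and port are hypothetical, and the server may still refuse
# if profiling is not allowed for the current user or database.
from urllib.parse import urlencode
from urllib.request import urlopen

query = urlencode({'profile': '1', 'collectors': 'sql,traces_async'})
with urlopen('http://localhost:8069/web/set_profiling?' + query) as resp:
    print(resp.read())  # JSON-encoded profiling state on success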
| 46.176471 | 1,570 |
656 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Indian - Sale Report(GST)',
'icon': '/l10n_in/static/description/icon.png',
'version': '1.0',
'description': """GST Sale Report""",
'category': 'Accounting/Localizations/Sale',
'depends': [
'l10n_in',
'sale',
],
'data': [
'views/report_sale_order.xml',
'views/sale_views.xml',
'views/res_partner_views.xml',
],
'demo': [
'data/product_demo.xml',
],
'installable': True,
'application': False,
'auto_install': True,
'license': 'LGPL-3',
}
| 25.230769 | 656 |
737 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class SaleAdvancePaymentInv(models.TransientModel):
_inherit = "sale.advance.payment.inv"
def _prepare_invoice_values(self, order, name, amount, so_line):
res = super()._prepare_invoice_values(order, name, amount, so_line)
if order.l10n_in_journal_id:
res['journal_id'] = order.l10n_in_journal_id.id
if order.l10n_in_company_country_code == 'IN':
res['l10n_in_gst_treatment'] = order.l10n_in_gst_treatment
if order.l10n_in_reseller_partner_id:
res['l10n_in_reseller_partner_id'] = order.l10n_in_reseller_partner_id
return res
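
# --- Illustrative sketch, not part of the module above -----------------------
# Net effect of the override: on top of the standard down-payment invoice
# values it forces the order's Indian sales journal and copies the GST fields.
# All ids and values below are hypothetical.
standard_vals = {'move_type': 'out_invoice', 'partner_id': 7, 'journal_id': 1}
indian_extras = {
    'journal_id': 42,                    # order.l10n_in_journal_id
    'l10n_in_gst_treatment': 'regular',  # order.l10n_in_gst_treatment
    'l10n_in_reseller_partner_id': 13,   # order.l10n_in_reseller_partner_id
}
invoice_vals = {**standard_vals, **indian_extras}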
| 40.944444 | 737 |
507 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class AccountMove(models.Model):
_inherit = "account.move"
def _l10n_in_get_shipping_partner(self):
shipping_partner = super()._l10n_in_get_shipping_partner()
return self.partner_shipping_id or shipping_partner
@api.model
def _l10n_in_get_shipping_partner_gstin(self, shipping_partner):
return shipping_partner.l10n_in_shipping_gstin
| 31.6875 | 507 |
3,187 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class SaleOrder(models.Model):
_inherit = "sale.order"
l10n_in_reseller_partner_id = fields.Many2one('res.partner',
string='Reseller', domain="[('vat', '!=', False), '|', ('company_id', '=', False), ('company_id', '=', company_id)]", readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]})
l10n_in_journal_id = fields.Many2one('account.journal', string="Journal", compute="_compute_l10n_in_journal_id", store=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]})
l10n_in_gst_treatment = fields.Selection([
('regular', 'Registered Business - Regular'),
('composition', 'Registered Business - Composition'),
('unregistered', 'Unregistered Business'),
('consumer', 'Consumer'),
('overseas', 'Overseas'),
('special_economic_zone', 'Special Economic Zone'),
('deemed_export', 'Deemed Export'),
], string="GST Treatment", readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, compute="_compute_l10n_in_gst_treatment", store=True)
l10n_in_company_country_code = fields.Char(related='company_id.account_fiscal_country_id.code', string="Country code")
@api.depends('partner_id')
def _compute_l10n_in_gst_treatment(self):
for order in self:
            # set the default to False so a CacheMiss error can never occur for this field
order.l10n_in_gst_treatment = False
if order.l10n_in_company_country_code == 'IN':
l10n_in_gst_treatment = order.partner_id.l10n_in_gst_treatment
if not l10n_in_gst_treatment and order.partner_id.country_id and order.partner_id.country_id.code != 'IN':
l10n_in_gst_treatment = 'overseas'
if not l10n_in_gst_treatment:
l10n_in_gst_treatment = order.partner_id.vat and 'regular' or 'consumer'
order.l10n_in_gst_treatment = l10n_in_gst_treatment
@api.depends('company_id')
def _compute_l10n_in_journal_id(self):
for order in self:
            # set the default to False so a CacheMiss error can never occur for this field
order.l10n_in_journal_id = False
if order.l10n_in_company_country_code == 'IN':
domain = [('company_id', '=', order.company_id.id), ('type', '=', 'sale')]
journal = self.env['account.journal'].search(domain, limit=1)
if journal:
order.l10n_in_journal_id = journal.id
def _prepare_invoice(self):
invoice_vals = super(SaleOrder, self)._prepare_invoice()
if self.l10n_in_company_country_code == 'IN':
invoice_vals['l10n_in_reseller_partner_id'] = self.l10n_in_reseller_partner_id.id
if self.l10n_in_journal_id:
invoice_vals['journal_id'] = self.l10n_in_journal_id.id
invoice_vals['l10n_in_gst_treatment'] = self.l10n_in_gst_treatment
return invoice_vals
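
# --- Illustrative sketch, not part of the module above -----------------------
# The fallback chain of _compute_l10n_in_gst_treatment restated as a plain
# function; the arguments are hypothetical stand-ins for the partner attributes
# the compute method reads.
def guess_gst_treatment(partner_treatment, partner_country_code, partner_has_vat):
    """Default GST treatment for an order placed with an Indian company."""
    if partner_treatment:                                # explicit setting wins
        return partner_treatment
    if partner_country_code and partner_country_code != 'IN':
        return 'overseas'                                # foreign partner
    return 'regular' if partner_has_vat else 'consumer'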
| 56.910714 | 3,187 |
809 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = 'res.partner'
l10n_in_shipping_gstin = fields.Char("Shipping GSTIN")
@api.constrains('l10n_in_shipping_gstin')
def _check_l10n_in_shipping_gstin(self):
check_vat_in = self.env['res.partner'].check_vat_in
wrong_shipping_gstin_partner = self.filtered(lambda p: p.l10n_in_shipping_gstin and not check_vat_in(p.l10n_in_shipping_gstin))
if wrong_shipping_gstin_partner:
            raise ValidationError(_("The shipping GSTIN number [%s] does not seem to be valid") % ",".join(p.l10n_in_shipping_gstin for p in wrong_shipping_gstin_partner))
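
# --- Illustrative sketch, not part of the module above -----------------------
# The constraint above in plain terms: collect every partner whose shipping
# GSTIN fails the same validator used for the `vat` field, then raise a single
# error listing all offending numbers. `is_valid_gstin` is a hypothetical
# stand-in for res.partner.check_vat_in.
def offending_shipping_gstins(partners, is_valid_gstin):
    return [
        gstin
        for gstin in (p.get('l10n_in_shipping_gstin') for p in partners)
        if gstin and not is_valid_gstin(gstin)
    ]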
| 44.944444 | 809 |
497 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class L10nInExemptedReport(models.Model):
_inherit = "l10n_in.exempted.report"
def _from(self):
from_str = super(L10nInExemptedReport, self)._from()
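        # Exclude the down payment product configured through the
        # 'sale.default_deposit_product_id' parameter; COALESCE(..., '0') keeps
        # the clause valid when the parameter is not set.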
from_str += """ AND aml.product_id != COALESCE(
(SELECT value from ir_config_parameter where key = 'sale.default_deposit_product_id'), '0')::int
"""
return from_str
| 33.133333 | 497 |