size
int64
0
304k
ext
stringclasses
1 value
lang
stringclasses
1 value
branch
stringclasses
1 value
content
stringlengths
0
304k
avg_line_length
float64
0
238
max_line_length
int64
0
304k
5,061
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import api, fields, models


class Lead2OpportunityMassConvert(models.TransientModel):
    """ Mass-conversion wizard: convert several leads to opportunities at
    once, with optional deduplication and salesperson allocation. Inherits
    the single-lead wizard ``crm.lead2opportunity.partner`` and adapts its
    fields to the multi-record case. """
    _name = 'crm.lead2opportunity.partner.mass'
    _description = 'Convert Lead to Opportunity (in mass)'
    _inherit = 'crm.lead2opportunity.partner'

    # the single-lead field of the parent wizard is not meaningful in mass mode
    lead_id = fields.Many2one(required=False)
    lead_tomerge_ids = fields.Many2many(
        'crm.lead', 'crm_convert_lead_mass_lead_rel',
        string='Active Leads', context={'active_test': False},
        default=lambda self: self.env.context.get('active_ids', []),
    )
    user_ids = fields.Many2many('res.users', string='Salespersons')
    deduplicate = fields.Boolean('Apply deduplication', default=True, help='Merge with existing leads/opportunities of each partner')
    action = fields.Selection(selection_add=[
        ('each_exist_or_create', 'Use existing partner or create'),
    ], string='Related Customer', ondelete={
        'each_exist_or_create': lambda recs: recs.write({'action': 'exist'}),
    })
    force_assignment = fields.Boolean(default=False)

    @api.depends('duplicated_lead_ids')
    def _compute_name(self):
        # mass wizard always runs the 'convert' flow of the parent wizard
        for convert in self:
            convert.name = 'convert'

    @api.depends('lead_tomerge_ids')
    def _compute_action(self):
        # default to per-lead partner resolution (find or create for each lead)
        for convert in self:
            convert.action = 'each_exist_or_create'

    @api.depends('lead_tomerge_ids')
    def _compute_partner_id(self):
        # no single forced partner in mass mode
        for convert in self:
            convert.partner_id = False

    @api.depends('user_ids')
    def _compute_team_id(self):
        """ When changing the user, also set a team_id or restrict team id
        to the ones user_id is member of. """
        for convert in self:
            # setting user as void should not trigger a new team computation
            if not convert.user_id and not convert.user_ids and convert.team_id:
                continue
            user = convert.user_id or convert.user_ids and convert.user_ids[0] or self.env.user
            if convert.team_id and user in convert.team_id.member_ids | convert.team_id.user_id:
                continue
            team = self.env['crm.team']._get_default_team_id(user_id=user.id, domain=None)
            convert.team_id = team.id

    @api.depends('lead_tomerge_ids')
    def _compute_duplicated_lead_ids(self):
        """ Flag each selected lead that has at least one duplicate
        (same partner or email), to inform the user before converting. """
        for convert in self:
            duplicated = self.env['crm.lead']
            for lead in convert.lead_tomerge_ids:
                duplicated_leads = self.env['crm.lead']._get_lead_duplicates(
                    partner=lead.partner_id,
                    email=lead.partner_id and lead.partner_id.email or lead.email_from,
                    include_lost=False)
                # _get_lead_duplicates includes the lead itself, hence > 1
                if len(duplicated_leads) > 1:
                    duplicated += lead
            convert.duplicated_lead_ids = duplicated.ids

    def _convert_and_allocate(self, leads, user_ids, team_id=False):
        """ When "massively" (more than one at a time) converting leads to
        opportunities, check the salesteam_id and salesmen_ids and update
        the values before calling super. """
        self.ensure_one()
        salesmen_ids = []
        if self.user_ids:
            salesmen_ids = self.user_ids.ids
        return super(Lead2OpportunityMassConvert, self)._convert_and_allocate(leads, salesmen_ids, team_id=team_id)

    def action_mass_convert(self):
        """ Optionally deduplicate the selected leads (merging each group of
        duplicates into one surviving lead) before delegating the conversion
        itself to ``action_apply()``.

        :return: result of ``action_apply()`` run on the (possibly reduced)
          set of lead ids;
        """
        self.ensure_one()
        if self.name == 'convert' and self.deduplicate:
            # TDE CLEANME: still using active_ids from context
            active_ids = self._context.get('active_ids', [])
            merged_lead_ids = set()
            remaining_lead_ids = set()
            for lead in self.lead_tomerge_ids:
                # merged_lead_ids holds integer ids: compare on lead.id, not
                # on the record itself, otherwise the guard never matches and
                # leads already merged (and unlinked by merge_opportunity())
                # would be accessed again, raising a MissingError.
                if lead.id not in merged_lead_ids:
                    duplicated_leads = self.env['crm.lead']._get_lead_duplicates(
                        partner=lead.partner_id,
                        email=lead.partner_id.email or lead.email_from,
                        include_lost=False
                    )
                    if len(duplicated_leads) > 1:
                        # merge_opportunity returns the surviving master lead
                        lead = duplicated_leads.merge_opportunity()
                        merged_lead_ids.update(duplicated_leads.ids)
                        remaining_lead_ids.add(lead.id)
            # rebuild list of lead IDS to convert, following given order
            final_ids = [lead_id for lead_id in active_ids if lead_id not in merged_lead_ids]
            final_ids += [lead_id for lead_id in remaining_lead_ids if lead_id not in final_ids]

            # only update active_ids when there are set
            self = self.with_context(active_ids=final_ids)
        return self.action_apply()

    def _convert_handle_partner(self, lead, action, partner_id):
        """ In 'each_exist_or_create' mode, resolve the partner per lead
        (matching on email) instead of using a single wizard-level partner. """
        if self.action == 'each_exist_or_create':
            partner_id = lead._find_matching_partner(email_only=True).id
            action = 'create'
        return super(Lead2OpportunityMassConvert, self)._convert_handle_partner(lead, action, partner_id)
46.431193
5,061
2,567
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import api, fields, models


class MergeOpportunity(models.TransientModel):
    """ Wizard merging several leads/opportunities into a single record.

    Although named after opportunities (merging opportunities is the usual
    case, leads being more short-lived), opportunities are leads under the
    hood: the wizard equally merges leads together (resulting in a lead) or
    a mix of leads and opportunities (resulting in an opportunity).
    """
    _name = 'crm.merge.opportunity'
    _description = 'Merge Opportunities'

    opportunity_ids = fields.Many2many('crm.lead', 'merge_opportunity_rel', 'merge_id', 'opportunity_id', string='Leads/Opportunities')
    user_id = fields.Many2one('res.users', 'Salesperson', index=True)
    team_id = fields.Many2one(
        'crm.team', 'Sales Team', index=True,
        compute='_compute_team_id', readonly=False, store=True)

    @api.model
    def default_get(self, fields):
        """ Pre-fill ``opportunity_ids`` from ``active_ids`` in the context,
        keeping only records that are not already won (probability < 100),
        i.e. excluding 'Dead' or 'Closed' leads/opps which cannot be merged.
        """
        result = super(MergeOpportunity, self).default_get(fields)
        active_ids = self._context.get('active_ids')
        if active_ids and 'opportunity_ids' in fields:
            candidates = self.env['crm.lead'].browse(active_ids)
            open_ids = candidates.filtered(lambda lead: lead.probability < 100).ids
            result['opportunity_ids'] = [(6, 0, open_ids)]
        return result

    def action_merge(self):
        """ Merge the selected records and open the resulting lead/opp. """
        self.ensure_one()
        leads = self.opportunity_ids
        merged = leads.merge_opportunity(self.user_id.id, self.team_id.id)
        return merged.redirect_lead_opportunity_view()

    @api.depends('user_id')
    def _compute_team_id(self):
        """ When changing the user, also set a team_id or restrict team id
        to the ones user_id is member of. """
        for wizard in self:
            if not wizard.user_id:
                continue
            # team qualifies if the user leads it or is one of its members
            membership_domain = [
                '|',
                ('user_id', '=', wizard.user_id.id),
                ('member_ids', '=', wizard.user_id.id),
            ]
            keep_current_team = False
            if wizard.team_id:
                keep_current_team = wizard.env['crm.team'].search_count(
                    [('id', '=', wizard.team_id.id)] + membership_domain)
            if not keep_current_team:
                wizard.team_id = wizard.env['crm.team'].search(membership_domain, limit=1)
45.839286
2,567
2,344
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import api, fields, models, _
from odoo.exceptions import AccessError


class Digest(models.Model):
    """ Add CRM KPIs (new leads/opportunities created, opportunities won)
    to the periodic digest emails. """
    _inherit = 'digest.digest'

    kpi_crm_lead_created = fields.Boolean('New Leads/Opportunities')
    kpi_crm_lead_created_value = fields.Integer(compute='_compute_kpi_crm_lead_created_value')
    kpi_crm_opportunities_won = fields.Boolean('Opportunities Won')
    kpi_crm_opportunities_won_value = fields.Integer(compute='_compute_kpi_crm_opportunities_won_value')

    def _compute_kpi_crm_lead_created_value(self):
        """ Count leads/opportunities created during the digest period of
        each record, scoped to the record's company.

        :raise AccessError: when the current user is not a salesperson, so
          the digest rendering skips this KPI for them;
        """
        if not self.env.user.has_group('sales_team.group_sale_salesman'):
            raise AccessError(_("Do not have access, skip this data for user's digest email"))
        for record in self:
            start, end, company = record._get_kpi_compute_parameters()
            record.kpi_crm_lead_created_value = self.env['crm.lead'].search_count([
                ('create_date', '>=', start),
                ('create_date', '<', end),
                ('company_id', '=', company.id)
            ])

    def _compute_kpi_crm_opportunities_won_value(self):
        """ Count opportunities won (probability 100) closed during the
        digest period of each record, scoped to the record's company.

        :raise AccessError: when the current user is not a salesperson, so
          the digest rendering skips this KPI for them;
        """
        if not self.env.user.has_group('sales_team.group_sale_salesman'):
            raise AccessError(_("Do not have access, skip this data for user's digest email"))
        for record in self:
            start, end, company = record._get_kpi_compute_parameters()
            record.kpi_crm_opportunities_won_value = self.env['crm.lead'].search_count([
                ('type', '=', 'opportunity'),
                # probability is a float field: compare against the number 100
                # rather than the string '100', which relied on implicit SQL casting
                ('probability', '=', 100),
                ('date_closed', '>=', start),
                ('date_closed', '<', end),
                ('company_id', '=', company.id)
            ])

    def _compute_kpis_actions(self, company, user):
        """ Point the digest KPI links to the pipeline action (or to the
        lead list for users of the lead stage). """
        res = super(Digest, self)._compute_kpis_actions(company, user)
        res['kpi_crm_lead_created'] = 'crm.crm_lead_action_pipeline&menu_id=%s' % self.env.ref('crm.crm_menu_root').id
        res['kpi_crm_opportunities_won'] = 'crm.crm_lead_action_pipeline&menu_id=%s' % self.env.ref('crm.crm_menu_root').id
        if user.has_group('crm.group_use_lead'):
            res['kpi_crm_lead_created'] = 'crm.crm_lead_all_leads&menu_id=%s' % self.env.ref('crm.crm_menu_root').id
        return res
50.956522
2,344
33,120
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import datetime
import logging
import random
import threading

from ast import literal_eval

from odoo import api, exceptions, fields, models, _
from odoo.osv import expression
from odoo.tools import float_compare, float_round
from odoo.tools.safe_eval import safe_eval

_logger = logging.getLogger(__name__)


class Team(models.Model):
    """ CRM extension of the sales team: mail alias for inbound lead
    creation, lead/opportunity statistics, and the (cron or manual) lead
    assignment process dispatching unassigned leads to teams and their
    members. """
    _name = 'crm.team'
    _inherit = ['mail.alias.mixin', 'crm.team']
    _description = 'Sales Team'

    use_leads = fields.Boolean('Leads', help="Check this box to filter and qualify incoming requests as leads before converting them into opportunities and assigning them to a salesperson.")
    use_opportunities = fields.Boolean('Pipeline', default=True, help="Check this box to manage a presales process with opportunities.")
    alias_id = fields.Many2one(
        'mail.alias', string='Alias', ondelete="restrict", required=True,
        help="The email address associated with this channel. New emails received will automatically create new leads assigned to the channel.")
    # assignment
    assignment_enabled = fields.Boolean('Lead Assign', compute='_compute_assignment_enabled')
    assignment_auto_enabled = fields.Boolean('Auto Assignment', compute='_compute_assignment_enabled')
    assignment_optout = fields.Boolean('Skip auto assignment')
    assignment_max = fields.Integer(
        'Lead Average Capacity', compute='_compute_assignment_max',
        help='Monthly average leads capacity for all salesmen belonging to the team')
    assignment_domain = fields.Char(
        'Assignment Domain', tracking=True,
        help='Additional filter domain when fetching unassigned leads to allocate to the team.')
    # statistics about leads / opportunities / both
    lead_unassigned_count = fields.Integer(
        string='# Unassigned Leads', compute='_compute_lead_unassigned_count')
    lead_all_assigned_month_count = fields.Integer(
        string='# Leads/Opps assigned this month', compute='_compute_lead_all_assigned_month_count',
        help="Number of leads and opportunities assigned this last month.")
    opportunities_count = fields.Integer(
        string='# Opportunities', compute='_compute_opportunities_data')
    opportunities_amount = fields.Monetary(
        string='Opportunities Revenues', compute='_compute_opportunities_data')
    opportunities_overdue_count = fields.Integer(
        string='# Overdue Opportunities', compute='_compute_opportunities_overdue_data')
    opportunities_overdue_amount = fields.Monetary(
        string='Overdue Opportunities Revenues', compute='_compute_opportunities_overdue_data',)
    # alias: improve fields coming from _inherits, use inherited to avoid replacing them
    alias_user_id = fields.Many2one(
        'res.users', related='alias_id.alias_user_id', readonly=False, inherited=True,
        domain=lambda self: [('groups_id', 'in', self.env.ref('sales_team.group_sale_salesman_all_leads').id)])

    @api.depends('crm_team_member_ids.assignment_max')
    def _compute_assignment_max(self):
        """ Team capacity is the sum of its members' monthly capacities. """
        for team in self:
            team.assignment_max = sum(member.assignment_max for member in team.crm_team_member_ids)

    def _compute_assignment_enabled(self):
        """ Assignment is enabled by the 'crm.lead.auto.assignment' config
        parameter; automatic assignment additionally requires the assign
        cron to exist and be active. Same values for all records. """
        assign_enabled = self.env['ir.config_parameter'].sudo().get_param('crm.lead.auto.assignment', False)
        auto_assign_enabled = False
        if assign_enabled:
            assign_cron = self.sudo().env.ref('crm.ir_cron_crm_lead_assign', raise_if_not_found=False)
            auto_assign_enabled = assign_cron.active if assign_cron else False
        self.assignment_enabled = assign_enabled
        self.assignment_auto_enabled = auto_assign_enabled

    def _compute_lead_unassigned_count(self):
        """ Count leads of each team having no salesperson, in one grouped query. """
        leads_data = self.env['crm.lead'].read_group([
            ('team_id', 'in', self.ids),
            ('type', '=', 'lead'),
            ('user_id', '=', False),
        ], ['team_id'], ['team_id'])
        counts = {datum['team_id'][0]: datum['team_id_count'] for datum in leads_data}
        for team in self:
            team.lead_unassigned_count = counts.get(team.id, 0)

    @api.depends('crm_team_member_ids.lead_month_count')
    def _compute_lead_all_assigned_month_count(self):
        """ Sum the members' monthly assigned lead counters. """
        for team in self:
            team.lead_all_assigned_month_count = sum(member.lead_month_count for member in team.crm_team_member_ids)

    def _compute_opportunities_data(self):
        """ Count and total expected revenue of open opportunities per team. """
        opportunity_data = self.env['crm.lead'].read_group([
            ('team_id', 'in', self.ids),
            ('probability', '<', 100),
            ('type', '=', 'opportunity'),
        ], ['expected_revenue:sum', 'team_id'], ['team_id'])
        counts = {datum['team_id'][0]: datum['team_id_count'] for datum in opportunity_data}
        amounts = {datum['team_id'][0]: datum['expected_revenue'] for datum in opportunity_data}
        for team in self:
            team.opportunities_count = counts.get(team.id, 0)
            team.opportunities_amount = amounts.get(team.id, 0)

    def _compute_opportunities_overdue_data(self):
        """ Same as _compute_opportunities_data, restricted to opportunities
        whose deadline is already past. """
        opportunity_data = self.env['crm.lead'].read_group([
            ('team_id', 'in', self.ids),
            ('probability', '<', 100),
            ('type', '=', 'opportunity'),
            # date_deadline compared against the stringified current datetime
            ('date_deadline', '<', fields.Date.to_string(fields.Datetime.now()))
        # bare 'expected_revenue' (no explicit :sum) — presumably aggregated
        # with the default sum, matching _compute_opportunities_data; confirm
        ], ['expected_revenue', 'team_id'], ['team_id'])
        counts = {datum['team_id'][0]: datum['team_id_count'] for datum in opportunity_data}
        amounts = {datum['team_id'][0]: (datum['expected_revenue']) for datum in opportunity_data}
        for team in self:
            team.opportunities_overdue_count = counts.get(team.id, 0)
            team.opportunities_overdue_amount = amounts.get(team.id, 0)

    @api.onchange('use_leads', 'use_opportunities')
    def _onchange_use_leads_opportunities(self):
        # a team handling neither leads nor opportunities needs no mail alias
        if not self.use_leads and not self.use_opportunities:
            self.alias_name = False

    @api.constrains('assignment_domain')
    def _constrains_assignment_domain(self):
        """ Validate the assignment domain by parsing it and running a
        limit-1 search with it; any failure is reported as a ValidationError. """
        for team in self:
            try:
                domain = literal_eval(team.assignment_domain or '[]')
                if domain:
                    self.env['crm.lead'].search(domain, limit=1)
            except Exception:
                raise exceptions.ValidationError(_('Assignment domain for team %(team)s is incorrectly formatted', team=team.name))

    # ------------------------------------------------------------
    # ORM
    # ------------------------------------------------------------

    def write(self, vals):
        # when lead/opportunity usage flips, refresh the alias name and
        # its default values (lead vs opportunity creation)
        result = super(Team, self).write(vals)
        if 'use_leads' in vals or 'use_opportunities' in vals:
            for team in self:
                alias_vals = team._alias_get_creation_values()
                team.write({
                    'alias_name': alias_vals.get('alias_name', team.alias_name),
                    'alias_defaults': alias_vals.get('alias_defaults'),
                })
        return result

    def unlink(self):
        """ When unlinking, concatenate ``crm.lead.scoring.frequency`` linked to
        the team into "no team" statistics. """
        frequencies = self.env['crm.lead.scoring.frequency'].search([('team_id', 'in', self.ids)])
        if frequencies:
            existing_noteam = self.env['crm.lead.scoring.frequency'].sudo().search([
                ('team_id', '=', False),
                ('variable', 'in', frequencies.mapped('variable'))
            ])
            for frequency in frequencies:
                # skip void-like values
                if float_compare(frequency.won_count, 0.1, 2) != 1 and float_compare(frequency.lost_count, 0.1, 2) != 1:
                    continue
                match = existing_noteam.filtered(lambda frequ_nt: frequ_nt.variable == frequency.variable and frequ_nt.value == frequency.value)
                if match:
                    # remove extra .1 that may exist in db as those are artifacts of initializing
                    # frequency table. Final value of 0 will be set to 0.1.
                    exist_won_count = float_round(match.won_count, precision_digits=0, rounding_method='HALF-UP')
                    exist_lost_count = float_round(match.lost_count, precision_digits=0, rounding_method='HALF-UP')
                    add_won_count = float_round(frequency.won_count, precision_digits=0, rounding_method='HALF-UP')
                    add_lost_count = float_round(frequency.lost_count, precision_digits=0, rounding_method='HALF-UP')
                    new_won_count = exist_won_count + add_won_count
                    new_lost_count = exist_lost_count + add_lost_count
                    match.won_count = new_won_count if float_compare(new_won_count, 0.1, 2) == 1 else 0.1
                    match.lost_count = new_lost_count if float_compare(new_lost_count, 0.1, 2) == 1 else 0.1
                else:
                    existing_noteam += self.env['crm.lead.scoring.frequency'].sudo().create({
                        'lost_count': frequency.lost_count if float_compare(frequency.lost_count, 0.1, 2) == 1 else 0.1,
                        'team_id': False,
                        'value': frequency.value,
                        'variable': frequency.variable,
                        'won_count': frequency.won_count if float_compare(frequency.won_count, 0.1, 2) == 1 else 0.1,
                    })
        return super(Team, self).unlink()

    # ------------------------------------------------------------
    # MESSAGING
    # ------------------------------------------------------------

    def _alias_get_creation_values(self):
        """ Aliases of CRM teams create crm.lead records; defaults encode the
        team and whether incoming mail becomes a lead or an opportunity. """
        values = super(Team, self)._alias_get_creation_values()
        values['alias_model_id'] = self.env['ir.model']._get('crm.lead').id
        if self.id:
            if not self.use_leads and not self.use_opportunities:
                values['alias_name'] = False
            values['alias_defaults'] = defaults = literal_eval(self.alias_defaults or "{}")
            has_group_use_lead = self.env.user.has_group('crm.group_use_lead')
            defaults['type'] = 'lead' if has_group_use_lead and self.use_leads else 'opportunity'
            defaults['team_id'] = self.id
        return values

    # ------------------------------------------------------------
    # LEAD ASSIGNMENT
    # ------------------------------------------------------------

    @api.model
    def _cron_assign_leads(self, work_days=None):
        """ Cron method assigning leads. Leads are allocated to all teams and
        assigned to their members. It is based on either cron configuration
        either forced through ``work_days`` parameter. When based on cron
        configuration purpose of cron is to assign leads to sales persons.
        Assigned workload is set to the workload those sales people should
        perform between two cron iterations. If their maximum capacity is
        reached assign process will not assign them any more lead.

        e.g. cron is active with interval_number 3, interval_type days. This
        means cron runs every 3 days. Cron will assign leads for 3 work days
        to salespersons each 3 days unless their maximum capacity is reached.

        If cron runs on an hour- or minute-based schedule minimum assignment
        performed is equivalent to 0.2 workdays to avoid rounding issues.
        Max assignment performed is for 30 days as it is better to run more
        often than planning for more than one month. Assign process is best
        designed to run every few hours (~4 times / day) or each few days.

        See ``CrmTeam.action_assign_leads()`` and its sub methods for more
        details about assign process.

        :param float work_days: see ``CrmTeam.action_assign_leads()``;
        """
        assign_cron = self.sudo().env.ref('crm.ir_cron_crm_lead_assign', raise_if_not_found=False)
        if not work_days and assign_cron and assign_cron.active:
            # derive the workload from the cron frequency, clamped to [0.2, 30]
            if assign_cron.interval_type == 'months':
                work_days = 30  # maximum one month of work
            elif assign_cron.interval_type == 'weeks':
                work_days = min(30, assign_cron.interval_number * 7)  # max at 30 (better lead repartition)
            elif assign_cron.interval_type == 'days':
                work_days = min(30, assign_cron.interval_number * 1)  # max at 30 (better lead repartition)
            elif assign_cron.interval_type == 'hours':
                work_days = max(0.2, assign_cron.interval_number / 24)  # min at 0.2 to avoid small numbers issues
            elif assign_cron.interval_type == 'minutes':
                work_days = max(0.2, assign_cron.interval_number / 1440)  # min at 0.2 to avoid small numbers issues
        work_days = work_days if work_days else 1  # avoid void values
        # all lead- or opportunity-enabled teams that did not opt out
        self.env['crm.team'].search([
            '&', '|', ('use_leads', '=', True), ('use_opportunities', '=', True),
            ('assignment_optout', '=', False)
        ])._action_assign_leads(work_days=work_days)
        return True

    def action_assign_leads(self, work_days=1, log=True):
        """ Manual (direct) leads assignment. This method both

          * assigns leads to teams given by self;
          * assigns leads to salespersons belonging to self;

        See sub methods for more details about assign process.

        :param float work_days: number of work days to consider when assigning
          leads to teams or salespersons. We consider that Member.assignment_max
          (or its equivalent on team model) targets 30 work days. We make a
          ratio between expected number of work days and maximum assignment for
          those 30 days to know lead count to assign.

        :return action: a client notification giving some insights on assign
          process;
        """
        teams_data, members_data = self._action_assign_leads(work_days=work_days)

        # format result messages
        logs = self._action_assign_leads_logs(teams_data, members_data)
        html_message = '<br />'.join(logs)
        notif_message = ' '.join(logs)

        # log a note in case of manual assign (as this method will mainly be called
        # on singleton record set, do not bother doing a specific message per team)
        log_action = _("Lead Assignment requested by %(user_name)s", user_name=self.env.user.name)
        log_message = "<p>%s<br /><br />%s</p>" % (log_action, html_message)
        self._message_log_batch(bodies=dict((team.id, log_message) for team in self))
        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'type': 'success',
                'title': _("Leads Assigned"),
                'message': notif_message,
                'next': {
                    'type': 'ir.actions.act_window_close'
                },
            }
        }

    def _action_assign_leads(self, work_days=1):
        """ Private method for lead assignment. This method both

          * assigns leads to teams given by self;
          * assigns leads to salespersons belonging to self;

        See sub methods for more details about assign process.

        :param float work_days: see ``CrmTeam.action_assign_leads()``;

        :return teams_data, members_data: structure-based result of assignment
          process. For more details about data see ``CrmTeam._allocate_leads()``
          and ``CrmTeamMember._assign_and_convert_leads``;
        """
        if not self.env.user.has_group('sales_team.group_sale_manager') and not self.env.user.has_group('base.group_system'):
            raise exceptions.UserError(_('Lead/Opportunities automatic assignment is limited to managers or administrators'))

        _logger.info('### START Lead Assignment (%d teams, %d sales persons, %.2f work_days)', len(self), len(self.crm_team_member_ids), work_days)
        teams_data = self._allocate_leads(work_days=work_days)
        _logger.info('### Team repartition done. Starting salesmen assignment.')
        members_data = self.crm_team_member_ids._assign_and_convert_leads(work_days=work_days)
        _logger.info('### END Lead Assignment')
        return teams_data, members_data

    def _action_assign_leads_logs(self, teams_data, members_data):
        """ Tool method to prepare notification about assignment process result.

        :param teams_data: see ``CrmTeam._allocate_leads()``;
        :param members_data: see ``CrmTeamMember._assign_and_convert_leads()``;

        :return list: list of formatted logs, ready to be formatted into a nice
        plaintext or html message at caller's will
        """
        # extract some statistics
        assigned = sum(len(teams_data[team]['assigned']) + len(teams_data[team]['merged']) for team in teams_data)
        duplicates = sum(len(teams_data[team]['duplicates']) for team in teams_data)
        members = len(members_data)
        members_assigned = sum(len(member_data['assigned']) for member_data in members_data.values())

        # format user notification
        message_parts = []
        # 1- duplicates removal
        if duplicates:
            message_parts.append(_("%(duplicates)s duplicates leads have been merged.",
                                   duplicates=duplicates))
        # 2- nothing assigned at all
        if not assigned and not members_assigned:
            if len(self) == 1:
                if not self.assignment_max:
                    message_parts.append(
                        _("No allocated leads to %(team_name)s team because it has no capacity. Add capacity to its salespersons.",
                          team_name=self.name))
                else:
                    message_parts.append(
                        _("No allocated leads to %(team_name)s team and its salespersons because no unassigned lead matches its domain.",
                          team_name=self.name))
            else:
                message_parts.append(
                    _("No allocated leads to any team or salesperson. Check your Sales Teams and Salespersons configuration as well as unassigned leads."))
        # 3- team allocation
        if not assigned and members_assigned:
            if len(self) == 1:
                message_parts.append(
                    _("No new lead allocated to %(team_name)s team because no unassigned lead matches its domain.",
                      team_name=self.name))
            else:
                message_parts.append(_("No new lead allocated to the teams because no lead match their domains."))
        elif assigned:
            if len(self) == 1:
                message_parts.append(
                    _("%(assigned)s leads allocated to %(team_name)s team.",
                      assigned=assigned, team_name=self.name))
            else:
                message_parts.append(
                    _("%(assigned)s leads allocated among %(team_count)s teams.",
                      assigned=assigned, team_count=len(self)))
        # 4- salespersons assignment
        if not members_assigned and assigned:
            message_parts.append(
                _("No lead assigned to salespersons because no unassigned lead matches their domains."))
        elif members_assigned:
            message_parts.append(
                _("%(members_assigned)s leads assigned among %(member_count)s salespersons.",
                  members_assigned=members_assigned, member_count=members))
        return message_parts

    def _allocate_leads(self, work_days=1):
        """ Allocate leads to teams given by self. This method sets ``team_id``
        field on lead records that are unassigned (no team and no responsible).
        No salesperson is assigned in this process. Its purpose is simply to
        allocate leads within teams.

        This process allocates all available leads on teams weighted by their
        maximum assignment by month that indicates their relative workload.

        Heuristic of this method is the following:

          * find unassigned leads for each team, aka leads being

            * without team, without user -> not assigned;
            * not in a won stage, and not having False/0 (lost) or 100 (won)
              probability) -> live leads;
            * if set, a delay after creation can be applied (see
              BUNDLE_HOURS_DELAY) parameter explanations here below;
            * matching the team's assignment domain (empty means everything);

          * assign a weight to each team based on their assignment_max that
            indicates their relative workload;
          * pick a random team using a weighted random choice and find a lead
            to assign:

            * remove already assigned leads from the available leads. If
              there is not any lead spare to assign, remove team from active
              teams;
            * pick the first lead and set the current team;
            * when setting a team on leads, leads are also merged with their
              duplicates. Purpose is to clean database and avoid assigning
              duplicates to same or different teams;
            * add lead and its duplicates to already assigned leads;

          * pick another random team until their is no more leads to assign
            to any team;

        This process ensure that teams having overlapping domains will all
        receive leads as lead allocation is done one lead at a time. This
        allocation will be proportional to their size (assignment of their
        members).

        :config int crm.assignment.bundle: deprecated
        :config int crm.assignment.commit.bundle: optional config parameter
          allowing to set size of lead batch to be committed together. By
          default 100 which is a good trade-off between transaction time and
          speed
        :config int crm.assignment.delay: optional config parameter giving a
          delay before taking a lead into assignment process (BUNDLE_HOURS_DELAY)
          given in hours. Purpose if to allow other crons or automated actions
          to make their job. This option is mainly historic as its purpose was
          to let automated actions prepare leads and score before PLS was added
          into CRM. This is now not required anymore but still supported;

        :param float work_days: see ``CrmTeam.action_assign_leads()``;

        :return teams_data: dict() with each team assignment result:
          team: {
            'assigned': set of lead IDs directly assigned to the team (no
              duplicate or merged found);
            'merged': set of lead IDs merged and assigned to the team (main
              leads being results of merge process);
            'duplicates': set of lead IDs found as duplicates and merged into
              other leads. Those leads are unlinked during assign process and
              are already removed at return of this method;
          }, ...
        """
        if work_days < 0.2 or work_days > 30:
            raise ValueError(
                _('Leads team allocation should be done for at least 0.2 or maximum 30 work days, not %.2f.', work_days)
            )

        BUNDLE_HOURS_DELAY = int(self.env['ir.config_parameter'].sudo().get_param('crm.assignment.delay', default=0))
        BUNDLE_COMMIT_SIZE = int(self.env['ir.config_parameter'].sudo().get_param('crm.assignment.commit.bundle', 100))
        auto_commit = not getattr(threading.current_thread(), 'testing', False)

        # leads
        max_create_dt = self.env.cr.now() - datetime.timedelta(hours=BUNDLE_HOURS_DELAY)
        duplicates_lead_cache = dict()

        # teams data
        teams_data, population, weights = dict(), list(), list()
        for team in self:
            if not team.assignment_max:
                continue

            lead_domain = expression.AND([
                literal_eval(team.assignment_domain or '[]'),
                [('create_date', '<=', max_create_dt)],
                ['&', ('team_id', '=', False), ('user_id', '=', False)],
                ['|', ('stage_id', '=', False), ('stage_id.is_won', '=', False)]
            ])

            leads = self.env["crm.lead"].search(lead_domain)
            # Fill duplicate cache: search for duplicate lead before the assignation
            # avoid to flush during the search at every assignation
            for lead in leads:
                if lead not in duplicates_lead_cache:
                    duplicates_lead_cache[lead] = lead._get_lead_duplicates(email=lead.email_from)

            teams_data[team] = {
                "team": team,
                "leads": leads,
                "assigned": set(),
                "merged": set(),
                "duplicates": set(),
            }
            population.append(team)
            weights.append(team.assignment_max)

        # Start a new transaction, since data fetching take times
        # and the first commit occur at the end of the bundle,
        # the first transaction can be long which we want to avoid
        if auto_commit:
            self._cr.commit()

        # assignment process data
        global_data = dict(assigned=set(), merged=set(), duplicates=set())
        leads_done_ids, lead_unlink_ids, counter = set(), set(), 0

        # one lead allocated per iteration, team chosen by weighted random
        # draw so overlapping domains share leads proportionally to capacity
        while population:
            counter += 1
            team = random.choices(population, weights=weights, k=1)[0]

            # filter remaining leads, remove team if no more leads for it
            teams_data[team]["leads"] = teams_data[team]["leads"].filtered(lambda l: l.id not in leads_done_ids).exists()
            if not teams_data[team]["leads"]:
                population_index = population.index(team)
                population.pop(population_index)
                weights.pop(population_index)
                continue

            # assign + deduplicate and concatenate results in teams_data to keep some history
            candidate_lead = teams_data[team]["leads"][0]
            assign_res = team._allocate_leads_deduplicate(candidate_lead, duplicates_cache=duplicates_lead_cache)
            for key in ('assigned', 'merged', 'duplicates'):
                teams_data[team][key].update(assign_res[key])
                leads_done_ids.update(assign_res[key])
                global_data[key].update(assign_res[key])
            lead_unlink_ids.update(assign_res['duplicates'])

            # auto-commit except in testing mode. As this process may be time consuming or we
            # may encounter errors, already commit what is allocated to avoid endless cron loops.
            if auto_commit and counter % BUNDLE_COMMIT_SIZE == 0:
                # unlink duplicates once
                self.env['crm.lead'].browse(lead_unlink_ids).unlink()
                lead_unlink_ids = set()
                self._cr.commit()

        # unlink duplicates once
        self.env['crm.lead'].browse(lead_unlink_ids).unlink()

        if auto_commit:
            self._cr.commit()

        # some final log
        _logger.info('## Assigned %s leads', (len(global_data['assigned']) + len(global_data['merged'])))
        for team, team_data in teams_data.items():
            _logger.info(
                '## Assigned %s leads to team %s',
                len(team_data['assigned']) + len(team_data['merged']), team.id)
            _logger.info(
                '\tLeads: direct assign %s / merge result %s / duplicates merged: %s',
                team_data['assigned'], team_data['merged'], team_data['duplicates'])

        return teams_data

    def _allocate_leads_deduplicate(self, leads, duplicates_cache=None):
        """ Assign leads to sales team given by self by calling lead tool
        method _handle_salesmen_assignment. In this method we deduplicate leads
        allowing to reduce number of resulting leads before assigning them
        to salesmen.

        :param leads: recordset of leads to assign to current team;
        :param duplicates_cache: if given, avoid to perform a duplicate search
          and fetch information in it instead;
        """
        self.ensure_one()
        duplicates_cache = duplicates_cache if duplicates_cache is not None else dict()

        # classify leads
        leads_assigned = self.env['crm.lead']  # direct team assign
        leads_done_ids, leads_merged_ids, leads_dup_ids = set(), set(), set()  # classification
        leads_dups_dict = dict()  # lead -> its duplicate
        for lead in leads:
            if lead.id not in leads_done_ids:
                # fill cache if not already done
                if lead not in duplicates_cache:
                    duplicates_cache[lead] = lead._get_lead_duplicates(email=lead.email_from)
                lead_duplicates = duplicates_cache[lead].exists()
                if len(lead_duplicates) > 1:
                    leads_dups_dict[lead] = lead_duplicates
                    leads_done_ids.update((lead + lead_duplicates).ids)
                else:
                    leads_assigned += lead
                    leads_done_ids.add(lead.id)

        # assign team to direct assign (leads_assigned) + dups keys (to ensure their team
        # if they are elected master of merge process)
        dups_to_assign = [lead for lead in leads_dups_dict]
        leads_assigned.union(*dups_to_assign)._handle_salesmen_assignment(user_ids=None, team_id=self.id)

        for lead in leads.filtered(lambda lead: lead in leads_dups_dict):
            lead_duplicates = leads_dups_dict[lead]
            # merge without unlinking: losers are collected and removed by the caller
            merged = lead_duplicates._merge_opportunity(user_id=False, team_id=False, auto_unlink=False, max_length=0)
            leads_dup_ids.update((lead_duplicates - merged).ids)
            leads_merged_ids.add(merged.id)

        return {
            'assigned': set(leads_assigned.ids),
            'merged': leads_merged_ids,
            'duplicates': leads_dup_ids,
        }

    # ------------------------------------------------------------
    # ACTIONS
    # ------------------------------------------------------------

    #TODO JEM : refactor this stuff with xml action, proper customization,
    @api.model
    def action_your_pipeline(self):
        """ Open the pipeline action scoped to the current user's team. """
        action = self.env["ir.actions.actions"]._for_xml_id("crm.crm_lead_action_pipeline")
        return self._action_update_to_pipeline(action)

    @api.model
    def action_opportunity_forecast(self):
        """ Open the forecast action scoped to the current user's team. """
        action = self.env['ir.actions.actions']._for_xml_id('crm.crm_lead_action_forecast')
        return self._action_update_to_pipeline(action)

    @api.model
    def _action_update_to_pipeline(self, action):
        """ Inject the user's team (or the first readable team as fallback)
        as default_team_id into the given action's context. """
        user_team_id = self.env.user.sale_team_id.id
        if user_team_id:
            # To ensure that the team is readable in multi company
            user_team_id = self.search([('id', '=', user_team_id)], limit=1).id
        else:
            user_team_id = self.search([], limit=1).id
            action['help'] = _("""<p class='o_view_nocontent_smiling_face'>Add new opportunities</p><p>
    Looks like you are not a member of a Sales Team. You should add yourself
    as a member of one of the Sales Team.
</p>""")
            if user_team_id:
                action['help'] += _("<p>As you don't belong to any Sales Team, Odoo opens the first one by default.</p>")
        action_context = safe_eval(action['context'], {'uid': self.env.uid})
        if user_team_id:
            action_context['default_team_id'] = user_team_id
        action['context'] = action_context
        return action

    def _compute_dashboard_button_name(self):
        # teams with a pipeline display "Pipeline" on their dashboard button
        super(Team, self)._compute_dashboard_button_name()
        team_with_pipelines = self.filtered(lambda el: el.use_opportunities)
        team_with_pipelines.update({'dashboard_button_name': _("Pipeline")})

    def action_primary_channel_button(self):
        """ Open the team's opportunities when the pipeline is enabled,
        otherwise defer to the sales_team implementation. """
        self.ensure_one()
        if self.use_opportunities:
            action = self.env['ir.actions.actions']._for_xml_id('crm.crm_case_form_view_salesteams_opportunity')
            rcontext = {
                'team': self,
            }
            action['help'] = self.env['ir.ui.view']._render_template('crm.crm_action_helper', values=rcontext)
            return action
        return super(Team,self).action_primary_channel_button()

    # dashboard graph configuration: pipeline-enabled teams chart the count
    # of their opportunities by creation date

    def _graph_get_model(self):
        if self.use_opportunities:
            return 'crm.lead'
        return super(Team,self)._graph_get_model()

    def _graph_date_column(self):
        if self.use_opportunities:
            return 'create_date'
        return super(Team,self)._graph_date_column()

    def _graph_y_query(self):
        if self.use_opportunities:
            return 'count(*)'
        return super(Team,self)._graph_y_query()

    def _extra_sql_conditions(self):
        if self.use_opportunities:
            return "AND type LIKE 'opportunity'"
        return super(Team,self)._extra_sql_conditions()

    def _graph_title_and_key(self):
        if self.use_opportunities:
            return ['', _('New Opportunities')]  # no more title
        return super(Team, self)._graph_title_and_key()
49.879518
33,120
1,156
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models, _ class LostReason(models.Model): _name = "crm.lost.reason" _description = 'Opp. Lost Reason' name = fields.Char('Description', required=True, translate=True) active = fields.Boolean('Active', default=True) leads_count = fields.Integer('Leads Count', compute='_compute_leads_count') def _compute_leads_count(self): lead_data = self.env['crm.lead'].with_context(active_test=False).read_group([('lost_reason', 'in', self.ids)], ['lost_reason'], ['lost_reason']) mapped_data = dict((data['lost_reason'][0], data['lost_reason_count']) for data in lead_data) for reason in self: reason.leads_count = mapped_data.get(reason.id, 0) def action_lost_leads(self): return { 'name': _('Leads'), 'view_mode': 'tree,form', 'domain': [('lost_reason', 'in', self.ids)], 'res_model': 'crm.lead', 'type': 'ir.actions.act_window', 'context': {'create': False, 'active_test': False}, }
39.862069
1,156
9,386
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import datetime import logging import math import threading import random from ast import literal_eval from odoo import api, exceptions, fields, models, _ from odoo.osv import expression _logger = logging.getLogger(__name__) class Team(models.Model): _inherit = 'crm.team.member' # assignment assignment_enabled = fields.Boolean(related="crm_team_id.assignment_enabled") assignment_domain = fields.Char('Assignment Domain', tracking=True) assignment_optout = fields.Boolean('Skip auto assignment') assignment_max = fields.Integer('Average Leads Capacity (on 30 days)', default=30) lead_month_count = fields.Integer( 'Leads (30 days)', compute='_compute_lead_month_count', help='Lead assigned to this member those last 30 days') @api.depends('user_id', 'crm_team_id') def _compute_lead_month_count(self): for member in self: if member.user_id.id and member.crm_team_id.id: member.lead_month_count = self.env['crm.lead'].with_context(active_test=False).search_count( member._get_lead_month_domain() ) else: member.lead_month_count = 0 @api.constrains('assignment_domain') def _constrains_assignment_domain(self): for member in self: try: domain = literal_eval(member.assignment_domain or '[]') if domain: self.env['crm.lead'].search(domain, limit=1) except Exception: raise exceptions.ValidationError(_( 'Member assignment domain for user %(user)s and team %(team)s is incorrectly formatted', user=member.user_id.name, team=member.crm_team_id.name )) def _get_lead_month_domain(self): limit_date = fields.Datetime.now() - datetime.timedelta(days=30) return [ ('user_id', '=', self.user_id.id), ('team_id', '=', self.crm_team_id.id), ('date_open', '>=', limit_date), ] # ------------------------------------------------------------ # LEAD ASSIGNMENT # ------------------------------------------------------------ def _assign_and_convert_leads(self, work_days=1): """ Main processing method to assign 
leads to sales team members. It also converts them into opportunities. This method should be called after ``_allocate_leads`` as this method assigns leads already allocated to the member's team. Its main purpose is therefore to distribute team workload on its members based on their capacity. Preparation * prepare lead domain for each member. It is done using a logical AND with team's domain and member's domain. Member domains further restricts team domain; * prepare a set of available leads for each member by searching for leads matching domain with a sufficient limit to ensure all members will receive leads; * prepare a weighted population sample. Population are members that should received leads. Initial weight is the number of leads to assign to that specific member. This is minimum value between * remaining this month: assignment_max - number of lead already assigned this month; * days-based assignment: assignment_max with a ratio based on ``work_days`` parameter (see ``CrmTeam.action_assign_leads()``) * e.g. Michel Poilvache (max: 30 - currently assigned: 15) limit for 2 work days: min(30-15, 30/15) -> 2 leads assigned * e.g. Michel Tartopoil (max: 30 - currently assigned: 26) limit for 10 work days: min(30-26, 30/3) -> 4 leads assigned This method then follows the following heuristic * take a weighted random choice in population; * find first available (not yet assigned) lead in its lead set; * if found: * convert it into an opportunity and assign member as salesperson; * lessen member's weight so that other members have an higher probability of being picked up next; * if not found: consider this member is out of assignment process, remove it from population so that it is not picked up anymore; Assignment is performed one lead at a time for fairness purpose. Indeed members may have overlapping domains within a given team. To ensure some fairness in process once a member receives a lead, a new choice is performed with updated weights. 
This is not optimal from performance point of view but increases probability leads are correctly distributed within the team. :param float work_days: see ``CrmTeam.action_assign_leads()``; :return members_data: dict() with each member assignment result: membership: { 'assigned': set of lead IDs directly assigned to the member; }, ... """ if work_days < 0.2 or work_days > 30: raise ValueError( _('Leads team allocation should be done for at least 0.2 or maximum 30 work days, not %.2f.', work_days) ) members_data, population, weights = dict(), list(), list() members = self.filtered(lambda member: not member.assignment_optout and member.assignment_max > 0) if not members: return members_data # prepare a global lead count based on total leads to assign to salespersons lead_limit = sum( member._get_assignment_quota(work_days=work_days) for member in members ) # could probably be optimized for member in members: lead_domain = expression.AND([ literal_eval(member.assignment_domain or '[]'), ['&', '&', ('user_id', '=', False), ('date_open', '=', False), ('team_id', '=', member.crm_team_id.id)] ]) leads = self.env["crm.lead"].search(lead_domain, order='probability DESC', limit=lead_limit) to_assign = member._get_assignment_quota(work_days=work_days) members_data[member.id] = { "team_member": member, "max": member.assignment_max, "to_assign": to_assign, "leads": leads, "assigned": self.env["crm.lead"], } population.append(member.id) weights.append(to_assign) leads_done_ids = set() counter = 0 # auto-commit except in testing mode auto_commit = not getattr(threading.current_thread(), 'testing', False) commit_bundle_size = int(self.env['ir.config_parameter'].sudo().get_param('crm.assignment.commit.bundle', 100)) while population and any(weights): counter += 1 member_id = random.choices(population, weights=weights, k=1)[0] member_index = population.index(member_id) member_data = members_data[member_id] lead = next((lead for lead in member_data['leads'] if lead.id not in 
leads_done_ids), False) if lead: leads_done_ids.add(lead.id) members_data[member_id]["assigned"] += lead weights[member_index] = weights[member_index] - 1 lead.with_context(mail_auto_subscribe_no_notify=True).convert_opportunity( lead.partner_id.id, user_ids=member_data['team_member'].user_id.ids ) if auto_commit and counter % commit_bundle_size == 0: self._cr.commit() else: weights[member_index] = 0 if weights[member_index] <= 0: population.pop(member_index) weights.pop(member_index) # failsafe if counter > 100000: population = list() if auto_commit: self._cr.commit() # log results and return result_data = dict( (member_info["team_member"], {"assigned": member_info["assigned"]}) for member_id, member_info in members_data.items() ) _logger.info('Assigned %s leads to %s salesmen', len(leads_done_ids), len(members)) for member, member_info in result_data.items(): _logger.info('-> member %s: assigned %d leads (%s)', member.id, len(member_info["assigned"]), member_info["assigned"]) return result_data def _get_assignment_quota(self, work_days=1): """ Compute assignment quota based on work_days. This quota includes a compensation to speedup getting to the lead average (``assignment_max``). As this field is a counter for "30 days" -> divide by requested work days in order to have base assign number then add compensation. :param float work_days: see ``CrmTeam.action_assign_leads()``; """ assign_ratio = work_days / 30.0 to_assign = self.assignment_max * assign_ratio compensation = max(0, self.assignment_max - (self.lead_month_count + to_assign)) * 0.2 return round(to_assign + compensation)
44.273585
9,386
965
py
PYTHON
15.0
# -*- coding: utf-8 -*- from odoo import fields, models class LeadScoringFrequency(models.Model): _name = 'crm.lead.scoring.frequency' _description = 'Lead Scoring Frequency' variable = fields.Char('Variable', index=True) value = fields.Char('Value') won_count = fields.Float('Won Count', digits=(16, 1)) # Float because we add 0.1 to avoid zero Frequency issue lost_count = fields.Float('Lost Count', digits=(16, 1)) # Float because we add 0.1 to avoid zero Frequency issue team_id = fields.Many2one('crm.team', 'Sales Team', ondelete="cascade") class FrequencyField(models.Model): _name = 'crm.lead.scoring.frequency.field' _description = 'Fields that can be used for predictive lead scoring computation' name = fields.Char(related="field_id.field_description") field_id = fields.Many2one( 'ir.model.fields', domain=[('model_id.model', '=', 'crm.lead')], required=True, ondelete='cascade', )
41.956522
965
2,389
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, fields, models class CalendarEvent(models.Model): _inherit = 'calendar.event' @api.model def default_get(self, fields): if self.env.context.get('default_opportunity_id'): self = self.with_context( default_res_model_id=self.env.ref('crm.model_crm_lead').id, default_res_id=self.env.context['default_opportunity_id'] ) defaults = super(CalendarEvent, self).default_get(fields) # sync res_model / res_id to opportunity id (aka creating meeting from lead chatter) if 'opportunity_id' not in defaults: if self._is_crm_lead(defaults, self.env.context): defaults['opportunity_id'] = defaults.get('res_id', False) or self.env.context.get('default_res_id', False) return defaults opportunity_id = fields.Many2one( 'crm.lead', 'Opportunity', domain="[('type', '=', 'opportunity')]", index=True, ondelete='set null') def _compute_is_highlighted(self): super(CalendarEvent, self)._compute_is_highlighted() if self.env.context.get('active_model') == 'crm.lead': opportunity_id = self.env.context.get('active_id') for event in self: if event.opportunity_id.id == opportunity_id: event.is_highlighted = True @api.model_create_multi def create(self, vals): events = super(CalendarEvent, self).create(vals) for event in events: if event.opportunity_id and not event.activity_ids: event.opportunity_id.log_meeting(event.name, event.start, event.duration) return events def _is_crm_lead(self, defaults, ctx=None): """ This method checks if the concerned model is a CRM lead. The information is not always in the defaults values, this is why it is necessary to check the context too. 
""" res_model = defaults.get('res_model', False) or ctx and ctx.get('default_res_model') res_model_id = defaults.get('res_model_id', False) or ctx and ctx.get('default_res_model_id') return res_model and res_model == 'crm.lead' or res_model_id and self.env['ir.model'].sudo().browse(res_model_id).model == 'crm.lead'
43.436364
2,389
1,373
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class UtmCampaign(models.Model): _inherit = 'utm.campaign' use_leads = fields.Boolean('Use Leads', compute='_compute_use_leads') crm_lead_count = fields.Integer('Leads/Opportunities count', groups='sales_team.group_sale_salesman', compute="_compute_crm_lead_count") def _compute_use_leads(self): self.use_leads = self.env.user.has_group('crm.group_use_lead') def _compute_crm_lead_count(self): lead_data = self.env['crm.lead'].with_context(active_test=False).read_group([ ('campaign_id', 'in', self.ids)], ['campaign_id'], ['campaign_id']) mapped_data = {datum['campaign_id'][0]: datum['campaign_id_count'] for datum in lead_data} for campaign in self: campaign.crm_lead_count = mapped_data.get(campaign.id, 0) def action_redirect_to_leads_opportunities(self): view = 'crm.crm_lead_all_leads' if self.use_leads else 'crm.crm_lead_opportunities' action = self.env['ir.actions.act_window']._for_xml_id(view) action['view_mode'] = 'tree,kanban,graph,pivot,form,calendar' action['domain'] = [('campaign_id', 'in', self.ids)] action['context'] = {'active_test': False, 'create': False} return action
45.766667
1,373
657
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class RecurringPlan(models.Model): _name = "crm.recurring.plan" _description = "CRM Recurring revenue plans" _order = "sequence" name = fields.Char('Plan Name', required=True, translate=True) number_of_months = fields.Integer('# Months', required=True) active = fields.Boolean('Active', default=True) sequence = fields.Integer('Sequence', default=10) _sql_constraints = [ ('check_number_of_months', 'CHECK(number_of_months >= 0)', 'The number of month can\'t be negative.'), ]
34.578947
657
2,236
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, fields, models AVAILABLE_PRIORITIES = [ ('0', 'Low'), ('1', 'Medium'), ('2', 'High'), ('3', 'Very High'), ] class Stage(models.Model): """ Model for case stages. This models the main stages of a document management flow. Main CRM objects (leads, opportunities, project issues, ...) will now use only stages, instead of state and stages. Stages are for example used to display the kanban view of records. """ _name = "crm.stage" _description = "CRM Stages" _rec_name = 'name' _order = "sequence, name, id" @api.model def default_get(self, fields): """ As we have lots of default_team_id in context used to filter out leads and opportunities, we pop this key from default of stage creation. Otherwise stage will be created for a given team only which is not the standard behavior of stages. """ if 'default_team_id' in self.env.context: ctx = dict(self.env.context) ctx.pop('default_team_id') self = self.with_context(ctx) return super(Stage, self).default_get(fields) name = fields.Char('Stage Name', required=True, translate=True) sequence = fields.Integer('Sequence', default=1, help="Used to order stages. Lower is better.") is_won = fields.Boolean('Is Won Stage?') requirements = fields.Text('Requirements', help="Enter here the internal requirements for this stage (ex: Offer sent to customer). It will appear as a tooltip over the stage's name.") team_id = fields.Many2one('crm.team', string='Sales Team', ondelete="set null", help='Specific team that uses this stage. 
Other teams will not be able to see or use this stage.') fold = fields.Boolean('Folded in Pipeline', help='This stage is folded in the kanban view when there are no records in that stage to display.') # This field for interface only team_count = fields.Integer('team_count', compute='_compute_team_count') @api.depends('name') def _compute_team_count(self): self.team_count = self.env['crm.team'].search_count([])
44.72
2,236
1,063
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import models class MailActivity(models.Model): _inherit = "mail.activity" def action_create_calendar_event(self): """ Small override of the action that creates a calendar. If the activity is linked to a crm.lead through the "opportunity_id" field, we include in the action context the default values used when scheduling a meeting from the crm.lead form view. e.g: It will set the partner_id of the crm.lead as default attendee of the meeting. """ action = super(MailActivity, self).action_create_calendar_event() opportunity = self.calendar_event_id.opportunity_id if opportunity: opportunity_action_context = opportunity.action_schedule_meeting(smart_calendar=False).get('context', {}) opportunity_action_context['initial_date'] = self.calendar_event_id.start action['context'].update(opportunity_action_context) return action
40.884615
1,063
1,168
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models from odoo.addons.base.models.ir_model import MODULE_UNINSTALL_FLAG class IrConfigParameter(models.Model): _inherit = 'ir.config_parameter' def write(self, vals): result = super(IrConfigParameter, self).write(vals) if any(record.key == "crm.pls_fields" for record in self): self.flush() self.env.registry.setup_models(self.env.cr) return result @api.model_create_multi def create(self, vals_list): records = super(IrConfigParameter, self).create(vals_list) if any(record.key == "crm.pls_fields" for record in records): self.flush() self.env.registry.setup_models(self.env.cr) return records def unlink(self): pls_emptied = any(record.key == "crm.pls_fields" for record in self) result = super(IrConfigParameter, self).unlink() if pls_emptied and not self._context.get(MODULE_UNINSTALL_FLAG): self.flush() self.env.registry.setup_models(self.env.cr) return result
36.5
1,168
323
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class Users(models.Model): _inherit = 'res.users' target_sales_won = fields.Integer('Won in Opportunities Target') target_sales_done = fields.Integer('Activities Done Target')
29.363636
323
10,332
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from datetime import timedelta from dateutil.relativedelta import relativedelta from odoo import api, exceptions, fields, models, _ class ResConfigSettings(models.TransientModel): _inherit = 'res.config.settings' group_use_lead = fields.Boolean(string="Leads", implied_group='crm.group_use_lead') group_use_recurring_revenues = fields.Boolean(string="Recurring Revenues", implied_group='crm.group_use_recurring_revenues') # Membership is_membership_multi = fields.Boolean(string='Multi Teams', config_parameter='sales_team.membership_multi') # Lead assignment crm_use_auto_assignment = fields.Boolean( string='Rule-Based Assignment', config_parameter='crm.lead.auto.assignment') crm_auto_assignment_action = fields.Selection([ ('manual', 'Manually'), ('auto', 'Repeatedly')], string='Auto Assignment Action', compute='_compute_crm_auto_assignment_data', readonly=False, store=True, help='Manual assign allow to trigger assignment from team form view using an action button. Automatic configures a cron running repeatedly assignment in all teams.') crm_auto_assignment_interval_type = fields.Selection([ ('minutes', 'Minutes'), ('hours', 'Hours'), ('days', 'Days'), ('weeks', 'Weeks')], string='Auto Assignment Interval Unit', compute='_compute_crm_auto_assignment_data', readonly=False, store=True, help='Interval type between each cron run (e.g. each 2 days or each 2 hours)') crm_auto_assignment_interval_number = fields.Integer( string="Repeat every", compute='_compute_crm_auto_assignment_data', readonly=False, store=True, help='Number of interval type between each cron run (e.g. 
each 2 days or each 4 days)') crm_auto_assignment_run_datetime = fields.Datetime( string="Auto Assignment Next Execution Date", compute='_compute_crm_auto_assignment_data', readonly=False, store=True) # IAP module_crm_iap_mine = fields.Boolean("Generate new leads based on their country, industries, size, etc.") module_crm_iap_enrich = fields.Boolean("Enrich your leads automatically with company data based on their email address.") module_website_crm_iap_reveal = fields.Boolean("Create Leads/Opportunities from your website's traffic") lead_enrich_auto = fields.Selection([ ('manual', 'Enrich leads on demand only'), ('auto', 'Enrich all leads automatically'), ], string='Enrich lead automatically', default='manual', config_parameter='crm.iap.lead.enrich.setting') lead_mining_in_pipeline = fields.Boolean("Create a lead mining request directly from the opportunity pipeline.", config_parameter='crm.lead_mining_in_pipeline') predictive_lead_scoring_start_date = fields.Date(string='Lead Scoring Starting Date', compute="_compute_pls_start_date", inverse="_inverse_pls_start_date_str") predictive_lead_scoring_start_date_str = fields.Char(string='Lead Scoring Starting Date in String', config_parameter='crm.pls_start_date') predictive_lead_scoring_fields = fields.Many2many('crm.lead.scoring.frequency.field', string='Lead Scoring Frequency Fields', compute="_compute_pls_fields", inverse="_inverse_pls_fields_str") predictive_lead_scoring_fields_str = fields.Char(string='Lead Scoring Frequency Fields in String', config_parameter='crm.pls_fields') predictive_lead_scoring_field_labels = fields.Char(compute='_compute_predictive_lead_scoring_field_labels') @api.depends('crm_use_auto_assignment') def _compute_crm_auto_assignment_data(self): assign_cron = self.sudo().env.ref('crm.ir_cron_crm_lead_assign', raise_if_not_found=False) for setting in self: if setting.crm_use_auto_assignment and assign_cron: setting.crm_auto_assignment_action = 'auto' if assign_cron.active else 'manual' 
setting.crm_auto_assignment_interval_type = assign_cron.interval_type or 'days' setting.crm_auto_assignment_interval_number = assign_cron.interval_number or 1 setting.crm_auto_assignment_run_datetime = assign_cron.nextcall else: setting.crm_auto_assignment_action = 'manual' setting.crm_auto_assignment_interval_type = setting.crm_auto_assignment_run_datetime = False setting.crm_auto_assignment_interval_number = 1 @api.onchange('crm_auto_assignment_interval_type', 'crm_auto_assignment_interval_number') def _onchange_crm_auto_assignment_run_datetime(self): if self.crm_auto_assignment_interval_number <= 0: raise exceptions.UserError(_('Repeat frequency should be positive.')) elif self.crm_auto_assignment_interval_number >= 100: raise exceptions.UserError(_('Invalid repeat frequency. Consider changing frequency type instead of using large numbers.')) self.crm_auto_assignment_run_datetime = self._get_crm_auto_assignmment_run_datetime( self.crm_auto_assignment_run_datetime, self.crm_auto_assignment_interval_type, self.crm_auto_assignment_interval_number ) @api.depends('predictive_lead_scoring_fields_str') def _compute_pls_fields(self): """ As config_parameters does not accept m2m field, we get the fields back from the Char config field, to ease the configuration in config panel """ for setting in self: if setting.predictive_lead_scoring_fields_str: names = setting.predictive_lead_scoring_fields_str.split(',') fields = self.env['ir.model.fields'].search([('name', 'in', names), ('model', '=', 'crm.lead')]) setting.predictive_lead_scoring_fields = self.env['crm.lead.scoring.frequency.field'].search([('field_id', 'in', fields.ids)]) else: setting.predictive_lead_scoring_fields = None def _inverse_pls_fields_str(self): """ As config_parameters does not accept m2m field, we store the fields with a comma separated string into a Char config field """ for setting in self: if setting.predictive_lead_scoring_fields: setting.predictive_lead_scoring_fields_str = 
','.join(setting.predictive_lead_scoring_fields.mapped('field_id.name')) else: setting.predictive_lead_scoring_fields_str = '' @api.depends('predictive_lead_scoring_start_date_str') def _compute_pls_start_date(self): """ As config_parameters does not accept Date field, we get the date back from the Char config field, to ease the configuration in config panel """ for setting in self: lead_scoring_start_date = setting.predictive_lead_scoring_start_date_str # if config param is deleted / empty, set the date 8 days prior to current date if not lead_scoring_start_date: setting.predictive_lead_scoring_start_date = fields.Date.to_date(fields.Date.today() - timedelta(days=8)) else: try: setting.predictive_lead_scoring_start_date = fields.Date.to_date(lead_scoring_start_date) except ValueError: # the config parameter is malformed, so set the date 8 days prior to current date setting.predictive_lead_scoring_start_date = fields.Date.to_date(fields.Date.today() - timedelta(days=8)) def _inverse_pls_start_date_str(self): """ As config_parameters does not accept Date field, we store the date formated string into a Char config field """ for setting in self: if setting.predictive_lead_scoring_start_date: setting.predictive_lead_scoring_start_date_str = fields.Date.to_string(setting.predictive_lead_scoring_start_date) @api.depends('predictive_lead_scoring_fields') def _compute_predictive_lead_scoring_field_labels(self): for setting in self: if setting.predictive_lead_scoring_fields: field_names = [_('Stage')] + [field.name for field in setting.predictive_lead_scoring_fields] setting.predictive_lead_scoring_field_labels = _('%s and %s', ', '.join(field_names[:-1]), field_names[-1]) else: setting.predictive_lead_scoring_field_labels = _('Stage') def set_values(self): group_lead_before = self.env.ref('crm.group_use_lead') in self.env.user.groups_id super(ResConfigSettings, self).set_values() # update use leads / opportunities setting on all teams according to settings update 
group_lead_after = self.env.ref('crm.group_use_lead') in self.env.user.groups_id if group_lead_before != group_lead_after: teams = self.env['crm.team'].search([]) teams.filtered('use_opportunities').use_leads = group_lead_after for team in teams: team.alias_id.write(team._alias_get_creation_values()) # synchronize cron with settings assign_cron = self.sudo().env.ref('crm.ir_cron_crm_lead_assign', raise_if_not_found=False) if assign_cron: assign_cron.active = self.crm_use_auto_assignment and self.crm_auto_assignment_action == 'auto' assign_cron.interval_type = self.crm_auto_assignment_interval_type assign_cron.interval_number = self.crm_auto_assignment_interval_number # keep nextcall on cron as it is required whatever the setting assign_cron.nextcall = self.crm_auto_assignment_run_datetime if self.crm_auto_assignment_run_datetime else assign_cron.nextcall # TDE FIXME: re create cron if not found ? def _get_crm_auto_assignmment_run_datetime(self, run_datetime, run_interval, run_interval_number): if not run_interval: return False if run_interval == 'manual': return run_datetime if run_datetime else False return fields.Datetime.now() + relativedelta(**{run_interval: run_interval_number}) def action_crm_assign_leads(self): self.ensure_one() return self.env['crm.team'].search([('assignment_optout', '=', False)]).action_assign_leads(work_days=2, log=False)
64.173913
10,332
124,746
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import logging import pytz import threading from collections import OrderedDict, defaultdict from datetime import date, datetime, timedelta from psycopg2 import sql from odoo import api, fields, models, tools, SUPERUSER_ID from odoo.addons.iap.tools import iap_tools from odoo.addons.mail.tools import mail_validation from odoo.addons.phone_validation.tools import phone_validation from odoo.exceptions import UserError, AccessError from odoo.osv import expression from odoo.tools.translate import _ from odoo.tools import date_utils, email_re, email_split, is_html_empty from . import crm_stage _logger = logging.getLogger(__name__) CRM_LEAD_FIELDS_TO_MERGE = [ # UTM mixin 'campaign_id', 'medium_id', 'source_id', # Mail mixin 'email_cc', # description 'name', 'user_id', 'company_id', 'team_id', # pipeline 'stage_id', # revenues 'expected_revenue', # dates 'create_date', 'date_action_last', # partner / contact 'partner_id', 'title', 'partner_name', 'contact_name', 'email_from', 'mobile', 'phone', 'website', # address 'street', 'street2', 'zip', 'city', 'state_id', 'country_id', ] # Subset of partner fields: sync any of those PARTNER_FIELDS_TO_SYNC = [ 'mobile', 'title', 'function', 'website', ] # Subset of partner fields: sync all or none to avoid mixed addresses PARTNER_ADDRESS_FIELDS_TO_SYNC = [ 'street', 'street2', 'city', 'zip', 'state_id', 'country_id', ] # Those values have been determined based on benchmark to minimise # computation time, number of transaction and transaction time. 
PLS_COMPUTE_BATCH_STEP = 50000 # odoo.models.PREFETCH_MAX = 1000 but larger cluster can speed up global computation PLS_UPDATE_BATCH_STEP = 5000 class Lead(models.Model): _name = "crm.lead" _description = "Lead/Opportunity" _order = "priority desc, id desc" _inherit = ['mail.thread.cc', 'mail.thread.blacklist', 'mail.thread.phone', 'mail.activity.mixin', 'utm.mixin', 'format.address.mixin', ] _primary_email = 'email_from' _check_company_auto = True # Description name = fields.Char( 'Opportunity', index=True, required=True, compute='_compute_name', readonly=False, store=True) user_id = fields.Many2one( 'res.users', string='Salesperson', default=lambda self: self.env.user, domain="['&', ('share', '=', False), ('company_ids', 'in', user_company_ids)]", check_company=True, index=True, tracking=True) user_company_ids = fields.Many2many( 'res.company', compute='_compute_user_company_ids', help='UX: Limit to lead company or all if no company') user_email = fields.Char('User Email', related='user_id.email', readonly=True) user_login = fields.Char('User Login', related='user_id.login', readonly=True) team_id = fields.Many2one( 'crm.team', string='Sales Team', check_company=True, index=True, tracking=True, domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]", compute='_compute_team_id', ondelete="set null", readonly=False, store=True) company_id = fields.Many2one( 'res.company', string='Company', index=True, compute='_compute_company_id', readonly=False, store=True) referred = fields.Char('Referred By') description = fields.Html('Notes') active = fields.Boolean('Active', default=True, tracking=True) type = fields.Selection([ ('lead', 'Lead'), ('opportunity', 'Opportunity')], index=True, required=True, tracking=15, default=lambda self: 'lead' if self.env['res.users'].has_group('crm.group_use_lead') else 'opportunity') # Pipeline management priority = fields.Selection( crm_stage.AVAILABLE_PRIORITIES, string='Priority', index=True, 
        default=crm_stage.AVAILABLE_PRIORITIES[0][0])
    stage_id = fields.Many2one(
        'crm.stage', string='Stage', index=True, tracking=True,
        compute='_compute_stage_id', readonly=False, store=True,
        copy=False, group_expand='_read_group_stage_ids', ondelete='restrict',
        domain="['|', ('team_id', '=', False), ('team_id', '=', team_id)]")
    kanban_state = fields.Selection([
        ('grey', 'No next activity planned'),
        ('red', 'Next activity late'),
        ('green', 'Next activity is planned')], string='Kanban State',
        compute='_compute_kanban_state')
    tag_ids = fields.Many2many(
        'crm.tag', 'crm_tag_rel', 'lead_id', 'tag_id', string='Tags',
        help="Classify and analyze your lead/opportunity categories like: Training, Service")
    color = fields.Integer('Color Index', default=0)
    # Revenues
    expected_revenue = fields.Monetary('Expected Revenue', currency_field='company_currency', tracking=True)
    prorated_revenue = fields.Monetary('Prorated Revenue', currency_field='company_currency', store=True, compute="_compute_prorated_revenue")
    recurring_revenue = fields.Monetary('Recurring Revenues', currency_field='company_currency', groups="crm.group_use_recurring_revenues", tracking=True)
    recurring_plan = fields.Many2one('crm.recurring.plan', string="Recurring Plan", groups="crm.group_use_recurring_revenues")
    recurring_revenue_monthly = fields.Monetary(
        'Expected MRR', currency_field='company_currency', store=True,
        compute="_compute_recurring_revenue_monthly",
        groups="crm.group_use_recurring_revenues")
    recurring_revenue_monthly_prorated = fields.Monetary(
        'Prorated MRR', currency_field='company_currency', store=True,
        compute="_compute_recurring_revenue_monthly_prorated",
        groups="crm.group_use_recurring_revenues")
    company_currency = fields.Many2one("res.currency", string='Currency', compute="_compute_company_currency", readonly=True)
    # Dates
    date_closed = fields.Datetime('Closed Date', readonly=True, copy=False)
    date_action_last = fields.Datetime('Last Action', readonly=True)
    date_open = fields.Datetime(
        'Assignment Date', compute='_compute_date_open', readonly=True, store=True)
    day_open = fields.Float('Days to Assign', compute='_compute_day_open', store=True)
    day_close = fields.Float('Days to Close', compute='_compute_day_close', store=True)
    date_last_stage_update = fields.Datetime(
        'Last Stage Update', compute='_compute_date_last_stage_update', index=True, readonly=True, store=True)
    date_conversion = fields.Datetime('Conversion Date', readonly=True)
    date_deadline = fields.Date('Expected Closing', help="Estimate of the date on which the opportunity will be won.")
    # Customer / contact
    partner_id = fields.Many2one(
        'res.partner', string='Customer', check_company=True, index=True, tracking=10,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        help="Linked partner (optional). Usually created when converting the lead. You can find a partner by its Name, TIN, Email or Internal Reference.")
    partner_is_blacklisted = fields.Boolean('Partner is blacklisted', related='partner_id.is_blacklisted', readonly=True)
    contact_name = fields.Char(
        'Contact Name', tracking=30, compute='_compute_contact_name', readonly=False, store=True)
    partner_name = fields.Char(
        'Company Name', tracking=20, index=True,
        compute='_compute_partner_name', readonly=False, store=True,
        help='The name of the future partner company that will be created while converting the lead into opportunity')
    function = fields.Char('Job Position', compute='_compute_function', readonly=False, store=True)
    title = fields.Many2one('res.partner.title', string='Title', compute='_compute_title', readonly=False, store=True)
    email_from = fields.Char(
        'Email', tracking=40, index=True,
        compute='_compute_email_from', inverse='_inverse_email_from', readonly=False, store=True)
    phone = fields.Char(
        'Phone', tracking=50,
        compute='_compute_phone', inverse='_inverse_phone', readonly=False, store=True)
    mobile = fields.Char('Mobile', compute='_compute_mobile', readonly=False, store=True)
    phone_state = fields.Selection([
        ('correct', 'Correct'),
        ('incorrect', 'Incorrect')], string='Phone Quality', compute="_compute_phone_state", store=True)
    email_state = fields.Selection([
        ('correct', 'Correct'),
        ('incorrect', 'Incorrect')], string='Email Quality', compute="_compute_email_state", store=True)
    website = fields.Char('Website', index=True, help="Website of the contact", compute="_compute_website", readonly=False, store=True)
    lang_id = fields.Many2one(
        'res.lang', string='Language',
        compute='_compute_lang_id', readonly=False, store=True)
    # Address fields
    street = fields.Char('Street', compute='_compute_partner_address_values', readonly=False, store=True)
    street2 = fields.Char('Street2', compute='_compute_partner_address_values', readonly=False, store=True)
    zip = fields.Char('Zip', change_default=True, compute='_compute_partner_address_values', readonly=False, store=True)
    city = fields.Char('City', compute='_compute_partner_address_values', readonly=False, store=True)
    state_id = fields.Many2one(
        "res.country.state", string='State',
        compute='_compute_partner_address_values', readonly=False, store=True,
        domain="[('country_id', '=?', country_id)]")
    country_id = fields.Many2one(
        'res.country', string='Country',
        compute='_compute_partner_address_values', readonly=False, store=True)
    # Probability (Opportunity only)
    probability = fields.Float(
        'Probability', group_operator="avg", copy=False,
        compute='_compute_probabilities', readonly=False, store=True)
    automated_probability = fields.Float('Automated Probability', compute='_compute_probabilities', readonly=True, store=True)
    is_automated_probability = fields.Boolean('Is automated probability?', compute="_compute_is_automated_probability")
    # Won/Lost
    lost_reason = fields.Many2one(
        'crm.lost.reason', string='Lost Reason',
        index=True, ondelete='restrict', tracking=True)
    # Statistics
    calendar_event_ids = fields.One2many('calendar.event', 'opportunity_id', string='Meetings')
    calendar_event_count = fields.Integer('# Meetings',
                                          compute='_compute_calendar_event_count')
    duplicate_lead_ids = fields.Many2many("crm.lead", compute="_compute_potential_lead_duplicates", string="Potential Duplicate Lead", context={"active_test": False})
    duplicate_lead_count = fields.Integer(compute="_compute_potential_lead_duplicates", string="Potential Duplicate Lead Count")
    # UX
    partner_email_update = fields.Boolean('Partner Email will Update', compute='_compute_partner_email_update')
    partner_phone_update = fields.Boolean('Partner Phone will Update', compute='_compute_partner_phone_update')

    _sql_constraints = [
        ('check_probability', 'check(probability >= 0 and probability <= 100)', 'The probability of closing the deal should be between 0% and 100%!')
    ]

    @api.depends('activity_date_deadline')
    def _compute_kanban_state(self):
        # Kanban state mirrors the next activity: green when its deadline is
        # today or later, red when it is overdue, grey when there is none.
        today = date.today()
        for lead in self:
            kanban_state = 'grey'
            if lead.activity_date_deadline:
                lead_date = fields.Date.from_string(lead.activity_date_deadline)
                if lead_date >= today:
                    kanban_state = 'green'
                else:
                    kanban_state = 'red'
            lead.kanban_state = kanban_state

    @api.depends('company_id')
    def _compute_user_company_ids(self):
        """ UX helper: selectable salespersons are limited to the lead company,
        or to all companies when the lead has none. """
        all_companies = self.env['res.company'].search([])
        for lead in self:
            if not lead.company_id:
                lead.user_company_ids = all_companies
            else:
                lead.user_company_ids = lead.company_id

    @api.depends('company_id')
    def _compute_company_currency(self):
        # Fall back on the current company's currency when the lead has no company.
        for lead in self:
            if not lead.company_id:
                lead.company_currency = self.env.company.currency_id
            else:
                lead.company_currency = lead.company_id.currency_id

    @api.depends('user_id', 'type')
    def _compute_team_id(self):
        """ When changing the user, also set a team_id or restrict team id
        to the ones user_id is member of.
""" for lead in self: # setting user as void should not trigger a new team computation if not lead.user_id: continue user = lead.user_id if lead.team_id and user in (lead.team_id.member_ids | lead.team_id.user_id): continue team_domain = [('use_leads', '=', True)] if lead.type == 'lead' else [('use_opportunities', '=', True)] team = self.env['crm.team']._get_default_team_id(user_id=user.id, domain=team_domain) lead.team_id = team.id @api.depends('user_id', 'team_id', 'partner_id') def _compute_company_id(self): """ Compute company_id coherency. """ for lead in self: proposal = lead.company_id # invalidate wrong configuration if proposal: # company not in responsible companies if lead.user_id and proposal not in lead.user_id.company_ids: proposal = False # inconsistent if lead.team_id.company_id and proposal != lead.team_id.company_id: proposal = False # void company on team and no assignee if lead.team_id and not lead.team_id.company_id and not lead.user_id: proposal = False # no user and no team -> void company and let assignment do its job # unless customer has a company if not lead.team_id and not lead.user_id and \ (not lead.partner_id or lead.partner_id.company_id != proposal): proposal = False # propose a new company based on team > user (respecting context) > partner if not proposal: if lead.team_id.company_id: proposal = lead.team_id.company_id elif lead.user_id: if self.env.company in lead.user_id.company_ids: proposal = self.env.company else: proposal = lead.user_id.company_id & self.env.companies elif lead.partner_id: proposal = lead.partner_id.company_id else: proposal = False # set a new company if lead.company_id != proposal: lead.company_id = proposal @api.depends('team_id', 'type') def _compute_stage_id(self): for lead in self: if not lead.stage_id: lead.stage_id = lead._stage_find(domain=[('fold', '=', False)]).id @api.depends('user_id') def _compute_date_open(self): for lead in self: lead.date_open = fields.Datetime.now() if lead.user_id else 
False @api.depends('stage_id') def _compute_date_last_stage_update(self): for lead in self: lead.date_last_stage_update = fields.Datetime.now() @api.depends('create_date', 'date_open') def _compute_day_open(self): """ Compute difference between create date and open date """ leads = self.filtered(lambda l: l.date_open and l.create_date) others = self - leads others.day_open = None for lead in leads: date_create = fields.Datetime.from_string(lead.create_date).replace(microsecond=0) date_open = fields.Datetime.from_string(lead.date_open) lead.day_open = abs((date_open - date_create).days) @api.depends('create_date', 'date_closed') def _compute_day_close(self): """ Compute difference between current date and log date """ leads = self.filtered(lambda l: l.date_closed and l.create_date) others = self - leads others.day_close = None for lead in leads: date_create = fields.Datetime.from_string(lead.create_date) date_close = fields.Datetime.from_string(lead.date_closed) lead.day_close = abs((date_close - date_create).days) @api.depends('partner_id') def _compute_name(self): for lead in self: if not lead.name and lead.partner_id and lead.partner_id.name: lead.name = _("%s's opportunity") % lead.partner_id.name @api.depends('partner_id') def _compute_contact_name(self): """ compute the new values when partner_id has changed """ for lead in self: lead.update(lead._prepare_contact_name_from_partner(lead.partner_id)) @api.depends('partner_id') def _compute_partner_name(self): """ compute the new values when partner_id has changed """ for lead in self: lead.update(lead._prepare_partner_name_from_partner(lead.partner_id)) @api.depends('partner_id') def _compute_function(self): """ compute the new values when partner_id has changed """ for lead in self: if not lead.function or lead.partner_id.function: lead.function = lead.partner_id.function @api.depends('partner_id') def _compute_title(self): """ compute the new values when partner_id has changed """ for lead in self: if not 
lead.title or lead.partner_id.title: lead.title = lead.partner_id.title @api.depends('partner_id') def _compute_mobile(self): """ compute the new values when partner_id has changed """ for lead in self: if not lead.mobile or lead.partner_id.mobile: lead.mobile = lead.partner_id.mobile @api.depends('partner_id') def _compute_website(self): """ compute the new values when partner_id has changed """ for lead in self: if not lead.website or lead.partner_id.website: lead.website = lead.partner_id.website @api.depends('partner_id') def _compute_lang_id(self): """ compute the lang based on partner when partner_id has changed """ wo_lang = self.filtered(lambda lead: not lead.lang_id and lead.partner_id) if not wo_lang: return # prepare cache lang_codes = [code for code in wo_lang.mapped('partner_id.lang') if code] lang_id_by_code = dict( (code, self.env['res.lang']._lang_get_id(code)) for code in lang_codes ) for lead in wo_lang: lead.lang_id = lang_id_by_code.get(lead.partner_id.lang, False) @api.depends('partner_id') def _compute_partner_address_values(self): """ Sync all or none of address fields """ for lead in self: lead.update(lead._prepare_address_values_from_partner(lead.partner_id)) @api.depends('partner_id.email') def _compute_email_from(self): for lead in self: if lead.partner_id.email and lead._get_partner_email_update(): lead.email_from = lead.partner_id.email def _inverse_email_from(self): for lead in self: if lead._get_partner_email_update(): lead.partner_id.email = lead.email_from @api.depends('partner_id.phone') def _compute_phone(self): for lead in self: if lead.partner_id.phone and lead._get_partner_phone_update(): lead.phone = lead.partner_id.phone def _inverse_phone(self): for lead in self: if lead._get_partner_phone_update(): lead.partner_id.phone = lead.phone @api.depends('phone', 'country_id.code') def _compute_phone_state(self): for lead in self: phone_status = False if lead.phone: country_code = lead.country_id.code if lead.country_id and 
lead.country_id.code else None try: if phone_validation.phone_parse(lead.phone, country_code): # otherwise library not installed phone_status = 'correct' except UserError: phone_status = 'incorrect' lead.phone_state = phone_status @api.depends('email_from') def _compute_email_state(self): for lead in self: email_state = False if lead.email_from: email_state = 'incorrect' for email in email_split(lead.email_from): if mail_validation.mail_validate(email): email_state = 'correct' break lead.email_state = email_state @api.depends('probability', 'automated_probability') def _compute_is_automated_probability(self): """ If probability and automated_probability are equal probability computation is considered as automatic, aka probability is sync with automated_probability """ for lead in self: lead.is_automated_probability = tools.float_compare(lead.probability, lead.automated_probability, 2) == 0 @api.depends(lambda self: ['stage_id', 'team_id'] + self._pls_get_safe_fields()) def _compute_probabilities(self): lead_probabilities = self._pls_get_naive_bayes_probabilities() for lead in self: if lead.id in lead_probabilities: was_automated = lead.active and lead.is_automated_probability lead.automated_probability = lead_probabilities[lead.id] if was_automated: lead.probability = lead.automated_probability @api.depends('expected_revenue', 'probability') def _compute_prorated_revenue(self): for lead in self: lead.prorated_revenue = round((lead.expected_revenue or 0.0) * (lead.probability or 0) / 100.0, 2) @api.depends('recurring_revenue', 'recurring_plan.number_of_months') def _compute_recurring_revenue_monthly(self): for lead in self: lead.recurring_revenue_monthly = (lead.recurring_revenue or 0.0) / (lead.recurring_plan.number_of_months or 1) @api.depends('recurring_revenue_monthly', 'probability') def _compute_recurring_revenue_monthly_prorated(self): for lead in self: lead.recurring_revenue_monthly_prorated = (lead.recurring_revenue_monthly or 0.0) * (lead.probability or 0) 
/ 100.0 def _compute_calendar_event_count(self): if self.ids: meeting_data = self.env['calendar.event'].sudo().read_group([ ('opportunity_id', 'in', self.ids) ], ['opportunity_id'], ['opportunity_id']) mapped_data = {m['opportunity_id'][0]: m['opportunity_id_count'] for m in meeting_data} else: mapped_data = dict() for lead in self: lead.calendar_event_count = mapped_data.get(lead.id, 0) @api.depends('email_from', 'partner_id', 'contact_name', 'partner_name') def _compute_potential_lead_duplicates(self): MIN_EMAIL_LENGTH = 7 MIN_NAME_LENGTH = 6 SEARCH_RESULT_LIMIT = 21 def return_if_relevant(model_name, domain): """ Returns the recordset obtained by performing a search on the provided model with the provided domain if the cardinality of that recordset is below a given threshold (i.e: `SEARCH_RESULT_LIMIT`). Otherwise, returns an empty recordset of the provided model as it indicates search term was not relevant. Note: The function will use the administrator privileges to guarantee that a maximum amount of leads will be included in the search results and transcend multi-company record rules. It also includes archived records. Idea is that counter indicates duplicates are present and that lead could be escalated to managers. """ # Includes archived records and transcend multi-company record rules model = self.env[model_name].sudo().with_context(active_test=False) res = model.search(domain, limit=SEARCH_RESULT_LIMIT) return res if len(res) < SEARCH_RESULT_LIMIT else model def get_email_to_search(email): """ Returns the full email address if the domain of the email address is common (i.e: in the mail domain blacklist). Otherwise, returns the domain of the email address. A minimal length is required to avoid returning false positives records. 
""" if not email or len(email) < MIN_EMAIL_LENGTH: return False parts = email.rsplit('@', maxsplit=1) if len(parts) > 1: email_domain = parts[1] if email_domain not in iap_tools._MAIL_DOMAIN_BLACKLIST: return '@' + email_domain return email for lead in self: lead_id = lead._origin.id if isinstance(lead.id, models.NewId) else lead.id common_lead_domain = [ ('id', '!=', lead_id) ] duplicate_lead_ids = self.env['crm.lead'] email_search = get_email_to_search(lead.email_from) if email_search: duplicate_lead_ids |= return_if_relevant('crm.lead', common_lead_domain + [ ('email_from', 'ilike', email_search) ]) if lead.partner_name and len(lead.partner_name) >= MIN_NAME_LENGTH: duplicate_lead_ids |= return_if_relevant('crm.lead', common_lead_domain + [ ('partner_name', 'ilike', lead.partner_name) ]) if lead.contact_name and len(lead.contact_name) >= MIN_NAME_LENGTH: duplicate_lead_ids |= return_if_relevant('crm.lead', common_lead_domain + [ ('contact_name', 'ilike', lead.contact_name) ]) if lead.partner_id and lead.partner_id.commercial_partner_id: duplicate_lead_ids |= lead.with_context(active_test=False).search(common_lead_domain + [ ("partner_id", "child_of", lead.partner_id.commercial_partner_id.id) ]) lead.duplicate_lead_ids = duplicate_lead_ids + lead lead.duplicate_lead_count = len(duplicate_lead_ids) @api.depends('email_from', 'partner_id') def _compute_partner_email_update(self): for lead in self: lead.partner_email_update = lead._get_partner_email_update() @api.depends('phone', 'partner_id') def _compute_partner_phone_update(self): for lead in self: lead.partner_phone_update = lead._get_partner_phone_update() @api.onchange('phone', 'country_id', 'company_id') def _onchange_phone_validation(self): if self.phone: self.phone = self.phone_get_sanitized_number(number_fname='phone', force_format='INTERNATIONAL') or self.phone @api.onchange('mobile', 'country_id', 'company_id') def _onchange_mobile_validation(self): if self.mobile: self.mobile = 
self.phone_get_sanitized_number(number_fname='mobile', force_format='INTERNATIONAL') or self.mobile def _prepare_values_from_partner(self, partner): """ Get a dictionary with values coming from partner information to copy on a lead. Non-address fields get the current lead values to avoid being reset if partner has no value for them. """ # Sync all address fields from partner, or none, to avoid mixing them. values = self._prepare_address_values_from_partner(partner) # For other fields, get the info from the partner, but only if set values.update({f: partner[f] or self[f] for f in PARTNER_FIELDS_TO_SYNC}) if partner.lang: values['lang_id'] = self.env['res.lang']._lang_get_id(partner.lang) # Fields with specific logic values.update(self._prepare_contact_name_from_partner(partner)) values.update(self._prepare_partner_name_from_partner(partner)) return self._convert_to_write(values) def _prepare_address_values_from_partner(self, partner): # Sync all address fields from partner, or none, to avoid mixing them. if any(partner[f] for f in PARTNER_ADDRESS_FIELDS_TO_SYNC): values = {f: partner[f] for f in PARTNER_ADDRESS_FIELDS_TO_SYNC} else: values = {f: self[f] for f in PARTNER_ADDRESS_FIELDS_TO_SYNC} return values def _prepare_contact_name_from_partner(self, partner): contact_name = False if partner.is_company else partner.name return {'contact_name': contact_name or self.contact_name} def _prepare_partner_name_from_partner(self, partner): """ Company name: name of partner parent (if set) or name of partner (if company) or company_name of partner (if not a company). """ partner_name = partner.parent_id.name if not partner_name and partner.is_company: partner_name = partner.name elif not partner_name and partner.company_name: partner_name = partner.company_name return {'partner_name': partner_name or self.partner_name} def _get_partner_email_update(self): """Calculate if we should write the email on the related partner. 
        When the email of the lead / partner is an empty string, we force it to
        False to not propagate a False on an empty string.

        Done in a separate method so it can be used in both ribbon and inverse
        and compute of email update methods.
        """
        self.ensure_one()
        if self.partner_id and self.email_from != self.partner_id.email:
            # compare normalized emails so cosmetic differences do not count
            lead_email_normalized = tools.email_normalize(self.email_from) or self.email_from or False
            partner_email_normalized = tools.email_normalize(self.partner_id.email) or self.partner_id.email or False
            return lead_email_normalized != partner_email_normalized
        return False

    def _get_partner_phone_update(self):
        """Calculate if we should write the phone on the related partner.
        When the phone of the lead / partner is an empty string, we force it to
        False to not propagate a False on an empty string.

        Done in a separate method so it can be used in both ribbon and inverse
        and compute of phone update methods.
        """
        self.ensure_one()
        if self.partner_id and self.phone != self.partner_id.phone:
            # compare sanitized numbers so formatting differences do not count
            lead_phone_formatted = self.phone_get_sanitized_number(number_fname='phone') or self.phone or False
            partner_phone_formatted = self.partner_id.phone_get_sanitized_number(number_fname='phone') or self.partner_id.phone or False
            return lead_phone_formatted != partner_phone_formatted
        return False

    # ------------------------------------------------------------
    # ORM
    # ------------------------------------------------------------

    def _auto_init(self):
        # Add composite indexes speeding up the most common lead searches.
        res = super(Lead, self)._auto_init()
        tools.create_index(self._cr, 'crm_lead_user_id_team_id_type_index',
                           self._table, ['user_id', 'team_id', 'type'])
        tools.create_index(self._cr, 'crm_lead_create_date_team_id_idx',
                           self._table, ['create_date', 'team_id'])
        return res

    @api.model_create_multi
    def create(self, vals_list):
        for vals in vals_list:
            if vals.get('website'):
                vals['website'] = self.env['res.partner']._clean_website(vals['website'])
        leads = super(Lead, self).create(vals_list)

        # update the probability frequencies when a lead is created won or lost
        for lead, values in zip(leads, vals_list):
            if any(field in
                   ['active', 'stage_id'] for field in values):
                lead._handle_won_lost(values)

        return leads

    def write(self, vals):
        if vals.get('website'):
            vals['website'] = self.env['res.partner']._clean_website(vals['website'])

        stage_updated, stage_is_won = vals.get('stage_id'), False
        # stage change: update date_last_stage_update
        if stage_updated:
            stage = self.env['crm.stage'].browse(vals['stage_id'])
            if stage.is_won:
                # moving to a won stage forces probability to 100
                vals.update({'probability': 100, 'automated_probability': 100})
                stage_is_won = True

        # stage change with new stage: update probability and date_closed
        if vals.get('probability', 0) >= 100 or not vals.get('active', True):
            vals['date_closed'] = fields.Datetime.now()
        elif vals.get('probability', 0) > 0:
            vals['date_closed'] = False
        elif stage_updated and not stage_is_won and not 'probability' in vals:
            vals['date_closed'] = False

        if any(field in ['active', 'stage_id'] for field in vals):
            self._handle_won_lost(vals)

        if not stage_is_won:
            return super(Lead, self).write(vals)

        # stage change between two won stages: does not change the date_closed
        leads_already_won = self.filtered(lambda lead: lead.stage_id.is_won)
        remaining = self - leads_already_won
        if remaining:
            result = super(Lead, remaining).write(vals)
        if leads_already_won:
            vals.pop('date_closed', False)
            result = super(Lead, leads_already_won).write(vals)
        return result

    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        """ Override to support ordering on my_activity_date_deadline.

        Ordering through web client calls search_read with an order parameter
        set. Search_read then calls search. In this override we therefore
        override search to intercept a search without count with an order on
        my_activity_date_deadline. In that case we do the search in two steps.

        First step: fill with deadline-based results

          * Perform a read_group on my activities to get a mapping lead_id / deadline
            Remember date_deadline is required, we always have a value for it. Only
            the earliest deadline per lead is kept.
          * Search leads linked to those activities that also match the asked
            domain and order from the original search request.
          * Results of that search will be at the top of returned results. Use
            limit None because we have to search all leads linked to activities
            as ordering on deadline is done in post processing.
          * Reorder them according to deadline asc or desc depending on original
            search ordering. Finally take only a subset of those leads to fill
            with results matching asked offset / limit.

        Second step: fill with other results. If first step does not gives
        results enough to match offset and limit parameters we fill with a
        search on other leads. We keep the asked domain and ordering while
        filtering out already scanned leads to keep a coherent results.

        All other search and search_read are left untouched by this override to
        avoid side effects. Search_count is not affected by this override.
        """
        if count or not order or 'my_activity_date_deadline' not in order:
            return super(Lead, self).search(args, offset=offset, limit=limit, order=order, count=count)
        order_items = [order_item.strip().lower() for order_item in (order or self._order).split(',')]

        # Perform a read_group on my activities to get a mapping lead_id / deadline
        # Remember date_deadline is required, we always have a value for it. Only
        # the earliest deadline per lead is kept.
        activity_asc = any('my_activity_date_deadline asc' in item for item in order_items)
        my_lead_activities = self.env['mail.activity'].read_group(
            [('res_model', '=', self._name), ('user_id', '=', self.env.uid)],
            ['res_id', 'date_deadline:min'],
            ['res_id'],
            orderby='date_deadline ASC'
        )
        my_lead_mapping = dict((item['res_id'], item['date_deadline']) for item in my_lead_activities)
        my_lead_ids = list(my_lead_mapping.keys())
        my_lead_domain = expression.AND([[('id', 'in', my_lead_ids)], args])
        my_lead_order = ', '.join(item for item in order_items if 'my_activity_date_deadline' not in item)

        # Search leads linked to those activities and order them. See docstring
        # of this method for more details.
        search_res = super(Lead, self).search(my_lead_domain, offset=0, limit=None, order=my_lead_order, count=count)
        my_lead_ids_ordered = sorted(search_res.ids, key=lambda lead_id: my_lead_mapping[lead_id], reverse=not activity_asc)

        # keep only requested window (offset + limit, or offset+)
        my_lead_ids_keep = my_lead_ids_ordered[offset:(offset + limit)] if limit else my_lead_ids_ordered[offset:]
        # keep list of already skipped lead ids to exclude them from future search
        my_lead_ids_skip = my_lead_ids_ordered[:(offset + limit)] if limit else my_lead_ids_ordered

        # do not go further if limit is achieved
        if limit and len(my_lead_ids_keep) >= limit:
            return self.browse(my_lead_ids_keep)

        # Fill with remaining leads. If a limit is given, simply remove count of
        # already fetched. Otherwise keep none. If an offset is set we have to
        # reduce it by already fetch results hereabove. Order is updated to exclude
        # my_activity_date_deadline when calling super() .
        lead_limit = (limit - len(my_lead_ids_keep)) if limit else None
        if offset:
            lead_offset = max((offset - len(search_res), 0))
        else:
            lead_offset = 0
        lead_order = ', '.join(item for item in order_items if 'my_activity_date_deadline' not in item)

        other_lead_res = super(Lead, self).search(
            expression.AND([[('id', 'not in', my_lead_ids_skip)], args]),
            offset=lead_offset, limit=lead_limit, order=lead_order, count=count
        )
        return self.browse(my_lead_ids_keep) + other_lead_res

    def _handle_won_lost(self, vals):
        """ This method handle the state changes :
        - To lost : We need to increment corresponding lost count in scoring frequency table
        - To won : We need to increment corresponding won count in scoring frequency table
        - From lost to Won : We need to decrement corresponding lost count + increment
          corresponding won count in scoring frequency table.
        - From won to lost : We need to decrement corresponding won count + increment
          corresponding lost count in scoring frequency table."""
        Lead = self.env['crm.lead']
        leads_reach_won = Lead
        leads_leave_won = Lead
        leads_reach_lost = Lead
        leads_leave_lost = Lead
        won_stage_ids = self.env['crm.stage'].search([('is_won', '=', True)]).ids
        for lead in self:
            if 'stage_id' in vals:
                if vals['stage_id'] in won_stage_ids:
                    # probability 0 means the lead was previously lost
                    if lead.probability == 0:
                        leads_leave_lost += lead
                    leads_reach_won += lead
                elif lead.stage_id.id in won_stage_ids and lead.active:  # a lead can be lost at won_stage
                    leads_leave_won += lead
            if 'active' in vals:
                if not vals['active'] and lead.active:  # archive lead
                    if lead.stage_id.id in won_stage_ids and lead not in leads_leave_won:
                        leads_leave_won += lead
                    leads_reach_lost += lead
                elif vals['active'] and not lead.active:  # restore lead
                    leads_leave_lost += lead

        leads_reach_won._pls_increment_frequencies(to_state='won')
        leads_leave_won._pls_increment_frequencies(from_state='won')
        leads_reach_lost._pls_increment_frequencies(to_state='lost')
        leads_leave_lost._pls_increment_frequencies(from_state='lost')

    @api.returns('self', lambda value: value.id)
    def copy(self, default=None):
        self.ensure_one()
        # set default value in context, if not already set (Put stage to 'new' stage)
        context = dict(self._context)
        context.setdefault('default_type', self.type)
        context.setdefault('default_team_id', self.team_id.id)
        # Set date_open to today if it is an opp
        default = default or {}
        default['date_open'] = fields.Datetime.now() if self.type == 'opportunity' else False
        # Do not assign to an archived user
        if not self.user_id.active:
            default['user_id'] = False
        # do not duplicate recurring revenue data for users without the feature
        if not self.env.user.has_group('crm.group_use_recurring_revenues'):
            default['recurring_revenue'] = 0
            default['recurring_plan'] = False
        return super(Lead, self.with_context(context)).copy(default=default)

    def unlink(self):
        """ Update meetings when removing opportunities, otherwise you have
        a link to a record that does not lead anywhere.
""" meetings = self.env['calendar.event'].search([ ('res_id', 'in', self.ids), ('res_model', '=', self._name), ]) if meetings: meetings.write({ 'res_id': False, 'res_model_id': False, }) return super(Lead, self).unlink() @api.model def _fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False): if self._context.get('opportunity_id'): opportunity = self.browse(self._context['opportunity_id']) action = opportunity.get_formview_action() if action.get('views') and any(view_id for view_id in action['views'] if view_id[1] == view_type): view_id = next(view_id[0] for view_id in action['views'] if view_id[1] == view_type) res = super(Lead, self)._fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu) if view_type == 'form': res['arch'] = self._fields_view_get_address(res['arch']) return res @api.model def _read_group_stage_ids(self, stages, domain, order): # retrieve team_id from the context and write the domain # - ('id', 'in', stages.ids): add columns that should be present # - OR ('fold', '=', False): add default columns that are not folded # - OR ('team_ids', '=', team_id), ('fold', '=', False) if team_id: add team columns that are not folded team_id = self._context.get('default_team_id') if team_id: search_domain = ['|', ('id', 'in', stages.ids), '|', ('team_id', '=', False), ('team_id', '=', team_id)] else: search_domain = ['|', ('id', 'in', stages.ids), ('team_id', '=', False)] # perform search stage_ids = stages._search(search_domain, order=order, access_rights_uid=SUPERUSER_ID) return stages.browse(stage_ids) def _stage_find(self, team_id=False, domain=None, order='sequence, id', limit=1): """ Determine the stage of the current lead with its teams, the given domain and the given team_id :param team_id :param domain : base search domain for stage :param order : base search order for stage :param limit : base search limit for stage :returns crm.stage recordset """ # collect all team_ids by adding given one, 
and the ones related to the current leads team_ids = set() if team_id: team_ids.add(team_id) for lead in self: if lead.team_id: team_ids.add(lead.team_id.id) # generate the domain if team_ids: search_domain = ['|', ('team_id', '=', False), ('team_id', 'in', list(team_ids))] else: search_domain = [('team_id', '=', False)] # AND with the domain in parameter if domain: search_domain += list(domain) # perform search, return the first found return self.env['crm.stage'].search(search_domain, order=order, limit=limit) # ------------------------------------------------------------ # ACTIONS # ------------------------------------------------------------ def toggle_active(self): """ When archiving: mark probability as 0. When re-activating update probability again, for leads and opportunities. """ res = super(Lead, self).toggle_active() activated = self.filtered(lambda lead: lead.active) archived = self.filtered(lambda lead: not lead.active) if activated: activated.write({'lost_reason': False}) activated._compute_probabilities() if archived: archived.write({'probability': 0, 'automated_probability': 0}) return res def action_set_lost(self, **additional_values): """ Lost semantic: probability = 0 or active = False """ res = self.action_archive() if additional_values: self.write(dict(additional_values)) return res def action_set_won(self): """ Won semantic: probability = 100 (active untouched) """ self.action_unarchive() # group the leads by team_id, in order to write once by values couple (each write leads to frequency increment) leads_by_won_stage = {} for lead in self: won_stages = self._stage_find(domain=[('is_won', '=', True)], limit=None) # ABD : We could have a mixed pipeline, with "won" stages being separated by "standard" # stages. In the future, we may want to prevent any "standard" stage to have a higher # sequence than any "won" stage. 
But while this is not the case, searching # for the "won" stage while alterning the sequence order (see below) will correctly # handle such a case : # stage sequence : [x] [x (won)] [y] [y (won)] [z] [z (won)] # when in stage [y] and marked as "won", should go to the stage [y (won)], # not in [x (won)] nor [z (won)] stage_id = next((stage for stage in won_stages if stage.sequence > lead.stage_id.sequence), None) if not stage_id: stage_id = next((stage for stage in reversed(won_stages) if stage.sequence <= lead.stage_id.sequence), won_stages) if stage_id in leads_by_won_stage: leads_by_won_stage[stage_id] += lead else: leads_by_won_stage[stage_id] = lead for won_stage_id, leads in leads_by_won_stage.items(): leads.write({'stage_id': won_stage_id.id, 'probability': 100}) return True def action_set_automated_probability(self): self.write({'probability': self.automated_probability}) def action_set_won_rainbowman(self): self.ensure_one() self.action_set_won() message = self._get_rainbowman_message() if message: return { 'effect': { 'fadeout': 'slow', 'message': message, 'img_url': '/web/image/%s/%s/image_1024' % (self.team_id.user_id._name, self.team_id.user_id.id) if self.team_id.user_id.image_1024 else '/web/static/img/smile.svg', 'type': 'rainbow_man', } } return True def get_rainbowman_message(self): self.ensure_one() if self.stage_id.is_won: return self._get_rainbowman_message() return False def _get_rainbowman_message(self): if not self.user_id or not self.team_id: return False if not self.expected_revenue: # Show rainbow man for the first won lead of a salesman, even if expected revenue is not set. 
It is not # very often that leads without revenues are marked won, so simply get count using ORM instead of query today = fields.Datetime.today() user_won_leads_count = self.search_count([ ('type', '=', 'opportunity'), ('user_id', '=', self.user_id.id), ('probability', '=', 100), ('date_closed', '>=', date_utils.start_of(today, 'year')), ('date_closed', '<', date_utils.end_of(today, 'year')), ]) if user_won_leads_count == 1: return _('Go, go, go! Congrats for your first deal.') return False self.flush() # flush fields to make sure DB is up to date query = """ SELECT SUM(CASE WHEN user_id = %(user_id)s THEN 1 ELSE 0 END) as total_won, MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '30 days' AND user_id = %(user_id)s THEN expected_revenue ELSE 0 END) as max_user_30, MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '7 days' AND user_id = %(user_id)s THEN expected_revenue ELSE 0 END) as max_user_7, MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '30 days' AND team_id = %(team_id)s THEN expected_revenue ELSE 0 END) as max_team_30, MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '7 days' AND team_id = %(team_id)s THEN expected_revenue ELSE 0 END) as max_team_7 FROM crm_lead WHERE type = 'opportunity' AND active = True AND probability = 100 AND DATE_TRUNC('year', date_closed) = DATE_TRUNC('year', CURRENT_DATE) AND (user_id = %(user_id)s OR team_id = %(team_id)s) """ self.env.cr.execute(query, {'user_id': self.user_id.id, 'team_id': self.team_id.id}) query_result = self.env.cr.dictfetchone() message = False if query_result['total_won'] == 1: message = _('Go, go, go! Congrats for your first deal.') elif query_result['max_team_30'] == self.expected_revenue: message = _('Boom! Team record for the past 30 days.') elif query_result['max_team_7'] == self.expected_revenue: message = _('Yeah! 
Deal of the last 7 days for the team.') elif query_result['max_user_30'] == self.expected_revenue: message = _('You just beat your personal record for the past 30 days.') elif query_result['max_user_7'] == self.expected_revenue: message = _('You just beat your personal record for the past 7 days.') return message def action_schedule_meeting(self, smart_calendar=True): """ Open meeting's calendar view to schedule meeting on current opportunity. :param smart_calendar: boolean, to set to False if the view should not try to choose relevant mode and initial date for calendar view, see ``_get_opportunity_meeting_view_parameters`` :return dict: dictionary value for created Meeting view """ self.ensure_one() action = self.env["ir.actions.actions"]._for_xml_id("calendar.action_calendar_event") partner_ids = self.env.user.partner_id.ids if self.partner_id: partner_ids.append(self.partner_id.id) current_opportunity_id = self.id if self.type == 'opportunity' else False action['context'] = { 'search_default_opportunity_id': current_opportunity_id, 'default_opportunity_id': current_opportunity_id, 'default_partner_id': self.partner_id.id, 'default_partner_ids': partner_ids, 'default_team_id': self.team_id.id, 'default_name': self.name, } # 'Smart' calendar view : get the most relevant time period to display to the user. if current_opportunity_id and smart_calendar: mode, initial_date = self._get_opportunity_meeting_view_parameters() action['context'].update({'default_mode': mode, 'initial_date': initial_date}) return action def _get_opportunity_meeting_view_parameters(self): """ Return the most relevant parameters for calendar view when viewing meetings linked to an opportunity. If there are any meetings that are not finished yet, only consider those meetings, since the user would prefer no to see past meetings. Otherwise, consider all meetings. Allday events datetimes are used without taking tz into account. 
-If there is no event, return week mode and false (The calendar will target 'now' by default) -If there is only one, return week mode and date of the start of the event. -If there are several events entirely on the same week, return week mode and start of first event. -Else, return month mode and the date of the start of first event as initial date. (If they are on the same month, this will display that month and therefore show all of them, which is expected) :return tuple(mode, initial_date) - mode: selected mode of the calendar view, 'week' or 'month' - initial_date: date of the start of the first relevant meeting. The calendar will target that date. """ self.ensure_one() meeting_results = self.env["calendar.event"].search_read([('opportunity_id', '=', self.id)], ['start', 'stop', 'allday']) if not meeting_results: return "week", False user_tz = self.env.user.tz or self.env.context.get('tz') user_pytz = pytz.timezone(user_tz) if user_tz else pytz.utc # meeting_dts will contain one tuple of datetimes per meeting : (Start, Stop) # meetings_dts and now_dt are as per user time zone. meeting_dts = [] now_dt = datetime.now().astimezone(user_pytz).replace(tzinfo=None) # When creating an allday meeting, whatever the TZ, it will be stored the same e.g. 00.00.00->23.59.59 in utc or # 08.00.00->18.00.00. Therefore we must not put it back in the user tz but take it raw. for meeting in meeting_results: if meeting.get('allday'): meeting_dts.append((meeting.get('start'), meeting.get('stop'))) else: meeting_dts.append((meeting.get('start').astimezone(user_pytz).replace(tzinfo=None), meeting.get('stop').astimezone(user_pytz).replace(tzinfo=None))) # If there are meetings that are still ongoing or to come, only take those. 
unfinished_meeting_dts = [meeting_dt for meeting_dt in meeting_dts if meeting_dt[1] >= now_dt] relevant_meeting_dts = unfinished_meeting_dts if unfinished_meeting_dts else meeting_dts relevant_meeting_count = len(relevant_meeting_dts) if relevant_meeting_count == 1: return "week", relevant_meeting_dts[0][0].date() else: # Range of meetings earliest_start_dt = min(relevant_meeting_dt[0] for relevant_meeting_dt in relevant_meeting_dts) latest_stop_dt = max(relevant_meeting_dt[1] for relevant_meeting_dt in relevant_meeting_dts) # The week start day depends on language. We fetch the week_start of user's language. 1 is monday. lang_week_start = self.env["res.lang"].search_read([('code', '=', self.env.user.lang)], ['week_start']) # We substract one to make week_start_index range 0-6 instead of 1-7 week_start_index = int(lang_week_start[0].get('week_start', '1')) - 1 # We compute the weekday of earliest_start_dt according to week_start_index. earliest_start_dt_index will be 0 if we are on the # first day of the week and 6 on the last. weekday() returns 0 for monday and 6 for sunday. For instance, Tuesday in UK is the # third day of the week, so earliest_start_dt_index is 2, and remaining_days_in_week includes tuesday, so it will be 5. # The first term 7 is there to avoid negative left side on the modulo, improving readability. earliest_start_dt_weekday = (7 + earliest_start_dt.weekday() - week_start_index) % 7 remaining_days_in_week = 7 - earliest_start_dt_weekday # We compute the start of the week following the one containing the start of the first meeting. next_week_start_date = earliest_start_dt.date() + timedelta(days=remaining_days_in_week) # Latest_stop_dt must be before the start of following week. Limit is therefore set at midnight of first day, included. 
meetings_in_same_week = latest_stop_dt <= datetime(next_week_start_date.year, next_week_start_date.month, next_week_start_date.day, 0, 0, 0) if meetings_in_same_week: return "week", earliest_start_dt.date() else: return "month", earliest_start_dt.date() def action_reschedule_meeting(self): self.ensure_one() action = self.action_schedule_meeting(smart_calendar=False) next_activity = self.activity_ids.filtered(lambda activity: activity.user_id == self.env.user)[:1] if next_activity.calendar_event_id: action['context']['initial_date'] = next_activity.calendar_event_id.start return action def action_show_potential_duplicates(self): """ Open kanban view to display duplicate leads or opportunity. :return dict: dictionary value for created kanban view """ self.ensure_one() action = self.env["ir.actions.actions"]._for_xml_id("crm.crm_lead_opportunities") action['domain'] = [('id', 'in', self.duplicate_lead_ids.ids)] action['context'] = { 'active_test': False, 'create': False } return action def action_snooze(self): self.ensure_one() today = date.today() my_next_activity = self.activity_ids.filtered(lambda activity: activity.user_id == self.env.user)[:1] if my_next_activity: if my_next_activity.date_deadline < today: date_deadline = today + timedelta(days=7) else: date_deadline = my_next_activity.date_deadline + timedelta(days=7) my_next_activity.write({ 'date_deadline': date_deadline }) return True # ------------------------------------------------------------ # VIEWS # ------------------------------------------------------------ def redirect_lead_opportunity_view(self): self.ensure_one() return { 'name': _('Lead or Opportunity'), 'view_mode': 'form', 'res_model': 'crm.lead', 'domain': [('type', '=', self.type)], 'res_id': self.id, 'view_id': False, 'type': 'ir.actions.act_window', 'context': {'default_type': self.type} } @api.model def get_empty_list_help(self, help): """ This method returns the action helpers for the leads. 
If help is already provided on the action, the same is returned. Otherwise, we build the help message which contains the alias responsible for creating the lead (if available) and return it. """ if not is_html_empty(help): return help help_title, sub_title = "", "" if self._context.get('default_type') == 'lead': help_title = _('Create a new lead') else: help_title = _('Create an opportunity to start playing with your pipeline.') alias_record = self.env['mail.alias'].search([ ('alias_name', '!=', False), ('alias_name', '!=', ''), ('alias_model_id.model', '=', 'crm.lead'), ('alias_parent_model_id.model', '=', 'crm.team'), ('alias_force_thread_id', '=', False) ], limit=1) if alias_record and alias_record.alias_domain and alias_record.alias_name: email = '%s@%s' % (alias_record.alias_name, alias_record.alias_domain) email_link = "<b><a href='mailto:%s'>%s</a></b>" % (email, email) sub_title = _('Use the top left <i>Create</i> button, or send an email to %s to test the email gateway.') % (email_link) return '<p class="o_view_nocontent_smiling_face">%s</p><p class="oe_view_nocontent_alias">%s</p>' % (help_title, sub_title) # ------------------------------------------------------------ # BUSINESS # ------------------------------------------------------------ def log_meeting(self, meeting_subject, meeting_date, duration): if not duration: duration = _('unknown') else: duration = str(duration) meet_date = fields.Datetime.from_string(meeting_date) meeting_usertime = fields.Datetime.to_string(fields.Datetime.context_timestamp(self, meet_date)) html_time = "<time datetime='%s+00:00'>%s</time>" % (meeting_date, meeting_usertime) message = _("Meeting scheduled at '%s'<br> Subject: %s <br> Duration: %s hours") % (html_time, meeting_subject, duration) return self.message_post(body=message) # ------------------------------------------------------------ # MERGE AND CONVERT LEADS / OPPORTUNITIES # ------------------------------------------------------------ def _merge_data(self, 
fnames=None): """ Prepare lead/opp data into a dictionary for merging. Different types of fields are processed in different ways: - text: all the values are concatenated - m2m and o2m: those fields aren't processed - m2o: the first not null value prevails (the other are dropped) - any other type of field: same as m2o :param fields: list of fields to process :return dict data: contains the merged values of the new opportunity """ if fnames is None: fnames = self._merge_get_fields() fcallables = self._merge_get_fields_specific() # helpers def _get_first_not_null(attr, opportunities): value = False for opp in opportunities: if opp[attr]: value = opp[attr].id if isinstance(opp[attr], models.BaseModel) else opp[attr] break return value # process the field's values data = {} for field_name in fnames: field = self._fields.get(field_name) if field is None: continue fcallable = fcallables.get(field_name) if fcallable and callable(fcallable): data[field_name] = fcallable(field_name, self) elif not fcallable and field.type in ('many2many', 'one2many'): continue else: data[field_name] = _get_first_not_null(field_name, self) # take the first not null return data def _merge_notify_get_merged_fields_message(self): """ Generate the message body with the changed values :param fields : list of fields to track :returns a list of message bodies for the corresponding leads """ bodies = [] for lead in self: title = "%s : %s\n" % (_('Merged opportunity') if lead.type == 'opportunity' else _('Merged lead'), lead.name) body = [title] _fields = self.env['ir.model.fields'].sudo().search([ ('name', 'in', self._merge_get_fields()), ('model_id.model', '=', lead._name), ]) for field in _fields: value = getattr(lead, field.name, False) if field.ttype == 'selection': selections = lead.fields_get()[field.name]['selection'] value = next((v[1] for v in selections if v[0] == value), value) elif field.ttype == 'many2one': if value: value = value.sudo().display_name elif field.ttype == 'many2many': if 
value: value = ','.join( val.display_name for val in value.sudo() ) body.append("%s: %s" % (field.field_description, value or '')) bodies.append("<br/>".join(body + ['<br/>'])) return bodies def _merge_notify(self, opportunities): """ Post a message gathering merged leads/opps informations. It explains which fields has been merged and their new value. `self` is the resulting merge crm.lead record. :param opportunities: see ``_merge_dependences`` """ # TODO JEM: mail template should be used instead of fix body, subject text self.ensure_one() merge_message = _('Merged leads') if self.type == 'lead' else _('Merged opportunities') subject = merge_message + ": " + ", ".join(opportunities.mapped('name')) # message bodies message_bodies = opportunities._merge_notify_get_merged_fields_message() message_body = "\n\n".join(message_bodies) return self.message_post(body=message_body, subject=subject) def merge_opportunity(self, user_id=False, team_id=False, auto_unlink=True): """ Merge opportunities in one. Different cases of merge: - merge leads together = 1 new lead - merge at least 1 opp with anything else (lead or opp) = 1 new opp The resulting lead/opportunity will be the most important one (based on its confidence level) updated with values from other opportunities to merge. :param user_id : the id of the saleperson. If not given, will be determined by `_merge_data`. :param team : the id of the Sales Team. If not given, will be determined by `_merge_data`. :return crm.lead record resulting of th merge """ return self._merge_opportunity(user_id=user_id, team_id=team_id, auto_unlink=auto_unlink) def _merge_opportunity(self, user_id=False, team_id=False, auto_unlink=True, max_length=5): """ Private merging method. This one allows to relax rules on record set length allowing to merge more than 5 opportunities at once if requested. This should not be called by action buttons. See ``merge_opportunity`` for more details. 
""" if len(self.ids) <= 1: raise UserError(_('Please select more than one element (lead or opportunity) from the list view.')) if max_length and len(self.ids) > max_length and not self.env.is_superuser(): raise UserError(_("To prevent data loss, Leads and Opportunities can only be merged by groups of %(max_length)s.", max_length=max_length)) opportunities = self._sort_by_confidence_level(reverse=True) # get SORTED recordset of head and tail, and complete list opportunities_head = opportunities[0] opportunities_tail = opportunities[1:] # merge all the sorted opportunity. This means the value of # the first (head opp) will be a priority. merged_data = opportunities._merge_data(self._merge_get_fields()) # force value for saleperson and Sales Team if user_id: merged_data['user_id'] = user_id if team_id: merged_data['team_id'] = team_id # log merge message opportunities_head._merge_notify(opportunities_tail) # merge other data (mail.message, attachments, ...) from tail into head opportunities_head._merge_dependences(opportunities_tail) # check if the stage is in the stages of the Sales Team. 
If not, assign the stage with the lowest sequence if merged_data.get('team_id'): team_stage_ids = self.env['crm.stage'].search(['|', ('team_id', '=', merged_data['team_id']), ('team_id', '=', False)], order='sequence, id') if merged_data.get('stage_id') not in team_stage_ids.ids: merged_data['stage_id'] = team_stage_ids[0].id if team_stage_ids else False # write merged data into first opportunity opportunities_head.write(merged_data) # delete tail opportunities # we use the SUPERUSER to avoid access rights issues because as the user had the rights to see the records it should be safe to do so if auto_unlink: opportunities_tail.sudo().unlink() return opportunities_head def _merge_get_fields_specific(self): return { 'description': lambda fname, leads: '<br/><br/>'.join(desc for desc in leads.mapped('description') if not is_html_empty(desc)), 'type': lambda fname, leads: 'opportunity' if any(lead.type == 'opportunity' for lead in leads) else 'lead', 'priority': lambda fname, leads: max(leads.mapped('priority')) if leads else False, } def _merge_get_fields(self): return list(CRM_LEAD_FIELDS_TO_MERGE) + list(self._merge_get_fields_specific().keys()) def _merge_dependences(self, opportunities): """ Merge dependences (messages, attachments,activities, calendar events, ...). These dependences will be transfered to `self` considered as the master lead. :param opportunities : recordset of opportunities to transfer. Does not include `self` which is the target crm.lead being the result of the merge; """ self.ensure_one() self._merge_dependences_history(opportunities) self._merge_dependences_attachments(opportunities) self._merge_dependences_calendar_events(opportunities) def _merge_dependences_history(self, opportunities): """ Move history from the given opportunities to the current one. `self` is the crm.lead record destination for message of `opportunities`. 
This method moves * messages * activities :param opportunities: see ``_merge_dependences`` """ self.ensure_one() for opportunity in opportunities: for message in opportunity.message_ids: if message.subject: subject = _("From %(source_name)s : %(source_subject)s", source_name=opportunity.name, source_subject=message.subject) else: subject = _("From %(source_name)s", source_name=opportunity.name) message.write({ 'res_id': self.id, 'subject': subject, }) opportunities.activity_ids.write({ 'res_id': self.id, }) return True def _merge_dependences_attachments(self, opportunities): """ Move attachments of given opportunities to the current one `self`, and rename the attachments having same name than native ones. :param opportunities: see ``_merge_dependences`` """ self.ensure_one() all_attachments = self.env['ir.attachment'].search([ ('res_model', '=', self._name), ('res_id', 'in', opportunities.ids) ]) for opportunity in opportunities: attachments = all_attachments.filtered(lambda attach: attach.res_id == opportunity.id) for attachment in attachments: attachment.write({ 'res_id': self.id, 'name': _("%(attach_name)s (from %(lead_name)s)", attach_name=attachment.name, lead_name=opportunity.name[:20] ) }) return True def _merge_dependences_calendar_events(self, opportunities): """ Move calender.event from the given opportunities to the current one. `self` is the crm.lead record destination for event of `opportunities`. 
:param opportunities: see ``merge_dependences`` """ self.ensure_one() meetings = self.env['calendar.event'].search([('opportunity_id', 'in', opportunities.ids)]) return meetings.write({ 'res_id': self.id, 'opportunity_id': self.id, }) # CONVERT # ---------------------------------------------------------------------- def _convert_opportunity_data(self, customer, team_id=False): """ Extract the data from a lead to create the opportunity :param customer : res.partner record :param team_id : identifier of the Sales Team to determine the stage """ new_team_id = team_id if team_id else self.team_id.id upd_values = { 'type': 'opportunity', 'date_open': fields.Datetime.now(), 'date_conversion': fields.Datetime.now(), } if customer != self.partner_id: upd_values['partner_id'] = customer.id if customer else False if not self.stage_id: stage = self._stage_find(team_id=new_team_id) upd_values['stage_id'] = stage.id return upd_values def convert_opportunity(self, partner_id, user_ids=False, team_id=False): customer = False if partner_id: customer = self.env['res.partner'].browse(partner_id) for lead in self: if not lead.active or lead.probability == 100: continue vals = lead._convert_opportunity_data(customer, team_id) lead.write(vals) if user_ids or team_id: self._handle_salesmen_assignment(user_ids=user_ids, team_id=team_id) return True def _handle_partner_assignment(self, force_partner_id=False, create_missing=True): """ Update customer (partner_id) of leads. Purpose is to set the same partner on most leads; either through a newly created partner either through a given partner_id. 
:param int force_partner_id: if set, update all leads to that customer; :param create_missing: for leads without customer, create a new one based on lead information; """ for lead in self: if force_partner_id: lead.partner_id = force_partner_id if not lead.partner_id and create_missing: partner = lead._create_customer() lead.partner_id = partner.id def _handle_salesmen_assignment(self, user_ids=False, team_id=False): """ Assign salesmen and salesteam to a batch of leads. If there are more leads than salesmen, these salesmen will be assigned in round-robin. E.g. 4 salesmen (S1, S2, S3, S4) for 6 leads (L1, L2, ... L6) will assigned as following: L1 - S1, L2 - S2, L3 - S3, L4 - S4, L5 - S1, L6 - S2. :param list user_ids: salesmen to assign :param int team_id: salesteam to assign """ update_vals = {'team_id': team_id} if team_id else {} if not user_ids and team_id: self.write(update_vals) else: lead_ids = self.ids steps = len(user_ids) # pass 1 : lead_ids[0:6:3] = [L1,L4] # pass 2 : lead_ids[1:6:3] = [L2,L5] # pass 3 : lead_ids[2:6:3] = [L3,L6] # ... for idx in range(0, steps): subset_ids = lead_ids[idx:len(lead_ids):steps] update_vals['user_id'] = user_ids[idx] self.env['crm.lead'].browse(subset_ids).write(update_vals) # ------------------------------------------------------------ # MERGE / CONVERT TOOLS # --------------------------------------------------------- # CLASSIFICATION TOOLS # -------------------------------------------------- def _get_lead_duplicates(self, partner=None, email=None, include_lost=False): """ Search for leads that seem duplicated based on partner / email. :param partner : optional customer when searching duplicated :param email: email (possibly formatted) to search :param boolean include_lost: if True, search includes archived opportunities (still only active leads are considered). 
If False, search for active and not won leads and opportunities; """ if not email and not partner: return self.env['crm.lead'] domain = [] for normalized_email in [tools.email_normalize(email) for email in tools.email_split(email)]: domain.append(('email_normalized', '=', normalized_email)) if partner: domain.append(('partner_id', '=', partner.id)) if not domain: return self.env['crm.lead'] domain = ['|'] * (len(domain) - 1) + domain if include_lost: domain += ['|', ('type', '=', 'opportunity'), ('active', '=', True)] else: domain += ['&', ('active', '=', True), '|', ('stage_id', '=', False), ('stage_id.is_won', '=', False)] return self.with_context(active_test=False).search(domain) def _sort_by_confidence_level(self, reverse=False): """ Sorting the leads/opps according to the confidence level to it being won. It is sorted following this incremental heuristics : * "not lost" first (inactive leads are lost); normally all leads should be active but in case lost one, they are always last. Inactive opportunities are considered as valid; * opportunity is more reliable than a lead which is a pre-stage used mainly for first classification; * stage sequence: the higher the better as it indicates we are moving towards won stage; * probability: the higher the better as it is more likely to be won; * ID: the higher the better when all other parameters are equal. We consider newer leads to be more reliable; """ def opps_key(opportunity): return opportunity.type == 'opportunity' or opportunity.active, \ opportunity.type == 'opportunity', \ opportunity.stage_id.sequence, \ opportunity.probability, \ -opportunity._origin.id return self.sorted(key=opps_key, reverse=reverse) # CUSTOMER TOOLS # -------------------------------------------------- def _find_matching_partner(self, email_only=False): """ Try to find a matching partner with available information on the lead, using notably customer's name, email, ... :param email_only: Only find a matching based on the email. 
To use for automatic process where ilike based on name can be too dangerous :return: partner browse record """ self.ensure_one() partner = self.partner_id if not partner and self.email_from: partner = self.env['res.partner'].search([('email', '=', self.email_from)], limit=1) if not partner and not email_only: # search through the existing partners based on the lead's partner or contact name # to be aligned with _create_customer, search on lead's name as last possibility for customer_potential_name in [self[field_name] for field_name in ['partner_name', 'contact_name', 'name'] if self[field_name]]: partner = self.env['res.partner'].search([('name', 'ilike', '%' + customer_potential_name + '%')], limit=1) if partner: break return partner def _create_customer(self): """ Create a partner from lead data and link it to the lead. :return: newly-created partner browse record """ Partner = self.env['res.partner'] contact_name = self.contact_name if not contact_name: contact_name = Partner._parse_partner_name(self.email_from)[0] if self.email_from else False if self.partner_name: partner_company = Partner.create(self._prepare_customer_values(self.partner_name, is_company=True)) elif self.partner_id: partner_company = self.partner_id else: partner_company = None if contact_name: return Partner.create(self._prepare_customer_values(contact_name, is_company=False, parent_id=partner_company.id if partner_company else False)) if partner_company: return partner_company return Partner.create(self._prepare_customer_values(self.name, is_company=False)) def _prepare_customer_values(self, partner_name, is_company=False, parent_id=False): """ Extract data from lead to create a partner. 
:param name : furtur name of the partner :param is_company : True if the partner is a company :param parent_id : id of the parent partner (False if no parent) :return: dictionary of values to give at res_partner.create() """ email_parts = tools.email_split(self.email_from) res = { 'name': partner_name, 'user_id': self.env.context.get('default_user_id') or self.user_id.id, 'comment': self.description, 'team_id': self.team_id.id, 'parent_id': parent_id, 'phone': self.phone, 'mobile': self.mobile, 'email': email_parts[0] if email_parts else False, 'title': self.title.id, 'function': self.function, 'street': self.street, 'street2': self.street2, 'zip': self.zip, 'city': self.city, 'country_id': self.country_id.id, 'state_id': self.state_id.id, 'website': self.website, 'is_company': is_company, 'type': 'contact' } if self.lang_id: res['lang'] = self.lang_id.code return res # ------------------------------------------------------------ # MAILING # ------------------------------------------------------------ def _creation_subtype(self): return self.env.ref('crm.mt_lead_create') def _track_subtype(self, init_values): self.ensure_one() if 'stage_id' in init_values and self.probability == 100 and self.stage_id: return self.env.ref('crm.mt_lead_won') elif 'lost_reason' in init_values and self.lost_reason: return self.env.ref('crm.mt_lead_lost') elif 'stage_id' in init_values: return self.env.ref('crm.mt_lead_stage') elif 'active' in init_values and self.active: return self.env.ref('crm.mt_lead_restored') elif 'active' in init_values and not self.active: return self.env.ref('crm.mt_lead_lost') return super(Lead, self)._track_subtype(init_values) def _notify_get_groups(self, msg_vals=None): """ Handle salesman recipients that can convert leads into opportunities and set opportunities as won / lost. 
""" groups = super(Lead, self)._notify_get_groups(msg_vals=msg_vals) local_msg_vals = dict(msg_vals or {}) self.ensure_one() if self.type == 'lead': convert_action = self._notify_get_action_link('controller', controller='/lead/convert', **local_msg_vals) salesman_actions = [{'url': convert_action, 'title': _('Convert to opportunity')}] else: won_action = self._notify_get_action_link('controller', controller='/lead/case_mark_won', **local_msg_vals) lost_action = self._notify_get_action_link('controller', controller='/lead/case_mark_lost', **local_msg_vals) salesman_actions = [ {'url': won_action, 'title': _('Won')}, {'url': lost_action, 'title': _('Lost')}] if self.team_id: custom_params = dict(local_msg_vals, res_id=self.team_id.id, model=self.team_id._name) salesman_actions.append({ 'url': self._notify_get_action_link('view', **custom_params), 'title': _('Sales Team Settings') }) salesman_group_id = self.env.ref('sales_team.group_sale_salesman').id new_group = ( 'group_sale_salesman', lambda pdata: pdata['type'] == 'user' and salesman_group_id in pdata['groups'], { 'actions': salesman_actions, }) return [new_group] + groups def _notify_get_reply_to(self, default=None, records=None, company=None, doc_names=None): """ Override to set alias of lead and opportunities to their sales team if any. 
""" aliases = self.mapped('team_id').sudo()._notify_get_reply_to(default=default, records=None, company=company, doc_names=None) res = {lead.id: aliases.get(lead.team_id.id) for lead in self} leftover = self.filtered(lambda rec: not rec.team_id) if leftover: res.update(super(Lead, leftover)._notify_get_reply_to(default=default, records=None, company=company, doc_names=doc_names)) return res def _message_get_default_recipients(self): return {r.id: { 'partner_ids': [], 'email_to': r.email_normalized, 'email_cc': False} for r in self} def _message_get_suggested_recipients(self): recipients = super(Lead, self)._message_get_suggested_recipients() try: for lead in self: if lead.partner_id: lead._message_add_suggested_recipient(recipients, partner=lead.partner_id, reason=_('Customer')) elif lead.email_from: lead._message_add_suggested_recipient(recipients, email=lead.email_from, reason=_('Customer Email')) except AccessError: # no read access rights -> just ignore suggested recipients because this imply modifying followers pass return recipients @api.model def message_new(self, msg_dict, custom_values=None): """ Overrides mail_thread message_new that is called by the mailgateway through message_process. This override updates the document according to the email. """ # remove default author when going through the mail gateway. Indeed we # do not want to explicitly set an user as responsible. We prefer that # assignment is done automatically (scoring) or manually. Otherwise it # would always be either root (gateway user) either alias owner (through # alias_user_id). It also allows to exclude portal / public users. 
self = self.with_context(default_user_id=False) if custom_values is None: custom_values = {} defaults = { 'name': msg_dict.get('subject') or _("No Subject"), 'email_from': msg_dict.get('from'), 'partner_id': msg_dict.get('author_id', False), } if msg_dict.get('priority') in dict(crm_stage.AVAILABLE_PRIORITIES): defaults['priority'] = msg_dict.get('priority') defaults.update(custom_values) return super(Lead, self).message_new(msg_dict, custom_values=defaults) def _message_post_after_hook(self, message, msg_vals): if self.email_from and not self.partner_id: # we consider that posting a message with a specified recipient (not a follower, a specific one) # on a document without customer means that it was created through the chatter using # suggested recipients. This heuristic allows to avoid ugly hacks in JS. new_partner = message.partner_ids.filtered(lambda partner: partner.email == self.email_from) if new_partner: self.search([ ('partner_id', '=', False), ('email_from', '=', new_partner.email), ('stage_id.fold', '=', False)]).write({'partner_id': new_partner.id}) return super(Lead, self)._message_post_after_hook(message, msg_vals) def _message_partner_info_from_emails(self, emails, link_mail=False): result = super(Lead, self)._message_partner_info_from_emails(emails, link_mail=link_mail) for partner_info in result: if not partner_info.get('partner_id') and (self.partner_name or self.contact_name): emails = email_re.findall(partner_info['full_name'] or '') email = emails and emails[0] or '' if email and self.email_from and email.lower() == self.email_from.lower(): partner_info['full_name'] = tools.formataddr((self.contact_name or self.partner_name, email)) break return result def _phone_get_number_fields(self): """ Use mobile or phone fields to compute sanitized phone number """ return ['mobile', 'phone'] @api.model def get_import_templates(self): return [{ 'label': _('Import Template for Leads & Opportunities'), 'template': '/crm/static/xls/crm_lead.xls' }] # 
------------------------------------------------------------ # PLS # ------------------------------------------------------------ # Predictive lead scoring is computing the lead probability, based on won and lost leads from the past # Each won/lost lead increments a frequency table, where we store, for each field/value couple, the number of # won and lost leads. # E.g. : A won lead from Belgium will increase the won count of the frequency country_id='Belgium' by 1. # The frequencies are split by team_id, so each team has his own frequencies environment. (Team A doesn't impact B) # There are two main ways to build the frequency table: # - Live Increment: At each Won/lost, we increment directly the frequencies based on the lead values. # Done right BEFORE writing the lead as won or lost. # We consider a lead that will be marked as won or lost. # Used each time a lead is won or lost, to ensure frequency table is always up to date # - One shot Rebuild: empty the frequency table and rebuild it from scratch, based on every already won/lost leads # Done during cron process. # We consider all the leads that have been already won or lost. # Used in one shot, when modifying the criteria to take into account (fields or reference date) # --------------------------------- # PLS: Probability Computation # --------------------------------- def _pls_get_naive_bayes_probabilities(self, batch_mode=False): """ In machine learning, naive Bayes classifiers (NBC) are a family of simple "probabilistic classifiers" based on applying Bayes theorem with strong (naive) independence assumptions between the variables taken into account. E.g: will TDE eat m&m's depending on his sleep status, the amount of work he has and the fullness of his stomach? As we use experience to compute the statistics, every day, we will register the variables state + the result. 
As the days pass, we will be able to determine, with more and more precision, if TDE will eat m&m's for a specific combination : - did sleep very well, a lot of work and stomach full > Will never happen ! - didn't sleep at all, no work at all and empty stomach > for sure ! Following Bayes' Theorem: the probability that an event occurs (to win) under certain conditions is proportional to the probability to win under each condition separately and the probability to win. We compute a 'Win score' -> P(Won | A∩B) ∝ P(A∩B | Won)*P(Won) OR S(Won | A∩B) = P(A∩B | Won)*P(Won) To compute a percentage of probability to win, we also compute the 'Lost score' that is proportional to the probability to lose under each condition separately and the probability to lose. -> Probability = S(Won | A∩B) / ( S(Won | A∩B) + S(Lost | A∩B) ) See https://www.youtube.com/watch?v=CPqOCI0ahss can help to get a quick and simple example. One issue about NBC is when a event occurence is never observed. E.g: if when TDE has an empty stomach, he always eat m&m's, than the "not eating m&m's when empty stomach' event will never be observed. This is called 'zero frequency' and that leads to division (or at least multiplication) by zero. To avoid this, we add 0.1 in each frequency. With few data, the computation is than not really realistic. The more we have records to analyse, the more the estimation will be precise. :return: probability in percent (and integer rounded) that the lead will be won at the current stage. """ lead_probabilities = {} if not self: return lead_probabilities # Get all leads values, no matter the team_id domain = [] if batch_mode: domain = [ '&', ('active', '=', True), ('id', 'in', self.ids), '|', ('probability', '=', None), '&', ('probability', '<', 100), ('probability', '>', 0) ] leads_values_dict = self._pls_get_lead_pls_values(domain=domain) if not leads_values_dict: return lead_probabilities # Get unique couples to search in frequency table and won leads. 
leads_fields = set() # keep unique fields, as a lead can have multiple tag_ids won_leads = set() won_stage_ids = self.env['crm.stage'].search([('is_won', '=', True)]).ids for lead_id, values in leads_values_dict.items(): for field, value in values['values']: if field == 'stage_id' and value in won_stage_ids: won_leads.add(lead_id) leads_fields.add(field) leads_fields = sorted(leads_fields) # get all variable related records from frequency table, no matter the team_id frequencies = self.env['crm.lead.scoring.frequency'].search([('variable', 'in', list(leads_fields))], order="team_id asc, id") # get all team_ids from frequencies frequency_teams = frequencies.mapped('team_id') frequency_team_ids = [team.id for team in frequency_teams] # 1. Compute each variable value count individually # regroup each variable to be able to compute their own probabilities # As all the variable does not enter into account (as we reject unset values in the process) # each value probability must be computed only with their own variable related total count # special case: for lead for which team_id is not in frequency table or lead with no team_id, # we consider all the records, independently from team_id (this is why we add a result[-1]) result = dict((team_id, dict((field, dict(won_total=0, lost_total=0)) for field in leads_fields)) for team_id in frequency_team_ids) result[-1] = dict((field, dict(won_total=0, lost_total=0)) for field in leads_fields) for frequency in frequencies: field = frequency['variable'] value = frequency['value'] # To avoid that a tag take to much importance if his subset is too small, # we ignore the tag frequencies if we have less than 50 won or lost for this tag. 
if field == 'tag_id' and (frequency['won_count'] + frequency['lost_count']) < 50: continue if frequency.team_id: team_result = result[frequency.team_id.id] team_result[field][value] = {'won': frequency['won_count'], 'lost': frequency['lost_count']} team_result[field]['won_total'] += frequency['won_count'] team_result[field]['lost_total'] += frequency['lost_count'] if value not in result[-1][field]: result[-1][field][value] = {'won': 0, 'lost': 0} result[-1][field][value]['won'] += frequency['won_count'] result[-1][field][value]['lost'] += frequency['lost_count'] result[-1][field]['won_total'] += frequency['won_count'] result[-1][field]['lost_total'] += frequency['lost_count'] # Get all won, lost and total count for all records in frequencies per team_id for team_id in result: result[team_id]['team_won'], \ result[team_id]['team_lost'], \ result[team_id]['team_total'] = self._pls_get_won_lost_total_count(result[team_id]) save_team_id = None p_won, p_lost = 1, 1 for lead_id, lead_values in leads_values_dict.items(): # if stage_id is null, return 0 and bypass computation lead_fields = [value[0] for value in lead_values.get('values', [])] if not 'stage_id' in lead_fields: lead_probabilities[lead_id] = 0 continue # if lead stage is won, return 100 elif lead_id in won_leads: lead_probabilities[lead_id] = 100 continue # team_id not in frequency Table -> convert to -1 lead_team_id = lead_values['team_id'] if lead_values['team_id'] in result else -1 if lead_team_id != save_team_id: save_team_id = lead_team_id team_won = result[save_team_id]['team_won'] team_lost = result[save_team_id]['team_lost'] team_total = result[save_team_id]['team_total'] # if one count = 0, we cannot compute lead probability if not team_won or not team_lost: continue p_won = team_won / team_total p_lost = team_lost / team_total # 2. 
Compute won and lost score using each variable's individual probability s_lead_won, s_lead_lost = p_won, p_lost for field, value in lead_values['values']: field_result = result.get(save_team_id, {}).get(field) value = value.origin if hasattr(value, 'origin') else value value_result = field_result.get(str(value)) if field_result else False if value_result: total_won = team_won if field == 'stage_id' else field_result['won_total'] total_lost = team_lost if field == 'stage_id' else field_result['lost_total'] # if one count = 0, we cannot compute lead probability if not total_won or not total_lost: continue s_lead_won *= value_result['won'] / total_won s_lead_lost *= value_result['lost'] / total_lost # 3. Compute Probability to win lead_probabilities[lead_id] = round(100 * s_lead_won / (s_lead_won + s_lead_lost), 2) return lead_probabilities # --------------------------------- # PLS: Live Increment # --------------------------------- def _pls_increment_frequencies(self, from_state=None, to_state=None): """ When losing or winning a lead, this method is called to increment each PLS parameter related to the lead in won_count (if won) or in lost_count (if lost). This method is also used when reactivating a mistakenly lost lead (using the decrement argument). In this case, the lost count should be de-increment by 1 for each PLS parameter linked ot the lead. Live increment must be done before writing the new values because we need to know the state change (from and to). This would not be an issue for the reach won or reach lost as we just need to increment the frequencies with the final state of the lead. This issue is when the lead leaves a closed state because once the new values have been writen, we do not know what was the previous state that we need to decrement. This is why 'is_won' and 'decrement' parameters are used to describe the from / to change of his state. 
""" new_frequencies_by_team, existing_frequencies_by_team = self._pls_prepare_update_frequency_table(target_state=from_state or to_state) # update frequency table self._pls_update_frequency_table(new_frequencies_by_team, 1 if to_state else -1, existing_frequencies_by_team=existing_frequencies_by_team) # --------------------------------- # PLS: One shot rebuild # --------------------------------- def _cron_update_automated_probabilities(self): """ This cron will : - rebuild the lead scoring frequency table - recompute all the automated_probability and align probability if both were aligned """ cron_start_date = datetime.now() self._rebuild_pls_frequency_table() self._update_automated_probabilities() _logger.info("Predictive Lead Scoring : Cron duration = %d seconds" % ((datetime.now() - cron_start_date).total_seconds())) def _rebuild_pls_frequency_table(self): # Clear the frequencies table (in sql to speed up the cron) try: self.check_access_rights('unlink') except AccessError: raise UserError(_("You don't have the access needed to run this cron.")) else: self._cr.execute('TRUNCATE TABLE crm_lead_scoring_frequency') new_frequencies_by_team, unused = self._pls_prepare_update_frequency_table(rebuild=True) # update frequency table self._pls_update_frequency_table(new_frequencies_by_team, 1) _logger.info("Predictive Lead Scoring : crm.lead.scoring.frequency table rebuilt") def _update_automated_probabilities(self): """ Recompute all the automated_probability (and align probability if both were aligned) for all the leads that are active (not won, nor lost). For performance matter, as there can be a huge amount of leads to recompute, this cron proceed by batch. Each batch is performed into its own transaction, in order to minimise the lock time on the lead table (and to avoid complete lock if there was only 1 transaction that would last for too long -> several minutes). If a concurrent update occurs, it will simply be put in the queue to get the lock. 
""" pls_start_date = self._pls_get_safe_start_date() if not pls_start_date: return # 1. Get all the leads to recompute created after pls_start_date that are nor won nor lost # (Won : probability = 100 | Lost : probability = 0 or inactive. Here, inactive won't be returned anyway) # Get also all the lead without probability --> These are the new leads. Activate auto probability on them. pending_lead_domain = [ '&', '&', ('stage_id', '!=', False), ('create_date', '>=', pls_start_date), '|', ('probability', '=', False), '&', ('probability', '<', 100), ('probability', '>', 0) ] leads_to_update = self.env['crm.lead'].search(pending_lead_domain) leads_to_update_count = len(leads_to_update) # 2. Compute by batch to avoid memory error lead_probabilities = {} for i in range(0, leads_to_update_count, PLS_COMPUTE_BATCH_STEP): leads_to_update_part = leads_to_update[i:i + PLS_COMPUTE_BATCH_STEP] lead_probabilities.update(leads_to_update_part._pls_get_naive_bayes_probabilities(batch_mode=True)) _logger.info("Predictive Lead Scoring : New automated probabilities computed") # 3. Group by new probability to reduce server roundtrips when executing the update probability_leads = defaultdict(list) for lead_id, probability in sorted(lead_probabilities.items()): probability_leads[probability].append(lead_id) # 4. 
Update automated_probability (+ probability if both were equal) update_sql = """UPDATE crm_lead SET automated_probability = %s, probability = CASE WHEN (probability = automated_probability OR probability is null) THEN (%s) ELSE (probability) END WHERE id in %s""" # Update by a maximum number of leads at the same time, one batch by transaction : # - avoid memory errors # - avoid blocking the table for too long with a too big transaction transactions_count, transactions_failed_count = 0, 0 cron_update_lead_start_date = datetime.now() auto_commit = not getattr(threading.current_thread(), 'testing', False) for probability, probability_lead_ids in probability_leads.items(): for lead_ids_current in tools.split_every(PLS_UPDATE_BATCH_STEP, probability_lead_ids): transactions_count += 1 try: self.env.cr.execute(update_sql, (probability, probability, tuple(lead_ids_current))) # auto-commit except in testing mode if auto_commit: self.env.cr.commit() except Exception as e: _logger.warning("Predictive Lead Scoring : update transaction failed. Error: %s" % e) transactions_failed_count += 1 _logger.info( "Predictive Lead Scoring : All automated probabilities updated (%d leads / %d transactions (%d failed) / %d seconds)" % ( leads_to_update_count, transactions_count, transactions_failed_count, (datetime.now() - cron_update_lead_start_date).total_seconds(), ) ) # --------------------------------- # PLS: Common parts for both mode # --------------------------------- def _pls_prepare_update_frequency_table(self, rebuild=False, target_state=False): """ This method is common to Live Increment or Full Rebuild mode, as it shares the main steps. This method will prepare the frequency dict needed to update the frequency table: - New frequencies: frequencies that we need to add in the frequency table. - Existing frequencies: frequencies that are already in the frequency table. In rebuild mode, only the new frequencies are needed as existing frequencies are truncated. 
For each team, each dict contains the frequency in won and lost for each field/value couple of the target leads. Target leads are : - in Live increment mode : given ongoing leads (self) - in Full rebuild mode : all the closed (won and lost) leads in the DB. During the frequencies update, with both new and existing frequencies, we can split frequencies to update and frequencies to add. If a field/value couple already exists in the frequency table, we just update it. Otherwise, we need to insert a new one. """ # Keep eligible leads pls_start_date = self._pls_get_safe_start_date() if not pls_start_date: return {}, {} if rebuild: # rebuild will treat every closed lead in DB, increment will treat current ongoing leads pls_leads = self else: # Only treat leads created after the PLS start Date pls_leads = self.filtered( lambda lead: fields.Date.to_date(pls_start_date) <= fields.Date.to_date(lead.create_date)) if not pls_leads: return {}, {} # Extract target leads values if rebuild: # rebuild is ok domain = [ '&', ('create_date', '>=', pls_start_date), '|', ('probability', '=', 100), '&', ('probability', '=', 0), ('active', '=', False) ] team_ids = self.env['crm.team'].with_context(active_test=False).search([]).ids + [0] # If team_id is unset, consider it as team 0 else: # increment domain = [('id', 'in', pls_leads.ids)] team_ids = pls_leads.mapped('team_id').ids + [0] leads_values_dict = pls_leads._pls_get_lead_pls_values(domain=domain) # split leads values by team_id # get current frequencies related to the target leads leads_frequency_values_by_team = dict((team_id, []) for team_id in team_ids) leads_pls_fields = set() # ensure to keep each field unique (can have multiple tag_id leads_values_dict) for lead_id, values in leads_values_dict.items(): team_id = values.get('team_id', 0) # If team_id is unset, consider it as team 0 lead_frequency_values = {'count': 1} for field, value in values['values']: if field != "probability": # was added to lead values in batch mode to 
know won/lost state, but is not a pls fields. leads_pls_fields.add(field) else: # extract lead probability - needed to increment tag_id frequency. (proba always before tag_id) lead_probability = value if field == 'tag_id': # handle tag_id separatelly (as in One Shot rebuild mode) leads_frequency_values_by_team[team_id].append({field: value, 'count': 1, 'probability': lead_probability}) else: lead_frequency_values[field] = value leads_frequency_values_by_team[team_id].append(lead_frequency_values) leads_pls_fields = sorted(leads_pls_fields) # get new frequencies new_frequencies_by_team = {} for team_id in team_ids: # prepare fields and tag values for leads by team new_frequencies_by_team[team_id] = self._pls_prepare_frequencies( leads_frequency_values_by_team[team_id], leads_pls_fields, target_state=target_state) # get existing frequencies existing_frequencies_by_team = {} if not rebuild: # there is no existing frequency in rebuild mode as they were all deleted. # read all fields to get everything in memory in one query (instead of having query + prefetch) existing_frequencies = self.env['crm.lead.scoring.frequency'].search_read( ['&', ('variable', 'in', leads_pls_fields), '|', ('team_id', 'in', pls_leads.mapped('team_id').ids), ('team_id', '=', False)]) for frequency in existing_frequencies: team_id = frequency['team_id'][0] if frequency.get('team_id') else 0 if team_id not in existing_frequencies_by_team: existing_frequencies_by_team[team_id] = dict((field, {}) for field in leads_pls_fields) existing_frequencies_by_team[team_id][frequency['variable']][frequency['value']] = { 'frequency_id': frequency['id'], 'won': frequency['won_count'], 'lost': frequency['lost_count'] } return new_frequencies_by_team, existing_frequencies_by_team def _pls_update_frequency_table(self, new_frequencies_by_team, step, existing_frequencies_by_team=None): """ Create / update the frequency table in a cross company way, per team_id""" values_to_update = {} values_to_create = [] if not 
existing_frequencies_by_team: existing_frequencies_by_team = {} # build the create multi + frequencies to update for team_id, new_frequencies in new_frequencies_by_team.items(): for field, value in new_frequencies.items(): # frequency already present ? current_frequencies = existing_frequencies_by_team.get(team_id, {}) for param, result in value.items(): current_frequency_for_couple = current_frequencies.get(field, {}).get(param, {}) # If frequency already present : UPDATE IT if current_frequency_for_couple: new_won = current_frequency_for_couple['won'] + (result['won'] * step) new_lost = current_frequency_for_couple['lost'] + (result['lost'] * step) # ensure to have always positive frequencies values_to_update[current_frequency_for_couple['frequency_id']] = { 'won_count': new_won if new_won > 0 else 0.1, 'lost_count': new_lost if new_lost > 0 else 0.1 } continue # Else, CREATE a new frequency record. # We add + 0.1 in won and lost counts to avoid zero frequency issues # should be +1 but it weights too much on small recordset. values_to_create.append({ 'variable': field, 'value': param, 'won_count': result['won'] + 0.1, 'lost_count': result['lost'] + 0.1, 'team_id': team_id if team_id else None # team_id = 0 means no team_id }) LeadScoringFrequency = self.env['crm.lead.scoring.frequency'].sudo() for frequency_id, values in values_to_update.items(): LeadScoringFrequency.browse(frequency_id).write(values) if values_to_create: LeadScoringFrequency.create(values_to_create) # --------------------------------- # Utility Tools for PLS # --------------------------------- # PLS: Config Parameters # --------------------- def _pls_get_safe_start_date(self): """ As config_parameters does not accept Date field, we get directly the date formated string stored into the Char config field, as we directly use this string in the sql queries. 
To avoid sql injections when using this config param, we ensure the date string can be effectively a date.""" str_date = self.env['ir.config_parameter'].sudo().get_param('crm.pls_start_date') if not fields.Date.to_date(str_date): return False return str_date def _pls_get_safe_fields(self): """ As config_parameters does not accept M2M field, we the fields from the formated string stored into the Char config field. To avoid sql injections when using that list, we return only the fields that are defined on the model. """ pls_fields_config = self.env['ir.config_parameter'].sudo().get_param('crm.pls_fields') pls_fields = pls_fields_config.split(',') if pls_fields_config else [] pls_safe_fields = [field for field in pls_fields if field in self._fields.keys()] return pls_safe_fields # Compute Automated Probability Tools # ----------------------------------- def _pls_get_won_lost_total_count(self, team_results): """ Get all won and all lost + total : first stage can be used to know how many lost and won there is as won count are equals for all stage and first stage is always incremented in lost_count :param frequencies: lead_scoring_frequencies :return: won count, lost count and total count for all records in frequencies """ # TODO : check if we need to handle specific team_id stages [for lost count] (if first stage in sequence is team_specific) first_stage_id = self.env['crm.stage'].search([('team_id', '=', False)], order='sequence, id', limit=1) if str(first_stage_id.id) not in team_results.get('stage_id', []): return 0, 0, 0 stage_result = team_results['stage_id'][str(first_stage_id.id)] return stage_result['won'], stage_result['lost'], stage_result['won'] + stage_result['lost'] # PLS: Rebuild Frequency Table Tools # ---------------------------------- def _pls_prepare_frequencies(self, lead_values, leads_pls_fields, target_state=None): """new state is used when getting frequencies for leads that are changing to lost or won. 
Stays none if we are checking frequencies for leads already won or lost.""" pls_fields = leads_pls_fields.copy() frequencies = dict((field, {}) for field in pls_fields) stage_ids = self.env['crm.stage'].search_read([], ['sequence', 'name', 'id'], order='sequence, id') stage_sequences = {stage['id']: stage['sequence'] for stage in stage_ids} # Increment won / lost frequencies by criteria (field / value couple) for values in lead_values: if target_state: # ignore probability values if target state (as probability is the old value) won_count = values['count'] if target_state == 'won' else 0 lost_count = values['count'] if target_state == 'lost' else 0 else: won_count = values['count'] if values.get('probability', 0) == 100 else 0 lost_count = values['count'] if values.get('probability', 1) == 0 else 0 if 'tag_id' in values: frequencies = self._pls_increment_frequency_dict(frequencies, 'tag_id', values['tag_id'], won_count, lost_count) continue # Else, treat other fields if 'tag_id' in pls_fields: # tag_id already treated here above. pls_fields.remove('tag_id') for field in pls_fields: if field not in values: continue value = values[field] if value or field in ('email_state', 'phone_state'): if field == 'stage_id': if won_count: # increment all stages if won stages_to_increment = [stage['id'] for stage in stage_ids] else: # increment only current + previous stages if lost current_stage_sequence = stage_sequences[value] stages_to_increment = [stage['id'] for stage in stage_ids if stage['sequence'] <= current_stage_sequence] for stage_id in stages_to_increment: frequencies = self._pls_increment_frequency_dict(frequencies, field, stage_id, won_count, lost_count) else: frequencies = self._pls_increment_frequency_dict(frequencies, field, value, won_count, lost_count) return frequencies def _pls_increment_frequency_dict(self, frequencies, field, value, won, lost): value = str(value) # Ensure we will always compare strings. 
if value not in frequencies[field]: frequencies[field][value] = {'won': won, 'lost': lost} else: frequencies[field][value]['won'] += won frequencies[field][value]['lost'] += lost return frequencies # Common PLS Tools # ---------------- def _pls_get_lead_pls_values(self, domain=[]): """ This methods builds a dict where, for each lead in self or matching the given domain, we will get a list of field/value couple. Due to onchange and create, we don't always have the id of the lead to recompute. When we update few records (one, typically) with onchanges, we build the lead_values (= couple field/value) using the ORM. To speed up the computation and avoid making too much DB read inside loops, we can give a domain to make sql queries to bypass the ORM. This domain will be used in sql queries to get the values for every lead matching the domain. :param domain: If set, we get all the leads values via unique sql queries (one for tags, one for other fields), using the given domain on leads. If not set, get lead values lead by lead using the ORM. :return: {lead_id: [(field1: value1), (field2: value2), ...], ...} """ leads_values_dict = OrderedDict() pls_fields = ["stage_id", "team_id"] + self._pls_get_safe_fields() # Check if tag_ids is in the pls_fields and removed it from the list. The tags will be managed separately. 
use_tags = 'tag_ids' in pls_fields if use_tags: pls_fields.remove('tag_ids') if domain: # active_test = False as domain should take active into 'active' field it self from_clause, where_clause, where_params = self.env['crm.lead'].with_context(active_test=False)._where_calc(domain).get_sql() str_fields = ", ".join(["{}"] * len(pls_fields)) args = [sql.Identifier(field) for field in pls_fields] # Get leads values self.flush(['probability']) query = """SELECT id, probability, %s FROM %s WHERE %s order by team_id asc, id desc""" query = sql.SQL(query % (str_fields, from_clause, where_clause)).format(*args) self._cr.execute(query, where_params) lead_results = self._cr.dictfetchall() if use_tags: # Get tags values query = """SELECT crm_lead.id as lead_id, t.id as tag_id FROM %s LEFT JOIN crm_tag_rel rel ON crm_lead.id = rel.lead_id LEFT JOIN crm_tag t ON rel.tag_id = t.id WHERE %s order by crm_lead.team_id asc, crm_lead.id""" args.append(sql.Identifier('tag_id')) query = sql.SQL(query % (from_clause, where_clause)).format(*args) self._cr.execute(query, where_params) tag_results = self._cr.dictfetchall() else: tag_results = [] # get all (variable, value) couple for all in self for lead in lead_results: lead_values = [] for field in pls_fields + ['probability']: # add probability as used in _pls_prepare_frequencies (needed in rebuild mode) value = lead[field] if field == 'team_id': # ignore team_id as stored separately in leads_values_dict[lead_id][team_id] continue if value or field == 'probability': # 0 is a correct value for probability lead_values.append((field, value)) elif field in ('email_state', 'phone_state'): # As ORM reads 'None' as 'False', do the same here lead_values.append((field, False)) leads_values_dict[lead['id']] = {'values': lead_values, 'team_id': lead['team_id'] or 0} for tag in tag_results: if tag['tag_id']: leads_values_dict[tag['lead_id']]['values'].append(('tag_id', tag['tag_id'])) return leads_values_dict else: for lead in self: lead_values = [] 
for field in pls_fields: if field == 'team_id': # ignore team_id as stored separately in leads_values_dict[lead_id][team_id] continue value = lead[field].id if isinstance(lead[field], models.BaseModel) else lead[field] if value or field in ('email_state', 'phone_state'): lead_values.append((field, value)) if use_tags: for tag in lead.tag_ids: lead_values.append(('tag_id', tag.id)) leads_values_dict[lead.id] = {'values': lead_values, 'team_id': lead['team_id'].id} return leads_values_dict
50.213366
124,730
2,723
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import api, fields, models


class Partner(models.Model):
    """CRM-side extension of partners: sales team link and opportunity statistics."""
    _name = 'res.partner'
    _inherit = 'res.partner'

    team_id = fields.Many2one('crm.team', string='Sales Team', ondelete="set null")
    opportunity_ids = fields.One2many('crm.lead', 'partner_id', string='Opportunities', domain=[('type', '=', 'opportunity')])
    opportunity_count = fields.Integer("Opportunity", compute='_compute_opportunity_count')

    @api.model
    def default_get(self, fields):
        """Pre-fill contact details from the originating lead when a partner is
        created in the context of a single ``crm.lead`` (e.g. lead conversion)."""
        rec = super(Partner, self).default_get(fields)
        active_model = self.env.context.get('active_model')
        # Only copy values when acting on at most one lead; mass actions keep defaults.
        if active_model == 'crm.lead' and len(self.env.context.get('active_ids', [])) <= 1:
            lead = self.env[active_model].browse(self.env.context.get('active_id')).exists()
            if lead:
                rec.update(
                    phone=lead.phone,
                    mobile=lead.mobile,
                    function=lead.function,
                    title=lead.title.id,
                    website=lead.website,
                    street=lead.street,
                    street2=lead.street2,
                    city=lead.city,
                    state_id=lead.state_id.id,
                    country_id=lead.country_id.id,
                    zip=lead.zip,
                )
        return rec

    def _compute_opportunity_count(self):
        """Count leads of each partner, including those of all its (possibly
        archived) child partners; each group's count is propagated up the
        parent chain so a parent totals its contacts' opportunities."""
        # retrieve all children partners and prefetch 'parent_id' on them
        all_partners = self.with_context(active_test=False).search([('id', 'child_of', self.ids)])
        all_partners.read(['parent_id'])

        # One aggregated row per partner having leads (archived leads included).
        opportunity_data = self.env['crm.lead'].with_context(active_test=False).read_group(
            domain=[('partner_id', 'in', all_partners.ids)],
            fields=['partner_id'], groupby=['partner_id']
        )

        self.opportunity_count = 0
        for group in opportunity_data:
            partner = self.browse(group['partner_id'][0])
            # Walk up the hierarchy: the count accumulates on every ancestor
            # that belongs to the records being computed.
            while partner:
                if partner in self:
                    partner.opportunity_count += group['partner_id_count']
                partner = partner.parent_id

    def action_view_opportunity(self):
        '''
        This function returns an action that displays the opportunities from partner.
        '''
        action = self.env['ir.actions.act_window']._for_xml_id('crm.crm_lead_opportunities')
        action['context'] = {'active_test': False}
        # NOTE(review): no ensure_one() here — reads self.is_company/self.id as if
        # a single record; confirm callers always pass one partner.
        if self.is_company:
            action['domain'] = [('partner_id.commercial_partner_id.id', '=', self.id)]
        else:
            action['domain'] = [('partner_id.id', '=', self.id)]
        return action
41.892308
2,723
3,186
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import fields, models, tools, api


class ActivityReport(models.Model):
    """ CRM Lead Analysis """

    _name = "crm.activity.report"
    _auto = False  # backed by the SQL view created in init(), not a real table
    _description = "CRM Activity Analysis"
    _rec_name = 'id'

    # All fields are readonly: rows come from the view joining mail_message
    # (the activity/comment) with crm_lead (the related lead columns).
    date = fields.Datetime('Completion Date', readonly=True)
    lead_create_date = fields.Datetime('Creation Date', readonly=True)
    date_conversion = fields.Datetime('Conversion Date', readonly=True)
    date_deadline = fields.Date('Expected Closing', readonly=True)
    date_closed = fields.Datetime('Closed Date', readonly=True)
    author_id = fields.Many2one('res.partner', 'Assigned To', readonly=True)
    user_id = fields.Many2one('res.users', 'Salesperson', readonly=True)
    team_id = fields.Many2one('crm.team', 'Sales Team', readonly=True)
    lead_id = fields.Many2one('crm.lead', "Opportunity", readonly=True)
    body = fields.Html('Activity Description', readonly=True)
    subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', readonly=True)
    mail_activity_type_id = fields.Many2one('mail.activity.type', 'Activity Type', readonly=True)
    country_id = fields.Many2one('res.country', 'Country', readonly=True)
    company_id = fields.Many2one('res.company', 'Company', readonly=True)
    stage_id = fields.Many2one('crm.stage', 'Stage', readonly=True)
    partner_id = fields.Many2one('res.partner', 'Customer', readonly=True)
    lead_type = fields.Selection(
        string='Type',
        selection=[('lead', 'Lead'), ('opportunity', 'Opportunity')],
        help="Type is used to separate Leads and Opportunities")
    active = fields.Boolean('Active', readonly=True)

    def _select(self):
        """SELECT part of the view: message columns plus joined lead columns."""
        return """
            SELECT
                m.id,
                l.create_date AS lead_create_date,
                l.date_conversion,
                l.date_deadline,
                l.date_closed,
                m.subtype_id,
                m.mail_activity_type_id,
                m.author_id,
                m.date,
                m.body,
                l.id as lead_id,
                l.user_id,
                l.team_id,
                l.country_id,
                l.company_id,
                l.stage_id,
                l.partner_id,
                l.type as lead_type,
                l.active
        """

    def _from(self):
        """FROM part of the view."""
        return """
            FROM mail_message AS m
        """

    def _join(self):
        """JOIN messages to the lead they were posted on."""
        return """
            JOIN crm_lead AS l ON m.res_id = l.id
        """

    def _where(self):
        """WHERE part: keep only lead messages that are activity feedback or
        plain discussion comments (mail.mt_comment)."""
        disccusion_subtype = self.env.ref('mail.mt_comment')
        # %s is filled with an integer id resolved from an internal XML id,
        # so plain string interpolation is safe here (no user input).
        return """
            WHERE
                m.model = 'crm.lead' AND (m.mail_activity_type_id IS NOT NULL OR m.subtype_id = %s)
        """ % (disccusion_subtype.id,)

    def init(self):
        """(Re)create the SQL view backing this read-only reporting model."""
        tools.drop_view_if_exists(self._cr, self._table)
        self._cr.execute("""
            CREATE OR REPLACE VIEW %s AS (
                %s
                %s
                %s
                %s
            )
        """ % (self._table, self._select(), self._from(), self._join(), self._where())
        )
36.204545
3,186
1,948
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import logging

from odoo.addons.mail.controllers.mail import MailController
from odoo import http
from odoo.http import request

_logger = logging.getLogger(__name__)


class CrmController(http.Controller):
    """Token-authenticated GET endpoints triggered from CRM e-mail shortcuts."""

    def _run_lead_action(self, res_id, token, action, failure_message):
        """Resolve the lead behind ``token`` and apply ``action`` to it.

        :param res_id: id of the ``crm.lead`` record, as received in the URL
        :param str token: access token validating the request
        :param callable action: callable applied to the lead record
        :param str failure_message: message logged when the action raises
        :return: an HTTP redirect response
        """
        comparison, record, redirect = MailController._check_token_and_record_or_redirect(
            'crm.lead', int(res_id), token)
        if comparison and record:
            try:
                action(record)
            except Exception:
                # Never leak the traceback to the browser; log and fall back
                # to the generic messaging redirect.
                _logger.exception(failure_message)
                return MailController._redirect_to_messaging()
        return redirect

    @http.route('/lead/case_mark_won', type='http', auth='user', methods=['GET'])
    def crm_lead_case_mark_won(self, res_id, token):
        """Mark the lead as won (with rainbowman effect)."""
        return self._run_lead_action(
            res_id, token,
            lambda lead: lead.action_set_won_rainbowman(),
            "Could not mark crm.lead as won")

    @http.route('/lead/case_mark_lost', type='http', auth='user', methods=['GET'])
    def crm_lead_case_mark_lost(self, res_id, token):
        """Mark the lead as lost."""
        return self._run_lead_action(
            res_id, token,
            lambda lead: lead.action_set_lost(),
            "Could not mark crm.lead as lost")

    @http.route('/lead/convert', type='http', auth='user', methods=['GET'])
    def crm_lead_convert(self, res_id, token):
        """Convert the lead into an opportunity for its current partner."""
        return self._run_lead_action(
            res_id, token,
            lambda lead: lead.convert_opportunity(lead.partner_id.id),
            "Could not convert crm.lead to opportunity")
43.288889
1,948
541
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Service Margins in Sales Orders', 'version': '1.0', 'summary': 'Bridge module between Sales Margin and Sales Timesheet', 'description': """ Allows to compute accurate margin for Service sales. ====================================================== """, 'category': 'Hidden', 'depends': ['sale_margin', 'sale_timesheet'], 'demo': [], 'data': [], 'auto_install': True, 'license': 'LGPL-3', }
30.055556
541
1,724
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class SaleOrderLine(models.Model): _inherit = "sale.order.line" @api.depends('analytic_line_ids.amount', 'qty_delivered_method') def _compute_purchase_price(self): timesheet_sols = self.filtered( lambda sol: sol.qty_delivered_method == 'timesheet' and not sol.product_id.standard_price ) super(SaleOrderLine, self - timesheet_sols)._compute_purchase_price() if timesheet_sols: group_amount = self.env['account.analytic.line'].read_group( [('so_line', 'in', timesheet_sols.ids), ('project_id', '!=', False)], ['so_line', 'amount:sum', 'unit_amount:sum'], ['so_line']) mapped_sol_timesheet_amount = { amount['so_line'][0]: -amount['amount'] / amount['unit_amount'] if amount['unit_amount'] else 0.0 for amount in group_amount } for line in timesheet_sols: line = line.with_company(line.company_id) product_cost = mapped_sol_timesheet_amount.get(line.id, line.product_id.standard_price) if line.product_id.uom_id != line.company_id.project_time_mode_id and\ line.product_id.uom_id.category_id.id == line.company_id.project_time_mode_id.category_id.id: product_cost = line.company_id.project_time_mode_id._compute_quantity( product_cost, line.product_id.uom_id ) line.purchase_price = line._convert_price(product_cost, line.product_id.uom_id)
53.875
1,724
909
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Thailand - Accounting', 'version': '2.0', 'category': 'Accounting/Localizations/Account Charts', 'description': """ Chart of Accounts for Thailand. =============================== Thai accounting chart and localization. """, 'author': 'Almacom', 'website': 'http://almacom.co.th/', 'depends': ['account'], 'data': [ 'data/account_tax_group_data.xml', 'data/l10n_th_chart_data.xml', 'data/account.account.template.csv', 'data/l10n_th_chart_post_data.xml', 'data/account_tax_report_data.xml', 'data/account_tax_template_data.xml', 'data/account_chart_template_data.xml', ], 'demo': [ 'demo/demo_company.xml', ], 'post_init_hook': '_preserve_tag_on_taxes', 'license': 'LGPL-3', }
29.322581
909
1,399
py
PYTHON
15.0
# Author: Silvija Butko. Copyright: JSC Focusate. # Co-Authors: Eimantas Nėjus, Andrius Laukavičius. Copyright: JSC Focusate # See LICENSE file for full copyright and licensing details. { 'name': "Lithuania - Accounting", 'version': '1.0.0', 'description': """ Chart of Accounts (COA) Template for Lithuania's Accounting. This module also includes: * List of available banks in Lithuania. * Tax groups. * Most common Lithuanian Taxes. * Fiscal positions. * Account Tags. """, 'license': 'LGPL-3', 'author': "Focusate", 'website': "http://www.focusate.eu", 'category': 'Accounting/Localizations/Account Charts', 'depends': [ 'l10n_multilang', ], 'data': [ 'data/account_account_tag_data.xml', 'data/account_chart_template_data.xml', 'data/account.account.template.csv', 'data/account_chart_template_setup_data.xml', 'data/res_bank_data.xml', 'data/account_tax_group_data.xml', 'data/account_tax_template_data.xml', 'data/account_fiscal_position_template_data.xml', # Try Loading COA for Current Company 'data/account_chart_template_load.xml', 'data/menuitem_data.xml', ], 'demo': [ 'demo/demo_company.xml', ], 'post_init_hook': 'load_translations', 'installable': True, }
32.488372
1,397
715
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Test Payment Acquirer', 'version': '2.0', 'category': 'Hidden', 'description': """ This module adds a simple payment acquirer allowing to make test payments. It should never be used in production environment. Make sure to disable it before going live. """, 'depends': ['payment'], 'data': [ 'views/payment_templates.xml', 'views/payment_test_templates.xml', 'data/payment_acquirer_data.xml', ], 'uninstall_hook': 'uninstall_hook', 'assets': { 'web.assets_frontend': [ 'payment_test/static/src/js/**/*', ], }, 'license': 'LGPL-3', }
29.791667
715
2,731
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import logging

from odoo import _, api, models
from odoo.exceptions import ValidationError

from odoo.addons.payment import utils as payment_utils

_logger = logging.getLogger(__name__)


class PaymentTransaction(models.Model):
    _inherit = 'payment.transaction'

    def _send_payment_request(self):
        """ Override of payment to simulate a payment request.

        Note: self.ensure_one()

        :return: None
        """
        super()._send_payment_request()
        if self.provider != 'test':
            return

        # The payment request response would normally transit through the controller but in the end,
        # all that interests us is the reference. To avoid making a localhost request, we bypass the
        # controller and handle the fake feedback data directly.
        self._handle_feedback_data('test', {'reference': self.reference})

    @api.model
    def _get_tx_from_feedback_data(self, provider, data):
        """ Override of payment to find the transaction based on dummy data.

        :param str provider: The provider of the acquirer that handled the transaction
        :param dict data: The dummy feedback data
        :return: The transaction if found
        :rtype: recordset of `payment.transaction`
        :raise: ValidationError if the data match no transaction
        """
        tx = super()._get_tx_from_feedback_data(provider, data)
        if provider != 'test':
            return tx

        reference = data.get('reference')
        tx = self.search([('reference', '=', reference), ('provider', '=', 'test')])
        if not tx:
            raise ValidationError(
                "Test: " + _("No transaction found matching reference %s.", reference)
            )
        return tx

    def _process_feedback_data(self, data):
        """ Override of payment to process the transaction based on dummy data.

        Note: self.ensure_one()

        :param dict data: The dummy feedback data
        :return: None
        :raise: ValidationError if inconsistent data were received
        """
        super()._process_feedback_data(data)
        if self.provider != "test":
            return

        self._set_done()  # Dummy transactions are always successful
        if self.tokenize:
            # NOTE(review): assumes 'cc_summary' is present in data whenever
            # tokenizing — confirm against the simulate_payment controller.
            token = self.env['payment.token'].create({
                'acquirer_id': self.acquirer_id.id,
                'name': payment_utils.build_token_name(payment_details_short=data['cc_summary']),
                'partner_id': self.partner_id.id,
                'acquirer_ref': 'fake acquirer reference',
                'verified': True,
            })
            self.token_id = token.id
35.934211
2,731
427
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, models class AccountPaymentMethod(models.Model): _inherit = 'account.payment.method' @api.model def _get_payment_method_information(self): res = super()._get_payment_method_information() res['test'] = {'mode': 'unique', 'domain': [('type', '=', 'bank')]} return res
30.5
427
1,133
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import _, api, fields, models from odoo.exceptions import UserError class PaymentAcquirer(models.Model): _inherit = 'payment.acquirer' provider = fields.Selection(selection_add=[('test', 'Test')], ondelete={'test': 'set default'}) @api.depends('provider') def _compute_view_configuration_fields(self): """ Override of payment to hide the credentials page. :return: None """ super()._compute_view_configuration_fields() self.filtered(lambda acq: acq.provider == 'test').show_credentials_page = False @api.constrains('state', 'provider') def _check_acquirer_state(self): if self.filtered(lambda a: a.provider == 'test' and a.state not in ('test', 'disabled')): raise UserError(_("Test acquirers should never be enabled.")) def _get_default_payment_method_id(self): self.ensure_one() if self.provider != 'test': return super()._get_default_payment_method_id() return self.env.ref('payment_test.payment_method_test').id
37.766667
1,133
757
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import http
from odoo.http import request


class PaymentTestController(http.Controller):
    """JSON endpoint faking the acquirer side of a payment round-trip."""

    @http.route('/payment/test/simulate_payment', type='json', auth='public')
    def test_simulate_payment(self, reference, customer_input):
        """ Simulate the response of a payment request.

        :param str reference: The reference of the transaction
        :param str customer_input: The payment method details
        :return: None
        """
        # Build the response a real acquirer would send back: the transaction
        # reference plus the last four characters of the payment details.
        simulated_response = {
            'reference': reference,
            'cc_summary': customer_input[-4:],
        }
        transactions = request.env['payment.transaction'].sudo()
        transactions._handle_feedback_data('test', simulated_response)
36.047619
757
913
py
PYTHON
15.0
# -*- encoding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Ireland - Accounting', 'version': '1.0', 'category': 'Accounting/Localizations/Account Charts', 'description': """ This module is for all the Irish SMEs who would like to setup their accounting quickly. The module provides: - a Chart of Accounts customised to Ireland - VAT Rates and Structure""", 'author': 'Target Integration', 'website': 'http://www.targetintegration.com', 'depends': ['account', 'base_iban', 'base_vat'], 'data': [ 'data/account_chart_template.xml', 'data/account.account.template.csv', 'data/account.chart.template.csv', 'data/account_tax_data.xml', 'data/account_chart_template_configuration_data.xml', ], 'demo': [ 'demo/demo_company.xml', ], 'license': 'LGPL-3', }
32.607143
913
3,137
py
PYTHON
15.0
# -*- coding: utf-8 -*-

# Odoo module manifest: Lunch application — employee lunch ordering from
# registered vendors, with alerts, cash moves and backend views/assets.
{
    'name': 'Lunch',
    'sequence': 300,
    'version': '1.0',
    'depends': ['mail'],
    'category': 'Human Resources/Lunch',
    'summary': 'Handle lunch orders of your employees',
    'description': """
The base module to manage lunch.
================================

Many companies order sandwiches, pizzas and other, from usual vendors, for their employees to offer them more facilities.

However lunches management within the company requires proper administration especially when the number of employees or vendors is important.

The “Lunch Order” module has been developed to make this management easier but also to offer employees more tools and usability.

In addition to a full meal and vendor management, this module offers the possibility to display warning and provides quick order selection based on employee’s preferences.

If you want to save your employees' time and avoid them to always have coins in their pockets, this module is essential.
""",
    'data': [
        'security/lunch_security.xml',
        'security/ir.model.access.csv',
        'report/lunch_cashmove_report_views.xml',
        'views/lunch_templates.xml',
        'views/lunch_alert_views.xml',
        'views/lunch_cashmove_views.xml',
        'views/lunch_location_views.xml',
        'views/lunch_orders_views.xml',
        'views/lunch_product_views.xml',
        'views/lunch_supplier_views.xml',
        'views/res_config_settings.xml',
        'views/lunch_views.xml',
        'data/mail_template_data.xml',
        'data/lunch_data.xml',
    ],
    'demo': ['data/lunch_demo.xml'],
    'installable': True,
    'application': True,
    'certificate': '001292377792581874189',
    'assets': {
        'web.assets_backend': [
            'lunch/static/src/scss/lunch_view.scss',
            'lunch/static/src/scss/lunch_kanban.scss',
            'lunch/static/src/scss/lunch_list.scss',
            'lunch/static/src/js/lunch_controller_common.js',
            'lunch/static/src/js/lunch_widget.js',
            'lunch/static/src/js/lunch_mobile.js',
            'lunch/static/src/js/lunch_payment_dialog.js',
            'lunch/static/src/js/lunch_kanban_view.js',
            'lunch/static/src/js/lunch_kanban_controller.js',
            'lunch/static/src/js/lunch_kanban_renderer.js',
            'lunch/static/src/js/lunch_kanban_record.js',
            'lunch/static/src/js/lunch_model_extension.js',
            'lunch/static/src/js/lunch_list_view.js',
            'lunch/static/src/js/lunch_list_controller.js',
            'lunch/static/src/js/lunch_list_renderer.js',
        ],
        'web.qunit_suite_tests': [
            'lunch/static/tests/lunch_test_utils.js',
            'lunch/static/tests/lunch_kanban_tests.js',
            'lunch/static/tests/lunch_list_tests.js',
        ],
        'web.qunit_mobile_suite_tests': [
            'lunch/static/tests/lunch_test_utils.js',
            'lunch/static/tests/lunch_kanban_mobile_tests.js',
        ],
        'web.assets_qweb': [
            'lunch/static/src/xml/lunch_templates.xml',
        ],
    },
    'license': 'LGPL-3',
}
41.197368
3,131
7,215
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import logging from dateutil.relativedelta import relativedelta from itertools import groupby from odoo import models from odoo.tools import populate _logger = logging.getLogger(__name__) class LunchProductCategory(models.Model): _inherit = 'lunch.product.category' _populate_sizes = {'small': 5, 'medium': 150, 'large': 400} _populate_dependencies = ['res.company'] def _populate_factories(self): # TODO topping_ids_{1,2,3}, toppping_label_{1,2,3}, topping_quantity{1,2,3} company_ids = self.env.registry.populated_models['res.company'] return [ ('name', populate.constant('lunch_product_category_{counter}')), ('company_id', populate.iterate( [False, self.env.ref('base.main_company').id] + company_ids, [1, 1] + [2/(len(company_ids) or 1)]*len(company_ids))), ] class LunchProduct(models.Model): _inherit = 'lunch.product' _populate_sizes = {'small': 10, 'medium': 150, 'large': 10000} _populate_dependencies = ['lunch.product.category', 'lunch.supplier'] def _populate_factories(self): category_ids = self.env.registry.populated_models['lunch.product.category'] category_records = self.env['lunch.product.category'].browse(category_ids) category_by_company = {k: list(v) for k, v in groupby(category_records, key=lambda rec: rec['company_id'].id)} supplier_ids = self.env.registry.populated_models['lunch.supplier'] company_by_supplier = {rec.id: rec.company_id.id for rec in self.env['lunch.supplier'].browse(supplier_ids)} def get_category(random=None, values=None, **kwargs): company_id = company_by_supplier[values['supplier_id']] return random.choice(category_by_company[company_id]).id return [ ('active', populate.iterate([True, False], [0.9, 0.1])), ('name', populate.constant('lunch_product_{counter}')), ('price', populate.randfloat(0.1, 50)), ('supplier_id', populate.randomize(supplier_ids)), ('category_id', populate.compute(get_category)), ] class LunchLocation(models.Model): 
_inherit = 'lunch.location' _populate_sizes = {'small': 3, 'medium': 50, 'large': 500} _populate_dependencies = ['res.company'] def _populate_factories(self): company_ids = self.env.registry.populated_models['res.company'] return [ ('name', populate.constant('lunch_location_{counter}')), ('address', populate.constant('lunch_address_location_{counter}')), ('company_id', populate.randomize(company_ids)) ] class LunchSupplier(models.Model): _inherit = 'lunch.supplier' _populate_sizes = {'small': 3, 'medium': 50, 'large': 1500} _populate_dependencies = ['lunch.location', 'res.partner', 'res.users'] def _populate_factories(self): location_ids = self.env.registry.populated_models['lunch.location'] partner_ids = self.env.registry.populated_models['res.partner'] user_ids = self.env.registry.populated_models['res.users'] def get_location_ids(random=None, **kwargs): nb_locations = random.randint(0, len(location_ids)) return [(6, 0, random.choices(location_ids, k=nb_locations))] return [ ('active', populate.cartesian([True, False])), ('send_by', populate.cartesian(['phone', 'mail'])), ('delivery', populate.cartesian(['delivery', 'no_delivery'])), ('mon', populate.iterate([True, False], [0.9, 0.1])), ('tue', populate.iterate([True, False], [0.9, 0.1])), ('wed', populate.iterate([True, False], [0.9, 0.1])), ('thu', populate.iterate([True, False], [0.9, 0.1])), ('fri', populate.iterate([True, False], [0.9, 0.1])), ('sat', populate.iterate([False, True], [0.9, 0.1])), ('sun', populate.iterate([False, True], [0.9, 0.1])), ('available_location_ids', populate.iterate( [[], [(6, 0, location_ids)]], then=populate.compute(get_location_ids))), ('partner_id', populate.randomize(partner_ids)), ('responsible_id', populate.randomize(user_ids)), ('moment', populate.iterate(['am', 'pm'])), ('automatic_email_time', populate.randfloat(0, 12)), ] class LunchOrder(models.Model): _inherit = 'lunch.order' _populate_sizes = {'small': 20, 'medium': 3000, 'large': 15000} _populate_dependencies = 
['lunch.product', 'res.users', 'res.company'] def _populate_factories(self): # TODO topping_ids_{1,2,3}, topping_label_{1,3}, topping_quantity_{1,3} user_ids = self.env.registry.populated_models['res.users'] product_ids = self.env.registry.populated_models['lunch.product'] company_ids = self.env.registry.populated_models['res.company'] return [ ('active', populate.cartesian([True, False])), ('state', populate.cartesian(['new', 'confirmed', 'ordered', 'cancelled'])), ('product_id', populate.randomize(product_ids)), ('user_id', populate.randomize(user_ids)), ('note', populate.constant('lunch_note_{counter}')), ('company_id', populate.randomize(company_ids)), ('quantity', populate.randint(0, 10)), ] class LunchAlert(models.Model): _inherit = 'lunch.alert' _populate_sizes = {'small': 10, 'medium': 40, 'large': 150} _populate_dependencies = ['lunch.location'] def _populate_factories(self): location_ids = self.env.registry.populated_models['lunch.location'] def get_location_ids(random=None, **kwargs): nb_max = len(location_ids) start = random.randint(0, nb_max) end = random.randint(start, nb_max) return location_ids[start:end] return [ ('active', populate.cartesian([True, False])), ('recipients', populate.cartesian(['everyone', 'last_week', 'last_month', 'last_year'])), ('mode', populate.iterate(['alert', 'chat'])), ('mon', populate.iterate([True, False], [0.9, 0.1])), ('tue', populate.iterate([True, False], [0.9, 0.1])), ('wed', populate.iterate([True, False], [0.9, 0.1])), ('thu', populate.iterate([True, False], [0.9, 0.1])), ('fri', populate.iterate([True, False], [0.9, 0.1])), ('sat', populate.iterate([False, True], [0.9, 0.1])), ('sun', populate.iterate([False, True], [0.9, 0.1])), ('name', populate.constant('alert_{counter}')), ('message', populate.constant('<strong>alert message {counter}</strong>')), ('notification_time', populate.randfloat(0, 12)), ('notification_moment', populate.iterate(['am', 'pm'])), ('until', 
populate.randdatetime(relative_before=relativedelta(years=-2), relative_after=relativedelta(years=2))), ('location_ids', populate.compute(get_location_ids)) ]
43.463855
7,215
2,470
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from datetime import datetime, timedelta

from odoo import fields
from odoo.tests import common

from odoo.addons.lunch.tests.common import TestsCommon


class TestAlarm(TestsCommon):
    """Check that each lunch.alert keeps its chat-notification ir.cron in sync."""

    @common.users('cle-lunch-manager')
    def test_cron_sync_create(self):
        # Creating an alert must create an active cron that notifies this alert only.
        cron_ny = self.alert_ny.cron_id
        self.assertTrue(cron_ny.active)
        self.assertEqual(cron_ny.name, "Lunch: alert chat notification (New York UTC-5)")
        # Strip comment lines: the executable code must target just this alert.
        self.assertEqual(
            [line for line in cron_ny.code.splitlines() if not line.lstrip().startswith("#")],
            ["env['lunch.alert'].browse([%i])._notify_chat()" % self.alert_ny.id])
        self.assertEqual(cron_ny.nextcall, datetime(2021, 1, 29, 15, 0))  # New-york is UTC-5

        tokyo_cron = self.alert_tokyo.cron_id
        self.assertEqual(tokyo_cron.nextcall, datetime(2021, 1, 29, 23, 0))  # Tokyo is UTC+9 but the cron is postponed

    @common.users('cle-lunch-manager')
    def test_cron_sync_active(self):
        # The cron must follow the alert's active flag, its mode and its end date.
        cron_ny = self.alert_ny.cron_id
        self.alert_ny.active = False
        self.assertFalse(cron_ny.active)
        self.alert_ny.active = True
        self.assertTrue(cron_ny.active)
        self.alert_ny.mode = 'alert'
        self.assertFalse(cron_ny.active)  # only 'chat' alerts notify through a cron
        self.alert_ny.mode = 'chat'
        self.assertTrue(cron_ny.active)
        ctx_today = fields.Date.context_today(self.alert_ny, self.fakenow)
        self.alert_ny.until = ctx_today - timedelta(days=1)
        self.assertFalse(cron_ny.active)  # expired alert => inactive cron
        self.alert_ny.until = ctx_today + timedelta(days=2)
        self.assertTrue(cron_ny.active)
        self.alert_ny.until = False
        self.assertTrue(cron_ny.active)  # open-ended alert stays active

    @common.users('cle-lunch-manager')
    def test_cron_sync_nextcall(self):
        # Shifting the notification time must shift the cron's nextcall accordingly.
        cron_ny = self.alert_ny.cron_id
        old_nextcall = cron_ny.nextcall
        self.alert_ny.notification_time -= 5
        self.assertEqual(cron_ny.nextcall, old_nextcall - timedelta(hours=5) + timedelta(days=1))

        # Simulate cron execution
        cron_ny.sudo().lastcall = old_nextcall - timedelta(hours=5)
        cron_ny.sudo().nextcall += timedelta(days=1)

        self.alert_ny.notification_time += 7
        self.assertEqual(cron_ny.nextcall, old_nextcall + timedelta(days=1, hours=2))
        self.alert_ny.notification_time -= 1
        self.assertEqual(cron_ny.nextcall, old_nextcall + timedelta(days=1, hours=1))
41.166667
2,470
8,590
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. import pytz from datetime import datetime, time, timedelta from unittest.mock import patch from odoo import fields from odoo.tests import common from odoo.addons.lunch.tests.common import TestsCommon class TestSupplier(TestsCommon): def setUp(self): super(TestSupplier, self).setUp() self.monday_1am = datetime(2018, 10, 29, 1, 0, 0) self.monday_10am = datetime(2018, 10, 29, 10, 0, 0) self.monday_1pm = datetime(2018, 10, 29, 13, 0, 0) self.monday_8pm = datetime(2018, 10, 29, 20, 0, 0) self.saturday_3am = datetime(2018, 11, 3, 3, 0, 0) self.saturday_10am = datetime(2018, 11, 3, 10, 0, 0) self.saturday_1pm = datetime(2018, 11, 3, 13, 0, 0) self.saturday_8pm = datetime(2018, 11, 3, 20, 0, 0) @common.users('cle-lunch-manager') def test_send_email_cron(self): self.supplier_kothai.cron_id.ensure_one() self.assertEqual(self.supplier_kothai.cron_id.nextcall.time(), time(15, 0)) self.assertEqual(self.supplier_kothai.cron_id.code, f"""\ # This cron is dynamically controlled by Lunch Supplier. # Do NOT modify this cron, modify the related record instead. 
env['lunch.supplier'].browse([{self.supplier_kothai.id}])._send_auto_email()""") cron_id = self.supplier_kothai.cron_id.id self.supplier_kothai.unlink() self.assertFalse(self.env['ir.cron'].sudo().search([('id', '=', cron_id)])) @common.users('cle-lunch-manager') def test_compute_available_today(self): tests = [(self.monday_1am, True), (self.monday_10am, True), (self.monday_1pm, True), (self.monday_8pm, True), (self.saturday_3am, False), (self.saturday_10am, False), (self.saturday_1pm, False), (self.saturday_8pm, False)] for value, result in tests: with patch.object(fields.Datetime, 'now', return_value=value) as _: assert self.supplier_pizza_inn.available_today == result,\ 'supplier pizza inn should %s considered available on %s' % ('be' if result else 'not be', value) self.env['lunch.supplier'].invalidate_cache(['available_today'], [self.supplier_pizza_inn.id]) @common.users('cle-lunch-manager') def test_search_available_today(self): ''' This test checks that _search_available_today returns a valid domain ''' self.env.user.tz = 'Europe/Brussels' Supplier = self.env['lunch.supplier'] tests = [(self.monday_1am, 1.0, 'mon'), (self.monday_10am, 10.0, 'mon'), (self.monday_1pm, 13.0, 'mon'), (self.monday_8pm, 20.0, 'mon'), (self.saturday_3am, 3.0, 'sat'), (self.saturday_10am, 10.0, 'sat'), (self.saturday_1pm, 13.0, 'sat'), (self.saturday_8pm, 20.0, 'sat')] # It should return an empty domain if we compare to values other than datetime assert Supplier._search_available_today('>', 7) == [] assert Supplier._search_available_today('>', True) == [] for value, rvalue, dayname in tests: with patch.object(fields.Datetime, 'now', return_value=value) as _: assert Supplier._search_available_today('=', True) == ['&', '|', ('recurrency_end_date', '=', False), ('recurrency_end_date', '>', value.replace(tzinfo=pytz.UTC).astimezone(pytz.timezone(self.env.user.tz))), (dayname, '=', True)],\ 'Wrong domain generated for values (%s, %s)' % (value, rvalue) with patch.object(fields.Datetime, 
'now', return_value=self.monday_10am) as _: assert self.supplier_pizza_inn in Supplier.search([('available_today', '=', True)]) @common.users('cle-lunch-manager') def test_auto_email_send(self): with patch.object(fields.Datetime, 'now', return_value=self.monday_1pm) as _: with patch.object(fields.Date, 'today', return_value=self.monday_1pm.date()) as _: with patch.object(fields.Date, 'context_today', return_value=self.monday_1pm.date()) as _: line = self.env['lunch.order'].create({ 'product_id': self.product_pizza.id, 'date': self.monday_1pm.date(), 'supplier_id': self.supplier_pizza_inn.id, }) line.action_order() assert line.state == 'ordered' self.supplier_pizza_inn._send_auto_email() assert line.state == 'confirmed' line = self.env['lunch.order'].create({ 'product_id': self.product_pizza.id, 'topping_ids_1': [(6, 0, [self.topping_olives.id])], 'date': self.monday_1pm.date(), 'supplier_id': self.supplier_pizza_inn.id, }) line2 = self.env['lunch.order'].create({ 'product_id': self.product_sandwich_tuna.id, 'date': self.monday_1pm.date(), 'supplier_id': self.supplier_coin_gourmand.id, }) (line | line2).action_order() assert line.state == 'ordered' assert line2.state == 'ordered' self.supplier_pizza_inn._send_auto_email() assert line.state == 'confirmed' assert line2.state == 'ordered' line_1 = self.env['lunch.order'].create({ 'product_id': self.product_pizza.id, 'quantity': 2, 'date': self.monday_1pm.date(), 'supplier_id': self.product_pizza.id, }) line_2 = self.env['lunch.order'].create({ 'product_id': self.product_pizza.id, 'topping_ids_1': [(6, 0, [self.topping_olives.id])], 'date': self.monday_1pm.date(), 'supplier_id': self.product_pizza.id, }) line_3 = self.env['lunch.order'].create({ 'product_id': self.product_sandwich_tuna.id, 'quantity': 2, 'date': self.monday_1pm.date(), 'supplier_id': self.supplier_coin_gourmand.id, }) (line_1 | line_2 | line_3).action_order() assert all(line.state == 'ordered' for line in [line_1, line_2, line_3]) 
self.supplier_pizza_inn._send_auto_email() @common.users('cle-lunch-manager') def test_cron_sync_create(self): cron_ny = self.supplier_kothai.cron_id # I am at New-York self.assertTrue(cron_ny.active) self.assertEqual(cron_ny.name, "Lunch: send automatic email to Kothai") self.assertEqual( [line for line in cron_ny.code.splitlines() if not line.lstrip().startswith("#")], ["env['lunch.supplier'].browse([%i])._send_auto_email()" % self.supplier_kothai.id]) self.assertEqual(cron_ny.nextcall, datetime(2021, 1, 29, 15, 0)) # New-york is UTC-5 @common.users('cle-lunch-manager') def test_cron_sync_active(self): cron_ny = self.supplier_kothai.cron_id self.supplier_kothai.active = False self.assertFalse(cron_ny.active) self.supplier_kothai.active = True self.assertTrue(cron_ny.active) self.supplier_kothai.send_by = 'phone' self.assertFalse(cron_ny.active) self.supplier_kothai.send_by = 'mail' self.assertTrue(cron_ny.active) @common.users('cle-lunch-manager') def test_cron_sync_nextcall(self): cron_ny = self.supplier_kothai.cron_id old_nextcall = cron_ny.nextcall self.supplier_kothai.automatic_email_time -= 5 self.assertEqual(cron_ny.nextcall, old_nextcall - timedelta(hours=5) + timedelta(days=1)) # Simulate cron execution cron_ny.sudo().lastcall = old_nextcall - timedelta(hours=5) cron_ny.sudo().nextcall += timedelta(days=1) self.supplier_kothai.automatic_email_time += 7 self.assertEqual(cron_ny.nextcall, old_nextcall + timedelta(days=1, hours=2)) self.supplier_kothai.automatic_email_time -= 1 self.assertEqual(cron_ny.nextcall, old_nextcall + timedelta(days=1, hours=1))
45.691489
8,590
4,106
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. from datetime import datetime from freezegun import freeze_time from odoo.tests import common, new_test_user fakenow = datetime(2021, 1, 29, 12, 20, 0) @freeze_time(fakenow) class TestsCommon(common.TransactionCase): @classmethod def setUpClass(cls): super().setUpClass() cls.fakenow = fakenow def setUp(self): super(TestsCommon, self).setUp() self.env['lunch.cashmove'].create({ 'amount': 100, }) self.manager = new_test_user(self.env, 'cle-lunch-manager', 'base.group_user,base.group_partner_manager,lunch.group_lunch_manager') with self.with_user('cle-lunch-manager'): self.location_office_1 = self.env['lunch.location'].create({ 'name' : 'Farm 1', }) self.location_office_2 = self.env['lunch.location'].create({ 'name': 'Farm 2', }) self.partner_pizza_inn = self.env['res.partner'].create({ 'name': 'Pizza Inn', }) self.supplier_pizza_inn = self.env['lunch.supplier'].create({ 'partner_id': self.partner_pizza_inn.id, 'send_by': 'mail', 'automatic_email_time': 11, 'available_location_ids': [ (6, 0, [self.location_office_1.id, self.location_office_2.id]) ], }) self.partner_kothai = self.env['res.partner'].create({ 'name': 'Kothai', }) self.supplier_kothai = self.env['lunch.supplier'].create({ 'partner_id': self.partner_kothai.id, 'send_by': 'mail', 'automatic_email_time': 10, 'tz': 'America/New_York', }) self.partner_coin_gourmand = self.env['res.partner'].create({ 'name': 'Coin Gourmand', }) self.supplier_coin_gourmand = self.env['lunch.supplier'].create({ 'partner_id': self.partner_coin_gourmand.id, 'send_by': 'phone', 'available_location_ids': [ (6, 0, [self.location_office_1.id, self.location_office_2.id]) ], }) self.category_pizza = self.env['lunch.product.category'].create({ 'name': 'Pizza', }) self.category_sandwich = self.env['lunch.product.category'].create({ 'name': 'Sandwich', }) self.product_pizza = self.env['lunch.product'].create({ 'name': 'Pizza', 'category_id': self.category_pizza.id, 
'price': 9, 'supplier_id': self.supplier_pizza_inn.id, }) self.product_sandwich_tuna = self.env['lunch.product'].create({ 'name': 'Tuna Sandwich', 'category_id': self.category_sandwich.id, 'price': 3, 'supplier_id': self.supplier_coin_gourmand.id, }) self.topping_olives = self.env['lunch.topping'].create({ 'name': 'Olives', 'price': 0.3, 'supplier_id': self.supplier_pizza_inn.id, }) self.env['lunch.cashmove'].create({ 'amount': 100, }) self.alert_ny = self.env['lunch.alert'].create({ 'name': 'New York UTC-5', 'mode': 'chat', 'notification_time': 10, 'notification_moment': 'am', 'tz': 'America/New_York', 'message': "", }).with_context(tz='America/New_York') self.alert_tokyo = self.env['lunch.alert'].create({ 'name': 'Tokyo UTC+9', 'mode': 'chat', 'notification_time': 8, 'notification_moment': 'am', 'tz': 'Asia/Tokyo', 'message': "", }).with_context(tz='Asia/Tokyo')
34.504202
4,106
12,703
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. import math import pytz from datetime import datetime, time, timedelta from textwrap import dedent from odoo import api, fields, models from odoo.osv import expression from odoo.tools import float_round from odoo.addons.base.models.res_partner import _tz_get WEEKDAY_TO_NAME = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] CRON_DEPENDS = {'name', 'active', 'send_by', 'automatic_email_time', 'moment', 'tz'} def float_to_time(hours, moment='am'): """ Convert a number of hours into a time object. """ if hours == 12.0 and moment == 'pm': return time.max fractional, integral = math.modf(hours) if moment == 'pm': integral += 12 return time(int(integral), int(float_round(60 * fractional, precision_digits=0)), 0) def time_to_float(t): return float_round(t.hour + t.minute/60 + t.second/3600, precision_digits=2) class LunchSupplier(models.Model): _name = 'lunch.supplier' _description = 'Lunch Supplier' _inherit = ['mail.thread', 'mail.activity.mixin'] partner_id = fields.Many2one('res.partner', string='Vendor', required=True) name = fields.Char('Name', related='partner_id.name', readonly=False) email = fields.Char(related='partner_id.email', readonly=False) email_formatted = fields.Char(related='partner_id.email_formatted', readonly=True) phone = fields.Char(related='partner_id.phone', readonly=False) street = fields.Char(related='partner_id.street', readonly=False) street2 = fields.Char(related='partner_id.street2', readonly=False) zip_code = fields.Char(related='partner_id.zip', readonly=False) city = fields.Char(related='partner_id.city', readonly=False) state_id = fields.Many2one("res.country.state", related='partner_id.state_id', readonly=False) country_id = fields.Many2one('res.country', related='partner_id.country_id', readonly=False) company_id = fields.Many2one('res.company', related='partner_id.company_id', readonly=False, store=True) responsible_id = fields.Many2one('res.users', 
string="Responsible", domain=lambda self: [('groups_id', 'in', self.env.ref('lunch.group_lunch_manager').id)], default=lambda self: self.env.user, help="The responsible is the person that will order lunch for everyone. It will be used as the 'from' when sending the automatic email.") send_by = fields.Selection([ ('phone', 'Phone'), ('mail', 'Email'), ], 'Send Order By', default='phone') automatic_email_time = fields.Float('Order Time', default=12.0, required=True) cron_id = fields.Many2one('ir.cron', ondelete='cascade', required=True, readonly=True) mon = fields.Boolean(default=True) tue = fields.Boolean(default=True) wed = fields.Boolean(default=True) thu = fields.Boolean(default=True) fri = fields.Boolean(default=True) sat = fields.Boolean() sun = fields.Boolean() recurrency_end_date = fields.Date('Until', help="This field is used in order to ") available_location_ids = fields.Many2many('lunch.location', string='Location') available_today = fields.Boolean('This is True when if the supplier is available today', compute='_compute_available_today', search='_search_available_today') tz = fields.Selection(_tz_get, string='Timezone', required=True, default=lambda self: self.env.user.tz or 'UTC') active = fields.Boolean(default=True) moment = fields.Selection([ ('am', 'AM'), ('pm', 'PM'), ], default='am', required=True) delivery = fields.Selection([ ('delivery', 'Delivery'), ('no_delivery', 'No Delivery') ], default='no_delivery') topping_label_1 = fields.Char('Extra 1 Label', required=True, default='Extras') topping_label_2 = fields.Char('Extra 2 Label', required=True, default='Beverages') topping_label_3 = fields.Char('Extra 3 Label', required=True, default='Extra Label 3') topping_ids_1 = fields.One2many('lunch.topping', 'supplier_id', domain=[('topping_category', '=', 1)]) topping_ids_2 = fields.One2many('lunch.topping', 'supplier_id', domain=[('topping_category', '=', 2)]) topping_ids_3 = fields.One2many('lunch.topping', 'supplier_id', domain=[('topping_category', 
'=', 3)]) topping_quantity_1 = fields.Selection([ ('0_more', 'None or More'), ('1_more', 'One or More'), ('1', 'Only One')], 'Extra 1 Quantity', default='0_more', required=True) topping_quantity_2 = fields.Selection([ ('0_more', 'None or More'), ('1_more', 'One or More'), ('1', 'Only One')], 'Extra 2 Quantity', default='0_more', required=True) topping_quantity_3 = fields.Selection([ ('0_more', 'None or More'), ('1_more', 'One or More'), ('1', 'Only One')], 'Extra 3 Quantity', default='0_more', required=True) _sql_constraints = [ ('automatic_email_time_range', 'CHECK(automatic_email_time >= 0 AND automatic_email_time <= 12)', 'Automatic Email Sending Time should be between 0 and 12'), ] def name_get(self): res = [] for supplier in self: if supplier.phone: res.append((supplier.id, '%s %s' % (supplier.name, supplier.phone))) else: res.append((supplier.id, supplier.name)) return res def _sync_cron(self): for supplier in self: supplier = supplier.with_context(tz=supplier.tz) sendat_tz = pytz.timezone(supplier.tz).localize(datetime.combine( fields.Date.context_today(supplier), float_to_time(supplier.automatic_email_time, supplier.moment))) cron = supplier.cron_id.sudo() lc = cron.lastcall if (( lc and sendat_tz.date() <= fields.Datetime.context_timestamp(supplier, lc).date() ) or ( not lc and sendat_tz <= fields.Datetime.context_timestamp(supplier, fields.Datetime.now()) )): sendat_tz += timedelta(days=1) sendat_utc = sendat_tz.astimezone(pytz.UTC).replace(tzinfo=None) cron.active = supplier.active and supplier.send_by == 'mail' cron.name = f"Lunch: send automatic email to {supplier.name}" cron.nextcall = sendat_utc cron.code = dedent(f"""\ # This cron is dynamically controlled by {self._description}. # Do NOT modify this cron, modify the related record instead. 
env['{self._name}'].browse([{supplier.id}])._send_auto_email()""") @api.model_create_multi def create(self, vals_list): for vals in vals_list: for topping in vals.get('topping_ids_2', []): topping[2].update({'topping_category': 2}) for topping in vals.get('topping_ids_3', []): topping[2].update({'topping_category': 3}) crons = self.env['ir.cron'].sudo().create([ { 'user_id': self.env.ref('base.user_root').id, 'active': False, 'interval_type': 'days', 'interval_number': 1, 'numbercall': -1, 'doall': False, 'name': "Lunch: send automatic email", 'model_id': self.env['ir.model']._get_id(self._name), 'state': 'code', 'code': "", } for _ in range(len(vals_list)) ]) self.env['ir.model.data'].sudo().create([{ 'name': f'lunch_supplier_cron_sa_{cron.ir_actions_server_id.id}', 'module': 'lunch', 'res_id': cron.ir_actions_server_id.id, 'model': 'ir.actions.server', # noupdate is set to true to avoid to delete record at module update 'noupdate': True, } for cron in crons]) for vals, cron in zip(vals_list, crons): vals['cron_id'] = cron.id suppliers = super().create(vals_list) suppliers._sync_cron() return suppliers def write(self, values): for topping in values.get('topping_ids_2', []): topping_values = topping[2] if topping_values: topping_values.update({'topping_category': 2}) for topping in values.get('topping_ids_3', []): topping_values = topping[2] if topping_values: topping_values.update({'topping_category': 3}) if values.get('company_id'): self.env['lunch.order'].search([('supplier_id', 'in', self.ids)]).write({'company_id': values['company_id']}) super().write(values) if not CRON_DEPENDS.isdisjoint(values): self._sync_cron() def unlink(self): crons = self.cron_id.sudo() server_actions = crons.ir_actions_server_id super().unlink() crons.unlink() server_actions.unlink() def toggle_active(self): """ Archiving related lunch product """ res = super().toggle_active() active_suppliers = self.filtered(lambda s: s.active) inactive_suppliers = self - active_suppliers Product = 
self.env['lunch.product'].with_context(active_test=False) Product.search([('supplier_id', 'in', active_suppliers.ids)]).write({'active': True}) Product.search([('supplier_id', 'in', inactive_suppliers.ids)]).write({'active': False}) return res def _send_auto_email(self): """ Send an email to the supplier with the order of the day """ # Called daily by cron self.ensure_one() if not self.available_today: return if self.send_by != 'mail': raise ValueError("Cannot send an email to this supplier") orders = self.env['lunch.order'].search([ ('supplier_id', '=', self.id), ('state', '=', 'ordered'), ('date', '=', fields.Date.context_today(self.with_context(tz=self.tz))), ], order="user_id, name") if not orders: return order = { 'company_name': orders[0].company_id.name, 'currency_id': orders[0].currency_id.id, 'supplier_id': self.partner_id.id, 'supplier_name': self.name, 'email_from': self.responsible_id.email_formatted, 'amount_total': sum(order.price for order in orders), } sites = orders.mapped('user_id.last_lunch_location_id').sorted(lambda x: x.name) orders_per_site = orders.sorted(lambda x: x.user_id.last_lunch_location_id.id) email_orders = [{ 'product': order.product_id.name, 'note': order.note, 'quantity': order.quantity, 'price': order.price, 'toppings': order.display_toppings, 'username': order.user_id.name, 'site': order.user_id.last_lunch_location_id.name, } for order in orders_per_site] email_sites = [{ 'name': site.name, 'address': site.address, } for site in sites] self.env.ref('lunch.lunch_order_mail_supplier').with_context( order=order, lines=email_orders, sites=email_sites ).send_mail(self.id) orders.action_confirm() @api.depends('recurrency_end_date', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun') def _compute_available_today(self): now = fields.Datetime.now().replace(tzinfo=pytz.UTC) for supplier in self: now = now.astimezone(pytz.timezone(supplier.tz)) if supplier.recurrency_end_date and now.date() >= supplier.recurrency_end_date: 
supplier.available_today = False else: fieldname = WEEKDAY_TO_NAME[now.weekday()] supplier.available_today = supplier[fieldname] def _search_available_today(self, operator, value): if (not operator in ['=', '!=']) or (not value in [True, False]): return [] searching_for_true = (operator == '=' and value) or (operator == '!=' and not value) now = fields.Datetime.now().replace(tzinfo=pytz.UTC).astimezone(pytz.timezone(self.env.user.tz or 'UTC')) fieldname = WEEKDAY_TO_NAME[now.weekday()] recurrency_domain = expression.OR([ [('recurrency_end_date', '=', False)], [('recurrency_end_date', '>' if searching_for_true else '<', now)] ]) return expression.AND([ recurrency_domain, [(fieldname, operator, value)] ])
42.343333
12,703
5,607
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import base64 from collections import defaultdict from odoo import api, fields, models, _ from odoo.exceptions import UserError from odoo.osv import expression class LunchProduct(models.Model): """ Products available to order. A product is linked to a specific vendor. """ _name = 'lunch.product' _description = 'Lunch Product' _inherit = 'image.mixin' _order = 'name' _check_company_auto = True name = fields.Char('Product Name', required=True, translate=True) category_id = fields.Many2one('lunch.product.category', 'Product Category', check_company=True, required=True) description = fields.Html('Description', translate=True) price = fields.Float('Price', digits='Account', required=True) supplier_id = fields.Many2one('lunch.supplier', 'Vendor', check_company=True, required=True) active = fields.Boolean(default=True) company_id = fields.Many2one('res.company', related='supplier_id.company_id', readonly=False, store=True) currency_id = fields.Many2one('res.currency', related='company_id.currency_id') new_until = fields.Date('New Until') is_new = fields.Boolean(compute='_compute_is_new') favorite_user_ids = fields.Many2many('res.users', 'lunch_product_favorite_user_rel', 'product_id', 'user_id', check_company=True) is_favorite = fields.Boolean(compute='_compute_is_favorite', inverse='_inverse_is_favorite') last_order_date = fields.Date(compute='_compute_last_order_date') product_image = fields.Image(compute='_compute_product_image') # This field is used only for searching is_available_at = fields.Many2one('lunch.location', 'Product Availability', compute='_compute_is_available_at', search='_search_is_available_at') @api.depends('image_128', 'category_id.image_128') def _compute_product_image(self): for product in self: product.product_image = product.image_128 or product.category_id.image_128 @api.depends('new_until') def _compute_is_new(self): today = 
fields.Date.context_today(self) for product in self: if product.new_until: product.is_new = today <= product.new_until else: product.is_new = False @api.depends_context('uid') @api.depends('favorite_user_ids') def _compute_is_favorite(self): for product in self: product.is_favorite = self.env.user in product.favorite_user_ids @api.depends_context('uid') def _compute_last_order_date(self): all_orders = self.env['lunch.order'].search([ ('user_id', '=', self.env.user.id), ('product_id', 'in', self.ids), ]) mapped_orders = defaultdict(lambda: self.env['lunch.order']) for order in all_orders: mapped_orders[order.product_id] |= order for product in self: if not mapped_orders[product]: product.last_order_date = False else: product.last_order_date = max(mapped_orders[product].mapped('date')) def _compute_is_available_at(self): """ Is available_at is always false when browsing it this field is there only to search (see _search_is_available_at) """ for product in self: product.is_available_at = False def _search_is_available_at(self, operator, value): supported_operators = ['in', 'not in', '=', '!='] if not operator in supported_operators: return expression.TRUE_DOMAIN if isinstance(value, int): value = [value] if operator in expression.NEGATIVE_TERM_OPERATORS: return expression.AND([[('supplier_id.available_location_ids', 'not in', value)], [('supplier_id.available_location_ids', '!=', False)]]) return expression.OR([[('supplier_id.available_location_ids', 'in', value)], [('supplier_id.available_location_ids', '=', False)]]) def _sync_active_from_related(self): """ Archive/unarchive product after related field is archived/unarchived """ return self.filtered(lambda p: (p.category_id.active and p.supplier_id.active) != p.active).toggle_active() def toggle_active(self): invalid_products = self.filtered(lambda product: not product.active and not product.category_id.active) if invalid_products: raise UserError(_("The following product categories are archived. 
You should either unarchive the categories or change the category of the product.\n%s", '\n'.join(invalid_products.category_id.mapped('name')))) invalid_products = self.filtered(lambda product: not product.active and not product.supplier_id.active) if invalid_products: raise UserError(_("The following suppliers are archived. You should either unarchive the suppliers or change the supplier of the product.\n%s", '\n'.join(invalid_products.supplier_id.mapped('name')))) return super().toggle_active() def _inverse_is_favorite(self): """ Handled in the write() """ pass def write(self, vals): if 'is_favorite' in vals: if vals.pop('is_favorite'): commands = [(4, product.id) for product in self] else: commands = [(3, product.id) for product in self] self.env.user.write({ 'favorite_lunch_product_ids': commands, }) if not vals: return True return super().write(vals)
43.465116
5,607
415
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class LunchLocation(models.Model): _name = 'lunch.location' _description = 'Lunch Locations' name = fields.Char('Location Name', required=True) address = fields.Text('Address') company_id = fields.Many2one('res.company', default=lambda self: self.env.company)
31.923077
415
12,812
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, fields, models, _ from odoo.exceptions import ValidationError, UserError class LunchOrder(models.Model): _name = 'lunch.order' _description = 'Lunch Order' _order = 'id desc' _display_name = 'product_id' name = fields.Char(related='product_id.name', string="Product Name", store=True, readonly=True) topping_ids_1 = fields.Many2many('lunch.topping', 'lunch_order_topping', 'order_id', 'topping_id', string='Extras 1', domain=[('topping_category', '=', 1)]) topping_ids_2 = fields.Many2many('lunch.topping', 'lunch_order_topping', 'order_id', 'topping_id', string='Extras 2', domain=[('topping_category', '=', 2)]) topping_ids_3 = fields.Many2many('lunch.topping', 'lunch_order_topping', 'order_id', 'topping_id', string='Extras 3', domain=[('topping_category', '=', 3)]) product_id = fields.Many2one('lunch.product', string="Product", required=True) category_id = fields.Many2one( string='Product Category', related='product_id.category_id', store=True) date = fields.Date('Order Date', required=True, readonly=True, states={'new': [('readonly', False)]}, default=fields.Date.context_today) supplier_id = fields.Many2one( string='Vendor', related='product_id.supplier_id', store=True, index=True) user_id = fields.Many2one('res.users', 'User', readonly=True, states={'new': [('readonly', False)]}, default=lambda self: self.env.uid) lunch_location_id = fields.Many2one('lunch.location', default=lambda self: self.env.user.last_lunch_location_id) note = fields.Text('Notes') price = fields.Monetary('Total Price', compute='_compute_total_price', readonly=True, store=True) active = fields.Boolean('Active', default=True) state = fields.Selection([('new', 'To Order'), ('ordered', 'Ordered'), ('confirmed', 'Received'), ('cancelled', 'Cancelled')], 'Status', readonly=True, index=True, default='new') company_id = fields.Many2one('res.company', default=lambda self: 
self.env.company.id) currency_id = fields.Many2one(related='company_id.currency_id', store=True) quantity = fields.Float('Quantity', required=True, default=1) display_toppings = fields.Text('Extras', compute='_compute_display_toppings', store=True) product_description = fields.Html('Description', related='product_id.description') topping_label_1 = fields.Char(related='product_id.supplier_id.topping_label_1') topping_label_2 = fields.Char(related='product_id.supplier_id.topping_label_2') topping_label_3 = fields.Char(related='product_id.supplier_id.topping_label_3') topping_quantity_1 = fields.Selection(related='product_id.supplier_id.topping_quantity_1') topping_quantity_2 = fields.Selection(related='product_id.supplier_id.topping_quantity_2') topping_quantity_3 = fields.Selection(related='product_id.supplier_id.topping_quantity_3') image_1920 = fields.Image(compute='_compute_product_images') image_128 = fields.Image(compute='_compute_product_images') available_toppings_1 = fields.Boolean(help='Are extras available for this product', compute='_compute_available_toppings') available_toppings_2 = fields.Boolean(help='Are extras available for this product', compute='_compute_available_toppings') available_toppings_3 = fields.Boolean(help='Are extras available for this product', compute='_compute_available_toppings') display_reorder_button = fields.Boolean(compute='_compute_display_reorder_button') @api.depends('product_id') def _compute_product_images(self): for line in self: line.image_1920 = line.product_id.image_1920 or line.category_id.image_1920 line.image_128 = line.product_id.image_128 or line.category_id.image_128 @api.depends('category_id') def _compute_available_toppings(self): for order in self: order.available_toppings_1 = bool(order.env['lunch.topping'].search_count([('supplier_id', '=', order.supplier_id.id), ('topping_category', '=', 1)])) order.available_toppings_2 = bool(order.env['lunch.topping'].search_count([('supplier_id', '=', 
order.supplier_id.id), ('topping_category', '=', 2)])) order.available_toppings_3 = bool(order.env['lunch.topping'].search_count([('supplier_id', '=', order.supplier_id.id), ('topping_category', '=', 3)])) @api.depends_context('show_reorder_button') @api.depends('state') def _compute_display_reorder_button(self): show_button = self.env.context.get('show_reorder_button') for order in self: order.display_reorder_button = show_button and order.state == 'confirmed' def init(self): self._cr.execute("""CREATE INDEX IF NOT EXISTS lunch_order_user_product_date ON %s (user_id, product_id, date)""" % self._table) def _extract_toppings(self, values): """ If called in api.multi then it will pop topping_ids_1,2,3 from values """ if self.ids: # TODO This is not taking into account all the toppings for each individual order, this is usually not a problem # since in the interface you usually don't update more than one order at a time but this is a bug nonetheless topping_1 = values.pop('topping_ids_1')[0][2] if 'topping_ids_1' in values else self[:1].topping_ids_1.ids topping_2 = values.pop('topping_ids_2')[0][2] if 'topping_ids_2' in values else self[:1].topping_ids_2.ids topping_3 = values.pop('topping_ids_3')[0][2] if 'topping_ids_3' in values else self[:1].topping_ids_3.ids else: topping_1 = values['topping_ids_1'][0][2] if 'topping_ids_1' in values else [] topping_2 = values['topping_ids_2'][0][2] if 'topping_ids_2' in values else [] topping_3 = values['topping_ids_3'][0][2] if 'topping_ids_3' in values else [] return topping_1 + topping_2 + topping_3 @api.constrains('topping_ids_1', 'topping_ids_2', 'topping_ids_3') def _check_topping_quantity(self): errors = { '1_more': _('You should order at least one %s'), '1': _('You have to order one and only one %s'), } for line in self: for index in range(1, 4): availability = line['available_toppings_%s' % index] quantity = line['topping_quantity_%s' % index] toppings = line['topping_ids_%s' % index].filtered(lambda x: 
x.topping_category == index) label = line['topping_label_%s' % index] if availability and quantity != '0_more': check = bool(len(toppings) == 1 if quantity == '1' else toppings) if not check: raise ValidationError(errors[quantity] % label) @api.model def create(self, values): lines = self._find_matching_lines({ **values, 'toppings': self._extract_toppings(values), }) if lines: # YTI FIXME This will update multiple lines in the case there are multiple # matching lines which should not happen through the interface lines.update_quantity(1) return lines[:1] return super().create(values) def write(self, values): merge_needed = 'note' in values or 'topping_ids_1' in values or 'topping_ids_2' in values or 'topping_ids_3' in values default_location_id = self.env.user.last_lunch_location_id and self.env.user.last_lunch_location_id.id or False if merge_needed: lines_to_deactivate = self.env['lunch.order'] for line in self: # Only write on topping_ids_1 because they all share the same table # and we don't want to remove all the records # _extract_toppings will pop topping_ids_1, topping_ids_2 and topping_ids_3 from values # This also forces us to invalidate the cache for topping_ids_2 and topping_ids_3 that # could have changed through topping_ids_1 without the cache knowing about it toppings = self._extract_toppings(values) self.invalidate_cache(['topping_ids_2', 'topping_ids_3']) values['topping_ids_1'] = [(6, 0, toppings)] matching_lines = self._find_matching_lines({ 'user_id': values.get('user_id', line.user_id.id), 'product_id': values.get('product_id', line.product_id.id), 'note': values.get('note', line.note or False), 'toppings': toppings, 'lunch_location_id': values.get('lunch_location_id', default_location_id), }) if matching_lines: lines_to_deactivate |= line matching_lines.update_quantity(line.quantity) lines_to_deactivate.write({'active': False}) return super(LunchOrder, self - lines_to_deactivate).write(values) return super().write(values) @api.model def 
_find_matching_lines(self, values): default_location_id = self.env.user.last_lunch_location_id and self.env.user.last_lunch_location_id.id or False domain = [ ('user_id', '=', values.get('user_id', self.default_get(['user_id'])['user_id'])), ('product_id', '=', values.get('product_id', False)), ('date', '=', fields.Date.today()), ('note', '=', values.get('note', False)), ('lunch_location_id', '=', values.get('lunch_location_id', default_location_id)), ] toppings = values.get('toppings', []) return self.search(domain).filtered(lambda line: (line.topping_ids_1 | line.topping_ids_2 | line.topping_ids_3).ids == toppings) @api.depends('topping_ids_1', 'topping_ids_2', 'topping_ids_3', 'product_id', 'quantity') def _compute_total_price(self): for line in self: line.price = line.quantity * (line.product_id.price + sum((line.topping_ids_1 | line.topping_ids_2 | line.topping_ids_3).mapped('price'))) @api.depends('topping_ids_1', 'topping_ids_2', 'topping_ids_3') def _compute_display_toppings(self): for line in self: toppings = line.topping_ids_1 | line.topping_ids_2 | line.topping_ids_3 line.display_toppings = ' + '.join(toppings.mapped('name')) def update_quantity(self, increment): for line in self.filtered(lambda line: line.state != 'confirmed'): if line.quantity <= -increment: # TODO: maybe unlink the order? line.active = False else: line.quantity += increment self._check_wallet() def add_to_cart(self): """ This method currently does nothing, we currently need it in order to be able to reuse this model in place of a wizard """ # YTI FIXME: Find a way to drop this. return True def _check_wallet(self): self.flush() for line in self: if self.env['lunch.cashmove'].get_wallet_balance(line.user_id) < 0: raise ValidationError(_('Your wallet does not contain enough money to order that. 
To add some money to your wallet, please contact your lunch manager.')) def action_order(self): for order in self: if not order.supplier_id.available_today: raise UserError(_('The vendor related to this order is not available today.')) if self.filtered(lambda line: not line.product_id.active): raise ValidationError(_('Product is no longer available.')) self.write({ 'state': 'ordered', }) for order in self: order.lunch_location_id = order.user_id.last_lunch_location_id self._check_wallet() def action_reorder(self): self.ensure_one() if not self.supplier_id.available_today: raise UserError(_('The vendor related to this order is not available today.')) self.copy({ 'date': fields.Date.context_today(self), 'state': 'ordered', }) action = self.env['ir.actions.act_window']._for_xml_id('lunch.lunch_order_action') return action def action_confirm(self): self.write({'state': 'confirmed'}) def action_cancel(self): self.write({'state': 'cancelled'}) def action_reset(self): self.write({'state': 'ordered'})
53.831933
12,812
1,771
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import base64 from odoo import api, fields, models from odoo.modules.module import get_module_resource class LunchProductCategory(models.Model): """ Category of the product such as pizza, sandwich, pasta, chinese, burger... """ _name = 'lunch.product.category' _inherit = 'image.mixin' _description = 'Lunch Product Category' @api.model def _default_image(self): image_path = get_module_resource('lunch', 'static/img', 'lunch.png') return base64.b64encode(open(image_path, 'rb').read()) name = fields.Char('Product Category', required=True, translate=True) company_id = fields.Many2one('res.company') currency_id = fields.Many2one('res.currency', related='company_id.currency_id') product_count = fields.Integer(compute='_compute_product_count', help="The number of products related to this category") active = fields.Boolean(string='Active', default=True) image_1920 = fields.Image(default=_default_image) def _compute_product_count(self): product_data = self.env['lunch.product'].read_group([('category_id', 'in', self.ids)], ['category_id'], ['category_id']) data = {product['category_id'][0]: product['category_id_count'] for product in product_data} for category in self: category.product_count = data.get(category.id, 0) def toggle_active(self): """ Archiving related lunch product """ res = super().toggle_active() Product = self.env['lunch.product'].with_context(active_test=False) all_products = Product.search([('category_id', 'in', self.ids)]) all_products._sync_active_from_related() return res
43.195122
1,771
1,070
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models from odoo.tools import formatLang class LunchTopping(models.Model): _name = 'lunch.topping' _description = 'Lunch Extras' name = fields.Char('Name', required=True) company_id = fields.Many2one('res.company', default=lambda self: self.env.company) currency_id = fields.Many2one('res.currency', related='company_id.currency_id') price = fields.Monetary('Price', required=True) supplier_id = fields.Many2one('lunch.supplier', ondelete='cascade') topping_category = fields.Integer('Topping Category', help="This field is a technical field", required=True, default=1) def name_get(self): currency_id = self.env.company.currency_id res = dict(super(LunchTopping, self).name_get()) for topping in self: price = formatLang(self.env, topping.price, currency_obj=currency_id) res[topping.id] = '%s %s' % (topping.name, price) return list(res.items())
41.153846
1,070
238
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import models, fields class Company(models.Model): _inherit = 'res.company' lunch_minimum_threshold = fields.Float()
23.8
238
382
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class ResUsers(models.Model): _inherit = 'res.users' last_lunch_location_id = fields.Many2one('lunch.location') favorite_lunch_product_ids = fields.Many2many('lunch.product', 'lunch_product_favorite_user_rel', 'user_id', 'product_id')
34.727273
382
453
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class ResConfigSettings(models.TransientModel): _inherit = 'res.config.settings' currency_id = fields.Many2one('res.currency', related='company_id.currency_id') company_lunch_minimum_threshold = fields.Float(string="Maximum Allowed Overdraft", readonly=False, related='company_id.lunch_minimum_threshold')
41.181818
453
1,283
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, fields, models, _ from odoo.tools import float_round class LunchCashMove(models.Model): """ Two types of cashmoves: payment (credit) or order (debit) """ _name = 'lunch.cashmove' _description = 'Lunch Cashmove' _order = 'date desc' currency_id = fields.Many2one('res.currency', default=lambda self: self.env.company.currency_id, required=True) user_id = fields.Many2one('res.users', 'User', default=lambda self: self.env.uid) date = fields.Date('Date', required=True, default=fields.Date.context_today) amount = fields.Float('Amount', required=True) description = fields.Text('Description') def name_get(self): return [(cashmove.id, '%s %s' % (_('Lunch Cashmove'), '#%d' % cashmove.id)) for cashmove in self] @api.model def get_wallet_balance(self, user, include_config=True): result = float_round(sum(move['amount'] for move in self.env['lunch.cashmove.report'].search_read( [('user_id', '=', user.id)], ['amount'])), precision_digits=2) if include_config: result += user.company_id.lunch_minimum_threshold return result
42.766667
1,283
7,859
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. import pytz import logging from odoo import api, fields, models from odoo.osv import expression from .lunch_supplier import float_to_time from datetime import datetime, timedelta from textwrap import dedent from odoo.addons.base.models.res_partner import _tz_get _logger = logging.getLogger(__name__) WEEKDAY_TO_NAME = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] CRON_DEPENDS = {'name', 'active', 'mode', 'until', 'notification_time', 'notification_moment', 'tz'} class LunchAlert(models.Model): """ Alerts to display during a lunch order. An alert can be specific to a given day, weekly or daily. The alert is displayed from start to end hour. """ _name = 'lunch.alert' _description = 'Lunch Alert' _order = 'write_date desc, id' name = fields.Char('Alert Name', required=True, translate=True) message = fields.Html('Message', required=True, translate=True) mode = fields.Selection([ ('alert', 'Alert in app'), ('chat', 'Chat notification')], string='Display', default='alert') recipients = fields.Selection([ ('everyone', 'Everyone'), ('last_week', 'Employee who ordered last week'), ('last_month', 'Employee who ordered last month'), ('last_year', 'Employee who ordered last year')], string='Recipients', default='everyone') notification_time = fields.Float(default=10.0, string='Notification Time') notification_moment = fields.Selection([ ('am', 'AM'), ('pm', 'PM')], default='am', required=True) tz = fields.Selection(_tz_get, string='Timezone', required=True, default=lambda self: self.env.user.tz or 'UTC') cron_id = fields.Many2one('ir.cron', ondelete='cascade', required=True, readonly=True) until = fields.Date('Show Until') mon = fields.Boolean(default=True) tue = fields.Boolean(default=True) wed = fields.Boolean(default=True) thu = fields.Boolean(default=True) fri = fields.Boolean(default=True) sat = fields.Boolean(default=True) sun = fields.Boolean(default=True) available_today = fields.Boolean('Is 
Displayed Today', compute='_compute_available_today', search='_search_available_today') active = fields.Boolean('Active', default=True) location_ids = fields.Many2many('lunch.location', string='Location') _sql_constraints = [ ('notification_time_range', 'CHECK(notification_time >= 0 and notification_time <= 12)', 'Notification time must be between 0 and 12') ] @api.depends('mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun') def _compute_available_today(self): today = fields.Date.context_today(self) fieldname = WEEKDAY_TO_NAME[today.weekday()] for alert in self: alert.available_today = alert.until > today if alert.until else True and alert[fieldname] def _search_available_today(self, operator, value): if (not operator in ['=', '!=']) or (not value in [True, False]): return [] searching_for_true = (operator == '=' and value) or (operator == '!=' and not value) today = fields.Date.context_today(self) fieldname = WEEKDAY_TO_NAME[today.weekday()] return expression.AND([ [(fieldname, operator, value)], expression.OR([ [('until', '=', False)], [('until', '>' if searching_for_true else '<', today)], ]) ]) def _sync_cron(self): """ Synchronise the related cron fields to reflect this alert """ for alert in self: alert = alert.with_context(tz=alert.tz) cron_required = ( alert.active and alert.mode == 'chat' and (not alert.until or fields.Date.context_today(alert) <= alert.until) ) sendat_tz = pytz.timezone(alert.tz).localize(datetime.combine( fields.Date.context_today(alert, fields.Datetime.now()), float_to_time(alert.notification_time, alert.notification_moment))) cron = alert.cron_id.sudo() lc = cron.lastcall if (( lc and sendat_tz.date() <= fields.Datetime.context_timestamp(alert, lc).date() ) or ( not lc and sendat_tz <= fields.Datetime.context_timestamp(alert, fields.Datetime.now()) )): sendat_tz += timedelta(days=1) sendat_utc = sendat_tz.astimezone(pytz.UTC).replace(tzinfo=None) cron.name = f"Lunch: alert chat notification ({alert.name})" cron.active = cron_required 
cron.nextcall = sendat_utc cron.code = dedent(f"""\ # This cron is dynamically controlled by {self._description}. # Do NOT modify this cron, modify the related record instead. env['{self._name}'].browse([{alert.id}])._notify_chat()""") @api.model_create_multi def create(self, vals_list): crons = self.env['ir.cron'].sudo().create([ { 'user_id': self.env.ref('base.user_root').id, 'active': False, 'interval_type': 'days', 'interval_number': 1, 'numbercall': -1, 'doall': False, 'name': "Lunch: alert chat notification", 'model_id': self.env['ir.model']._get_id(self._name), 'state': 'code', 'code': "", } for _ in range(len(vals_list)) ]) self.env['ir.model.data'].sudo().create([{ 'name': f'lunch_alert_cron_sa_{cron.ir_actions_server_id.id}', 'module': 'lunch', 'res_id': cron.ir_actions_server_id.id, 'model': 'ir.actions.server', # noupdate is set to true to avoid to delete record at module update 'noupdate': True, } for cron in crons]) for vals, cron in zip(vals_list, crons): vals['cron_id'] = cron.id alerts = super().create(vals_list) alerts._sync_cron() return alerts def write(self, values): super().write(values) if not CRON_DEPENDS.isdisjoint(values): self._sync_cron() def unlink(self): crons = self.cron_id.sudo() server_actions = crons.ir_actions_server_id super().unlink() crons.unlink() server_actions.unlink() def _notify_chat(self): # Called daily by cron self.ensure_one() if not self.available_today: _logger.warning("cancelled, not available today") if self.cron_id and self.until and fields.Date.context_today(self) > self.until: self.cron_id.unlink() self.cron_id = False return if not self.active or self.mode != 'chat': raise ValueError("Cannot send a chat notification in the current state") order_domain = [('state', '!=', 'cancelled')] if self.location_ids.ids: order_domain = expression.AND([order_domain, [('user_id.last_lunch_location_id', 'in', self.location_ids.ids)]]) if self.recipients != 'everyone': weeksago = fields.Date.today() - timedelta(weeks=( 1 if 
self.recipients == 'last_week' else 4 if self.recipients == 'last_month' else 52 # if self.recipients == 'last_year' )) order_domain = expression.AND([order_domain, [('date', '>=', weeksago)]]) partners = self.env['lunch.order'].search(order_domain).user_id.partner_id if partners: self.env['mail.thread'].message_notify(body=self.message, partner_ids=partners.ids)
40.302564
7,859
1,813
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, fields, models, tools, _ class CashmoveReport(models.Model): _name = "lunch.cashmove.report" _description = 'Cashmoves report' _auto = False _order = "date desc" id = fields.Integer('ID') amount = fields.Float('Amount') date = fields.Date('Date') currency_id = fields.Many2one('res.currency', string='Currency') user_id = fields.Many2one('res.users', string='User') description = fields.Text('Description') def name_get(self): return [(cashmove.id, '%s %s' % (_('Lunch Cashmove'), '#%d' % cashmove.id)) for cashmove in self] def init(self): tools.drop_view_if_exists(self._cr, self._table) self._cr.execute(""" CREATE or REPLACE view %s as ( SELECT lc.id as id, lc.amount as amount, lc.date as date, lc.currency_id as currency_id, lc.user_id as user_id, lc.description as description FROM lunch_cashmove lc UNION ALL SELECT -lol.id as id, -lol.price as amount, lol.date as date, lol.currency_id as currency_id, lol.user_id as user_id, format('Order: %%s x %%s %%s', lol.quantity::text, lp.name, lol.display_toppings) as description FROM lunch_order lol JOIN lunch_product lp ON lp.id = lol.product_id WHERE lol.state in ('ordered', 'confirmed') AND lol.active = True ); """ % self._table)
36.26
1,813
6,314
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import _, http, fields from odoo.exceptions import AccessError from odoo.http import request from odoo.osv import expression from odoo.tools import float_round, float_repr class LunchController(http.Controller): @http.route('/lunch/infos', type='json', auth='user') def infos(self, user_id=None): self._check_user_impersonification(user_id) user = request.env['res.users'].browse(user_id) if user_id else request.env.user infos = self._make_infos(user, order=False) lines = self._get_current_lines(user) if lines: lines = [{'id': line.id, 'product': (line.product_id.id, line.product_id.name, float_repr(float_round(line.price, 2), 2)), 'toppings': [(topping.name, float_repr(float_round(topping.price, 2), 2)) for topping in line.topping_ids_1 | line.topping_ids_2 | line.topping_ids_3], 'quantity': line.quantity, 'price': line.price, 'state': line.state, # Only used for _get_state 'note': line.note} for line in lines] raw_state, state = self._get_state(lines) infos.update({ 'total': float_repr(float_round(sum(line['price'] for line in lines), 2), 2), 'raw_state': raw_state, 'state': state, 'lines': lines, }) return infos @http.route('/lunch/trash', type='json', auth='user') def trash(self, user_id=None): self._check_user_impersonification(user_id) user = request.env['res.users'].browse(user_id) if user_id else request.env.user lines = self._get_current_lines(user) lines.action_cancel() lines.unlink() @http.route('/lunch/pay', type='json', auth='user') def pay(self, user_id=None): self._check_user_impersonification(user_id) user = request.env['res.users'].browse(user_id) if user_id else request.env.user lines = self._get_current_lines(user) if lines: lines = lines.filtered(lambda line: line.state == 'new') lines.action_order() return True return False @http.route('/lunch/payment_message', type='json', auth='user') def payment_message(self): return {'message': 
request.env['ir.qweb']._render('lunch.lunch_payment_dialog', {})} @http.route('/lunch/user_location_set', type='json', auth='user') def set_user_location(self, location_id=None, user_id=None): self._check_user_impersonification(user_id) user = request.env['res.users'].browse(user_id) if user_id else request.env.user user.sudo().last_lunch_location_id = request.env['lunch.location'].browse(location_id) return True @http.route('/lunch/user_location_get', type='json', auth='user') def get_user_location(self, user_id=None): self._check_user_impersonification(user_id) user = request.env['res.users'].browse(user_id) if user_id else request.env.user company_ids = request.env.context.get('allowed_company_ids', request.env.company.ids) user_location = user.last_lunch_location_id has_multi_company_access = not user_location.company_id or user_location.company_id.id in company_ids if not user_location or not has_multi_company_access: return request.env['lunch.location'].search([('company_id', 'in', [False] + company_ids)], limit=1).id return user_location.id def _make_infos(self, user, **kwargs): res = dict(kwargs) is_manager = request.env.user.has_group('lunch.group_lunch_manager') currency = user.company_id.currency_id res.update({ 'username': user.sudo().name, 'userimage': '/web/image?model=res.users&id=%s&field=avatar_128' % user.id, 'wallet': request.env['lunch.cashmove'].get_wallet_balance(user, False), 'is_manager': is_manager, 'group_portal_id': request.env.ref('base.group_portal').id, 'locations': request.env['lunch.location'].search_read([], ['name']), 'currency': {'symbol': currency.symbol, 'position': currency.position}, }) user_location = user.last_lunch_location_id has_multi_company_access = not user_location.company_id or user_location.company_id.id in request._context.get('allowed_company_ids', request.env.company.ids) if not user_location or not has_multi_company_access: user.last_lunch_location_id = user_location = request.env['lunch.location'].search([], 
limit=1) alert_domain = expression.AND([ [('available_today', '=', True)], [('location_ids', 'in', user_location.id)], [('mode', '=', 'alert')], ]) res.update({ 'user_location': (user_location.id, user_location.name), 'alerts': request.env['lunch.alert'].search_read(alert_domain, ['message']), }) return res def _check_user_impersonification(self, user_id=None): if (user_id and request.env.uid != user_id and not request.env.user.has_group('lunch.group_lunch_manager')): raise AccessError(_('You are trying to impersonate another user, but this can only be done by a lunch manager')) def _get_current_lines(self, user): return request.env['lunch.order'].search( [('user_id', '=', user.id), ('date', '=', fields.Date.context_today(user)), ('state', '!=', 'cancelled')] ) def _get_state(self, lines): """ This method returns the lowest state of the list of lines eg: [confirmed, confirmed, new] will return ('new', 'To Order') """ states_to_int = {'new': 0, 'ordered': 1, 'confirmed': 2, 'cancelled': 3} int_to_states = ['new', 'ordered', 'confirmed', 'cancelled'] translated_states = dict(request.env['lunch.order']._fields['state']._description_selection(request.env)) state = int_to_states[min(states_to_int[line['state']] for line in lines)] return (state, translated_states[state])
44.153846
6,314
932
py
PYTHON
15.0
{ 'name': "Egypt - Accounting", 'description': """ This is the base module to manage the accounting chart for Egypt in Odoo. ============================================================================== """, 'author': "Odoo SA", 'category': 'Accounting/Localizations/Account Charts', 'version': '1.0', 'depends': ['account','l10n_multilang'], 'data': [ 'data/l10n_eg_chart_data.xml', 'data/account.account.template.csv', 'data/l10n_eg_chart_post_data.xml', 'data/account_tax_report_data.xml', 'data/account_tax_group_data.xml', 'data/account_tax_template_data.xml', 'data/fiscal_templates_data.xml', 'data/account_chart_template_data.xml', 'views/account_tax.xml' ], 'demo': [ 'demo/demo_company.xml', 'demo/demo_partner.xml' ], 'post_init_hook': 'load_translations', 'license': 'LGPL-3', }
33.285714
932
4,644
py
PYTHON
15.0
from odoo import models, fields class ETAAccountTaxMixin(models.AbstractModel): _name = 'l10n_eg.eta.account.tax.mixin' _description = 'ETA tax codes mixin' l10n_eg_eta_code = fields.Selection( selection=[ ('t1_v001', 'T1 - V001 - Export'), ('t1_v002', 'T1 - V002 - Export to free areas and other areas'), ('t1_v003', 'T1 - V003 - Exempted good or service'), ('t1_v004', 'T1 - V004 - A non-taxable good or service'), ('t1_v005', 'T1 - V005 - Exemptions for diplomats, consulates and embassies'), ('t1_v006', 'T1 - V006 - Defence and National security Exemptions'), ('t1_v007', 'T1 - V007 - Agreements exemptions'), ('t1_v008', 'T1 - V008 - Special Exemption and other reasons'), ('t1_v009', 'T1 - V009 - General Item sales'), ('t1_v010', 'T1 - V010 - Other Rates'), ('t2_tbl01', 'T2 - Tbl01 - Table tax (percentage)'), ('t3_tbl02', 'T3 - Tbl02 - Table tax (Fixed Amount)'), ('t4_w001', 'T4 - W001 - Contracting'), ('t4_w002', 'T4 - W002 - Supplies'), ('t4_w003', 'T4 - W003 - Purchases'), ('t4_w004', 'T4 - W004 - Services'), ('t4_w005', 'T4 - W005 - Sums paid by the cooperative societies for car transportation to their members'), ('t4_w006', 'T4 - W006 - Commission agency & brokerage'), ('t4_w007', 'T4 - W007 - Discounts & grants & additional exceptional incentives (smoke, cement companies)'), ('t4_w008', 'T4 - W008 - All discounts & grants & commissions (petroleum, telecommunications, and other)'), ('t4_w009', 'T4 - W009 - Supporting export subsidies'), ('t4_w010', 'T4 - W010 - Professional fees'), ('t4_w011', 'T4 - W011 - Commission & brokerage _A_57'), ('t4_w012', 'T4 - W012 - Hospitals collecting from doctors'), ('t4_w013', 'T4 - W013 - Royalties'), ('t4_w014', 'T4 - W014 - Customs clearance'), ('t4_w015', 'T4 - W015 - Exemption'), ('t4_w016', 'T4 - W016 - advance payments'), ('t5_st01', 'T5 - ST01 - Stamping tax (percentage)'), ('t6_st02', 'T6 - ST02 - Stamping Tax (amount)'), ('t7_ent01', 'T7 - Ent01 - Entertainment tax (rate)'), ('t7_ent02', 'T7 - Ent02 - Entertainment tax 
(amount)'), ('t8_rd01', 'T8 - RD01 - Resource development fee (rate)'), ('t8_rd02', 'T8 - RD02 - Resource development fee (amount)'), ('t9_sc01', 'T9 - SC01 - Service charges (rate)'), ('t9_sc02', 'T9 - SC02 - Service charges (amount)'), ('t10_mn01', 'T10 - Mn01 - Municipality Fees (rate)'), ('t10_mn02', 'T10 - Mn02 - Municipality Fees (amount)'), ('t11_mi01', 'T11 - MI01 - Medical insurance fee (rate)'), ('t11_mi02', 'T11 - MI02 - Medical insurance fee (amount)'), ('t12_of01', 'T12 - OF01 - Other fees (rate)'), ('t12_of02', 'T12 - OF02 - Other fees (amount)'), ('t13_st03', 'T13 - ST03 - Stamping tax (percentage)'), ('t14_st04', 'T14 - ST04 - Stamping Tax (amount)'), ('t15_ent03', 'T15 - Ent03 - Entertainment tax (rate)'), ('t15_ent04', 'T15 - Ent04 - Entertainment tax (amount)'), ('t16_rd03', 'T16 - RD03 - Resource development fee (rate)'), ('t16_rd04', 'T16 - RD04 - Resource development fee (amount)'), ('t17_sc03', 'T17 - SC03 - Service charges (rate)'), ('t17_sc04', 'T17 - SC04 - Service charges (amount)'), ('t18_mn03', 'T18 - Mn03 - Municipality Fees (rate)'), ('t18_mn04', 'T18 - Mn04 - Municipality Fees (amount)'), ('t19_mi03', 'T19 - MI03 - Medical insurance fee (rate)'), ('t19_mi04', 'T19 - MI04 - Medical insurance fee (amount)'), ('t20_of03', 'T20 - OF03 - Other fees (rate)'), ('t20_of04', 'T20 - OF04 - Other fees (amount)') ], string='ETA Code (Egypt)', default=False) class AccountTax(models.Model): _name = 'account.tax' _inherit = ['account.tax', 'l10n_eg.eta.account.tax.mixin'] class AccountTaxTemplate(models.Model): _name = 'account.tax.template' _inherit = ['account.tax.template', 'l10n_eg.eta.account.tax.mixin'] def _get_tax_vals(self, company, tax_template_to_tax): vals = super(AccountTaxTemplate, self)._get_tax_vals(company, tax_template_to_tax) vals.update({ 'l10n_eg_eta_code': self.l10n_eg_eta_code, }) return vals
55.285714
4,644
847
py
PYTHON
15.0
from odoo import models class AccountChartTemplate(models.Model): _inherit = 'account.chart.template' def _prepare_all_journals(self, acc_template_ref, company, journals_dict=None): """ If EGYPT chart, we add 2 new journals TA and IFRS""" if self == self.env.ref('l10n_eg.egypt_chart_template_standard'): if not journals_dict: journals_dict = [] journals_dict.extend( [{"name": "Tax Adjustments", "company_id": company.id, "code": "TA", "type": "general", "sequence": 1, "favorite": True}, {"name": "IFRS 16", "company_id": company.id, "code": "IFRS", "type": "general", "favorite": True, "sequence": 10}]) return super()._prepare_all_journals(acc_template_ref, company, journals_dict=journals_dict)
47.055556
847
700
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Stripe Payment Acquirer', 'version': '2.0', 'category': 'Accounting/Payment Acquirers', 'sequence': 380, 'summary': 'Payment Acquirer: Stripe Implementation', 'description': """Stripe Payment Acquirer""", 'depends': ['payment'], 'data': [ 'views/payment_views.xml', 'views/payment_templates.xml', 'data/payment_acquirer_data.xml', ], 'application': True, 'uninstall_hook': 'uninstall_hook', 'assets': { 'web.assets_frontend': [ 'payment_stripe/static/src/js/payment_form.js', ], }, 'license': 'LGPL-3', }
29.166667
700
1,389
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. def get_publishable_key(acquirer_sudo): """ Return the publishable key for Stripe. Note: This method serves as a hook for modules that would fully implement Stripe Connect. :param recordset acquirer_sudo: The acquirer on which the key should be read, as a sudoed `payment.acquire` record. :return: The publishable key :rtype: str """ return acquirer_sudo.stripe_publishable_key def get_secret_key(acquirer_sudo): """ Return the secret key for Stripe. Note: This method serves as a hook for modules that would fully implement Stripe Connect. :param recordset acquirer_sudo: The acquirer on which the key should be read, as a sudoed `payment.acquire` record. :return: The secret key :rtype: str """ return acquirer_sudo.stripe_secret_key def get_webhook_secret(acquirer_sudo): """ Return the webhook secret for Stripe. Note: This method serves as a hook for modules that would fully implement Stripe Connect. :param recordset acquirer_sudo: The acquirer on which the key should be read, as a sudoed `payment.acquire` record. :returns: The webhook secret :rtype: str """ return acquirer_sudo.stripe_webhook_secret
35.615385
1,389
1,182
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. from collections import namedtuple API_VERSION = '2019-05-16' # The API version of Stripe implemented in this module # Stripe proxy URL PROXY_URL = 'https://stripe.api.odoo.com/api/stripe/' # Support payment method types PMT = namedtuple('PaymentMethodType', ['name', 'countries', 'currencies', 'recurrence']) PAYMENT_METHOD_TYPES = [ PMT('card', [], [], 'recurring'), PMT('ideal', ['nl'], ['eur'], 'punctual'), PMT('bancontact', ['be'], ['eur'], 'punctual'), PMT('eps', ['at'], ['eur'], 'punctual'), PMT('giropay', ['de'], ['eur'], 'punctual'), PMT('p24', ['pl'], ['eur', 'pln'], 'punctual'), ] # Mapping of transaction states to Stripe {Payment,Setup}Intent statuses. # See https://stripe.com/docs/payments/intents#intent-statuses for the exhaustive list of status. INTENT_STATUS_MAPPING = { 'draft': ('requires_payment_method', 'requires_confirmation', 'requires_action'), 'pending': ('processing',), 'done': ('succeeded',), 'cancel': ('canceled',), } # Events which are handled by the webhook WEBHOOK_HANDLED_EVENTS = [ 'checkout.session.completed', ]
35.818182
1,182
3,343
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. from unittest.mock import patch from odoo.tests import tagged from odoo.tools import mute_logger from odoo.addons.payment_stripe.controllers.onboarding import OnboardingController from odoo.addons.payment_stripe.const import WEBHOOK_HANDLED_EVENTS from .common import StripeCommon @tagged('post_install', '-at_install') class StripeTest(StripeCommon): def test_processing_values(self): dummy_session_id = 'cs_test_sbTG0yGwTszAqFUP8Ulecr1bUwEyQEo29M8taYvdP7UA6Qr37qX6uA6w' tx = self.create_transaction(flow='redirect') # We don't really care what the flow is here. # Ensure no external API call is done, we only want to check the processing values logic def mock_stripe_create_checkout_session(self): return {'id': dummy_session_id} with patch.object( type(self.env['payment.transaction']), '_stripe_create_checkout_session', mock_stripe_create_checkout_session, ), mute_logger('odoo.addons.payment.models.payment_transaction'): processing_values = tx._get_processing_values() self.assertEqual(processing_values['publishable_key'], self.stripe.stripe_publishable_key) self.assertEqual(processing_values['session_id'], dummy_session_id) def test_onboarding_action_redirect_to_url(self): """ Test that the action generate and return an URL when the acquirer is disabled. """ with patch.object( type(self.env['payment.acquirer']), '_stripe_fetch_or_create_connected_account', return_value={'id': 'dummy'}, ), patch.object( type(self.env['payment.acquirer']), '_stripe_create_account_link', return_value='https://dummy.url', ): onboarding_url = self.stripe.action_stripe_connect_account() self.assertEqual(onboarding_url['url'], 'https://dummy.url') def test_only_create_webhook_if_not_already_done(self): """ Test that a webhook is created only if the webhook secret is not already set. 
""" self.stripe.stripe_webhook_secret = False with patch.object(type(self.env['payment.acquirer']), '_stripe_make_request') as mock: self.stripe.action_stripe_create_webhook() self.assertEqual(mock.call_count, 1) def test_do_not_create_webhook_if_already_done(self): """ Test that no webhook is created if the webhook secret is already set. """ self.stripe.stripe_webhook_secret = 'dummy' with patch.object(type(self.env['payment.acquirer']), '_stripe_make_request') as mock: self.stripe.action_stripe_create_webhook() self.assertEqual(mock.call_count, 0) def test_create_account_link_pass_required_parameters(self): """ Test that the generation of an account link includes all the required parameters. """ with patch.object( type(self.env['payment.acquirer']), '_stripe_make_proxy_request', return_value={'url': 'https://dummy.url'}, ) as mock: self.stripe._stripe_create_account_link('dummy', 'dummy') for payload_param in ('account', 'return_url', 'refresh_url', 'type'): self.assertIn(payload_param, mock.call_args.kwargs['payload'].keys())
49.895522
3,343
688
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo.addons.payment.tests.common import PaymentCommon class StripeCommon(PaymentCommon): @classmethod def setUpClass(cls, chart_template_ref=None): super().setUpClass(chart_template_ref=chart_template_ref) cls.stripe = cls._prepare_acquirer('stripe', update_values={ 'stripe_secret_key': 'sk_test_KJtHgNwt2KS3xM7QJPr4O5E8', 'stripe_publishable_key': 'pk_test_QSPnimmb4ZhtkEy3Uhdm4S6J', 'stripe_webhook_secret': 'whsec_vG1fL6CMUouQ7cObF2VJprLVXT5jBLxB', 'payment_icon_ids': [(5, 0, 0)], }) cls.acquirer = cls.stripe
38.222222
688
15,900
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. import logging import pprint from werkzeug import urls from odoo import _, api, fields, models from odoo.exceptions import UserError, ValidationError from odoo.addons.payment import utils as payment_utils from odoo.addons.payment_stripe import utils as stripe_utils from odoo.addons.payment_stripe.const import INTENT_STATUS_MAPPING, PAYMENT_METHOD_TYPES from odoo.addons.payment_stripe.controllers.main import StripeController _logger = logging.getLogger(__name__) class PaymentTransaction(models.Model): _inherit = 'payment.transaction' stripe_payment_intent = fields.Char(string="Stripe Payment Intent ID", readonly=True) def _get_specific_processing_values(self, processing_values): """ Override of payment to return Stripe-specific processing values. Note: self.ensure_one() from `_get_processing_values` :param dict processing_values: The generic processing values of the transaction :return: The dict of acquirer-specific processing values :rtype: dict """ res = super()._get_specific_processing_values(processing_values) if self.provider != 'stripe' or self.operation == 'online_token': return res checkout_session = self._stripe_create_checkout_session() return { 'publishable_key': stripe_utils.get_publishable_key(self.acquirer_id), 'session_id': checkout_session['id'], } def _stripe_create_checkout_session(self): """ Create and return a Checkout Session. :return: The Checkout Session :rtype: dict """ # Filter payment method types by available payment method existing_pms = [pm.name.lower() for pm in self.env['payment.icon'].search([])] linked_pms = [pm.name.lower() for pm in self.acquirer_id.payment_icon_ids] pm_filtered_pmts = filter( lambda pmt: pmt.name == 'card' # If the PM (payment.icon) record related to a PMT doesn't exist, don't filter out the # PMT because the user couldn't even have linked it to the acquirer in the first place. 
                   or (pmt.name in linked_pms or pmt.name not in existing_pms),
            PAYMENT_METHOD_TYPES
        )
        # Filter payment method types by country code (pmt.countries empty = available everywhere)
        country_code = self.partner_country_id and self.partner_country_id.code.lower()
        country_filtered_pmts = filter(
            lambda pmt: not pmt.countries or country_code in pmt.countries, pm_filtered_pmts
        )
        # Filter payment method types by currency name (pmt.currencies empty = all currencies)
        currency_name = self.currency_id.name.lower()
        currency_filtered_pmts = filter(
            lambda pmt: not pmt.currencies or currency_name in pmt.currencies,
            country_filtered_pmts
        )
        # Filter payment method types by recurrence if the transaction must be tokenized
        if self.tokenize:
            recurrence_filtered_pmts = filter(
                lambda pmt: pmt.recurrence == 'recurring', currency_filtered_pmts
            )
        else:
            recurrence_filtered_pmts = currency_filtered_pmts
        # Build the session values related to payment method types, in Stripe's indexed-key format
        pmt_values = {}
        for pmt_id, pmt_name in enumerate(map(lambda pmt: pmt.name, recurrence_filtered_pmts)):
            pmt_values[f'payment_method_types[{pmt_id}]'] = pmt_name

        # Create the session according to the operation and return it
        customer = self._stripe_create_customer()
        common_session_values = self._get_common_stripe_session_values(pmt_values, customer)
        base_url = self.acquirer_id.get_base_url()
        if self.operation == 'online_redirect':
            return_url = f'{urls.url_join(base_url, StripeController._checkout_return_url)}' \
                         f'?reference={urls.url_quote_plus(self.reference)}'
            # Specify a future usage for the payment intent to:
            # 1. attach the payment method to the created customer
            # 2. trigger a 3DS check if one is required, while the customer is still present
            future_usage = 'off_session' if self.tokenize else None
            checkout_session = self.acquirer_id._stripe_make_request(
                'checkout/sessions', payload={
                    **common_session_values,
                    'mode': 'payment',
                    'success_url': return_url,
                    'cancel_url': return_url,
                    'line_items[0][price_data][currency]': self.currency_id.name,
                    'line_items[0][price_data][product_data][name]': self.reference,
                    'line_items[0][price_data][unit_amount]': payment_utils.to_minor_currency_units(
                        self.amount, self.currency_id
                    ),
                    'line_items[0][quantity]': 1,
                    'payment_intent_data[description]': self.reference,
                    'payment_intent_data[setup_future_usage]': future_usage,
                }
            )
            # Keep the intent id so return/webhook flows can fetch the PaymentIntent later
            self.stripe_payment_intent = checkout_session['payment_intent']
        else:  # 'validation'
            # {CHECKOUT_SESSION_ID} is a template filled by Stripe when the Session is created
            return_url = f'{urls.url_join(base_url, StripeController._validation_return_url)}' \
                         f'?reference={urls.url_quote_plus(self.reference)}' \
                         f'&checkout_session_id={{CHECKOUT_SESSION_ID}}'
            checkout_session = self.acquirer_id._stripe_make_request(
                'checkout/sessions', payload={
                    **common_session_values,
                    'mode': 'setup',
                    'success_url': return_url,
                    'cancel_url': return_url,
                }
            )
        return checkout_session

    def _stripe_create_customer(self):
        """ Create and return a Customer.

        :return: The Customer
        :rtype: dict
        """
        customer = self.acquirer_id._stripe_make_request(
            'customers', payload={
                # Stripe rejects empty strings for these fields; send None to omit them instead
                'address[city]': self.partner_city or None,
                'address[country]': self.partner_country_id.code or None,
                'address[line1]': self.partner_address or None,
                'address[postal_code]': self.partner_zip or None,
                'address[state]': self.partner_state_id.name or None,
                'description': f'Odoo Partner: {self.partner_id.name} (id: {self.partner_id.id})',
                'email': self.partner_email or None,
                'name': self.partner_name,
                # Stripe limits the phone field length; truncate to 20 characters
                'phone': self.partner_phone and self.partner_phone[:20] or None,
            }
        )
        return customer

    def _get_common_stripe_session_values(self, pmt_values, customer):
        """ Return the Stripe Session values that are common to redirection and validation.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.

        :param dict pmt_values: The payment method types values
        :param dict customer: The Stripe customer to assign to the session
        :return: The common Stripe Session values
        :rtype: dict
        """
        return {
            **pmt_values,
            'client_reference_id': self.reference,
            # Assign a customer to the session so that Stripe automatically attaches the payment
            # method to it in a validation flow. In checkout flow, a customer is automatically
            # created if not provided but we still do it here to avoid requiring the customer to
            # enter his email on the checkout page.
            'customer': customer['id'],
        }

    def _send_payment_request(self):
        """ Override of payment to send a payment request to Stripe with a confirmed PaymentIntent.

        Note: self.ensure_one()

        :return: None
        :raise: UserError if the transaction is not linked to a token
        """
        super()._send_payment_request()
        if self.provider != 'stripe':
            return

        # Make the payment request to Stripe
        if not self.token_id:
            raise UserError("Stripe: " + _("The transaction is not linked to a token."))
        payment_intent = self._stripe_create_payment_intent()
        feedback_data = {'reference': self.reference}
        StripeController._include_payment_intent_in_feedback_data(payment_intent, feedback_data)
        _logger.info("entering _handle_feedback_data with data:\n%s", pprint.pformat(feedback_data))
        self._handle_feedback_data('stripe', feedback_data)

    def _stripe_create_payment_intent(self):
        """ Create and return a PaymentIntent.

        Note: self.ensure_one()

        :return: The Payment Intent
        :rtype: dict
        """
        if not self.token_id.stripe_payment_method:  # Pre-SCA token -> migrate it
            self.token_id._stripe_sca_migrate_customer()

        response = self.acquirer_id._stripe_make_request(
            'payment_intents',
            payload=self._stripe_prepare_payment_intent_payload(),
            offline=self.operation == 'offline',
            # Make the request idempotent to prevent multiple payments (e.g., rollback mechanism).
            idempotency_key=payment_utils.generate_idempotency_key(
                self, scope='payment_intents_token'
            ) if self.operation == 'offline' else None,
        )
        if 'error' not in response:
            payment_intent = response
        else:  # A processing error was returned in place of the payment intent
            error_msg = response['error'].get('message')
            self._set_error("Stripe: " + _(
                "The communication with the API failed.\n"
                "Stripe gave us the following info about the problem:\n'%s'", error_msg
            ))  # Flag transaction as in error now as the intent status might have a valid value
            payment_intent = response['error'].get('payment_intent')  # Get the PI from the error

        return payment_intent

    def _stripe_prepare_payment_intent_payload(self):
        """ Prepare the payload for the creation of a payment intent in Stripe format.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.
        Note: self.ensure_one()

        :return: The Stripe-formatted payload for the payment intent request
        :rtype: dict
        """
        return {
            'amount': payment_utils.to_minor_currency_units(self.amount, self.currency_id),
            'currency': self.currency_id.name.lower(),
            'confirm': True,
            'customer': self.token_id.acquirer_ref,
            # 'off_session' tells Stripe the customer is not present to complete a 3DS check
            'off_session': True,
            'payment_method': self.token_id.stripe_payment_method,
            'description': self.reference,
        }

    @api.model
    def _get_tx_from_feedback_data(self, provider, data):
        """ Override of payment to find the transaction based on Stripe data.

        :param str provider: The provider of the acquirer that handled the transaction
        :param dict data: The feedback data sent by the provider
        :return: The transaction if found
        :rtype: recordset of `payment.transaction`
        :raise: ValidationError if inconsistent data were received
        :raise: ValidationError if the data match no transaction
        """
        tx = super()._get_tx_from_feedback_data(provider, data)
        if provider != 'stripe':
            return tx

        reference = data.get('reference')
        if not reference:
            raise ValidationError("Stripe: " + _("Received data with missing merchant reference"))

        tx = self.search([('reference', '=', reference), ('provider', '=', 'stripe')])
        if not tx:
            raise ValidationError(
                "Stripe: " + _("No transaction found matching reference %s.", reference)
            )
        return tx

    def _process_feedback_data(self, data):
        """ Override of payment to process the transaction based on Stripe data.

        Note: self.ensure_one()

        :param dict data: The feedback data build from information passed to the return route.
                          Depending on the operation of the transaction, the entries with the keys
                          'payment_intent', 'charge', 'setup_intent' and 'payment_method' can be
                          populated with their corresponding Stripe API objects.
        :return: None
        :raise: ValidationError if inconsistent data were received
        """
        super()._process_feedback_data(data)
        if self.provider != 'stripe':
            return

        if 'charge' in data:
            self.acquirer_reference = data['charge']['id']

        # Handle the intent status: validation flows carry a SetupIntent, payment flows a
        # PaymentIntent; both expose a comparable 'status' field.
        if self.operation == 'validation':
            intent_status = data.get('setup_intent', {}).get('status')
        else:  # 'online_redirect', 'online_token', 'offline'
            intent_status = data.get('payment_intent', {}).get('status')
        if not intent_status:
            raise ValidationError(
                "Stripe: " + _("Received data with missing intent status.")
            )
        if intent_status in INTENT_STATUS_MAPPING['draft']:
            pass
        elif intent_status in INTENT_STATUS_MAPPING['pending']:
            self._set_pending()
        elif intent_status in INTENT_STATUS_MAPPING['done']:
            if self.tokenize:
                self._stripe_tokenize_from_feedback_data(data)
            self._set_done()
        elif intent_status in INTENT_STATUS_MAPPING['cancel']:
            self._set_canceled()
        else:  # Classify unknown intent statuses as `error` tx state
            _logger.warning("received data with invalid intent status: %s", intent_status)
            self._set_error(
                "Stripe: " + _("Received data with invalid intent status: %s", intent_status)
            )

    def _stripe_tokenize_from_feedback_data(self, data):
        """ Create a new token based on the feedback data.

        :param dict data: The feedback data built with Stripe objects.
                          See `_process_feedback_data`.
        :return: None
        """
        # The payment method and customer live in different objects depending on the flow
        if self.operation == 'online_redirect':
            payment_method_id = data.get('charge', {}).get('payment_method')
            customer_id = data.get('charge', {}).get('customer')
        else:  # 'validation'
            payment_method_id = data.get('setup_intent', {}).get('payment_method', {}).get('id')
            customer_id = data.get('setup_intent', {}).get('customer')
        payment_method = data.get('payment_method')
        if not payment_method_id or not payment_method:
            _logger.warning("requested tokenization with payment method missing from feedback data")
            return

        if payment_method.get('type') != 'card':
            # Only 'card' payment methods can be tokenized. This case should normally not happen as
            # non-recurring payment methods are not shown to the customer if the "Save my payment
            # details checkbox" is shown. Still, better be on the safe side..
            _logger.warning("requested tokenization of non-recurring payment method")
            return

        token = self.env['payment.token'].create({
            'acquirer_id': self.acquirer_id.id,
            'name': payment_utils.build_token_name(payment_method['card'].get('last4')),
            'partner_id': self.partner_id.id,
            'acquirer_ref': customer_id,
            'verified': True,
            'stripe_payment_method': payment_method_id,
        })
        self.write({
            'token_id': token,
            'tokenize': False,
        })
        _logger.info(
            "created token with id %s for partner with id %s", token.id, self.partner_id.id
        )
45.821326
15,900
2,026
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import logging
import pprint

from odoo import _, fields, models
from odoo.exceptions import ValidationError

_logger = logging.getLogger(__name__)


class PaymentToken(models.Model):
    _inherit = 'payment.token'

    stripe_payment_method = fields.Char(string="Stripe Payment Method ID", readonly=True)

    def _stripe_sca_migrate_customer(self):
        """ Migrate a token from the old implementation of Stripe to the SCA-compliant one.

        In the old implementation, it was possible to create a Charge by giving only the customer id
        and let Stripe use the default source (= default payment method). Stripe now requires to
        specify the payment method for each new PaymentIntent. To do so, we fetch the payment method
        associated to a customer and save its id on the token. This migration happens once per token
        created with the old implementation.

        Note: self.ensure_one()

        :return: None
        :raise: ValidationError if no card payment method is found for the customer
        """
        self.ensure_one()

        # The old flow created a dedicated customer per token, so a single 'card' payment
        # method is expected for the customer referenced by this token.
        response_content = self.acquirer_id._stripe_make_request(
            'payment_methods',
            payload={
                'customer': self.acquirer_ref,
                'type': 'card',
                'limit': 1,  # A new customer is created for each new token. Never > 1 card.
            },
            method='GET'
        )
        _logger.info("received payment_methods response:\n%s", pprint.pformat(response_content))

        # Persist the fetched payment method id on the token for future PaymentIntents.
        available_methods = response_content.get('data', [])
        migrated_method_id = available_methods and available_methods[0].get('id')
        if not migrated_method_id:
            raise ValidationError("Stripe: " + _("Unable to convert payment token to new API."))
        self.stripe_payment_method = migrated_method_id
        _logger.info("converted token with id %s to new API", self.id)
40.52
2,026
429
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import api, models


class AccountPaymentMethod(models.Model):
    _inherit = 'account.payment.method'

    @api.model
    def _get_payment_method_information(self):
        """ Register the 'stripe' payment method: unique per company, bank journals only. """
        information = super()._get_payment_method_information()
        information['stripe'] = {
            'mode': 'unique',
            'domain': [('type', '=', 'bank')],
        }
        return information
30.642857
429
17,153
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import logging
import uuid

import requests
from werkzeug.urls import url_encode, url_join

from odoo import _, api, fields, models
from odoo.exceptions import ValidationError

from odoo.addons.payment_stripe import utils as stripe_utils
from odoo.addons.payment_stripe import const
from odoo.addons.payment_stripe.controllers.main import StripeController
from odoo.addons.payment_stripe.controllers.onboarding import OnboardingController

_logger = logging.getLogger(__name__)


class PaymentAcquirer(models.Model):
    _inherit = 'payment.acquirer'

    provider = fields.Selection(
        selection_add=[('stripe', "Stripe")], ondelete={'stripe': 'set default'})
    stripe_publishable_key = fields.Char(
        string="Publishable Key",
        help="The key solely used to identify the account with Stripe",
        required_if_provider='stripe')
    stripe_secret_key = fields.Char(
        string="Secret Key", required_if_provider='stripe', groups='base.group_system')
    stripe_webhook_secret = fields.Char(
        string="Webhook Signing Secret",
        help="If a webhook is enabled on your Stripe account, this signing secret must be set to "
             "authenticate the messages sent from Stripe to Odoo.",
        groups='base.group_system')

    #=== CONSTRAINT METHODS ===#

    @api.constrains('state', 'stripe_publishable_key', 'stripe_secret_key')
    def _check_state_of_connected_account_is_never_test(self):
        """ Check that the acquirer of a connected account can never be set to 'test'.

        This constraint is defined in the present module to allow the export of the translation
        string of the `ValidationError` should it be raised by modules that would fully implement
        Stripe Connect.

        Additionally, the field `state` is used as a trigger for this constraint to allow those
        modules to indirectly trigger it when writing on custom fields. Indeed, by always writing on
        `state` together with writing on those custom fields, the constraint would be triggered.

        :return: None
        :raise ValidationError: If the acquirer of a connected account is set in state 'test'.
        """
        for acquirer in self:
            if acquirer.state == 'test' and acquirer._stripe_has_connected_account():
                raise ValidationError(_(
                    "You cannot set the acquirer to Test Mode while it is linked with your Stripe "
                    "account."
                ))

    def _stripe_has_connected_account(self):
        """ Return whether the acquirer is linked to a connected Stripe account.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.
        Note: self.ensure_one()

        :return: Whether the acquirer is linked to a connected Stripe account
        :rtype: bool
        """
        self.ensure_one()
        return False

    @api.constrains('state')
    def _check_onboarding_of_enabled_provider_is_completed(self):
        """ Check that the acquirer cannot be set to 'enabled' if the onboarding is ongoing.

        This constraint is defined in the present module to allow the export of the translation
        string of the `ValidationError` should it be raised by modules that would fully implement
        Stripe Connect.

        :return: None
        :raise ValidationError: If the acquirer of a connected account is set in state 'enabled'
                                while the onboarding is not finished.
        """
        for acquirer in self:
            if acquirer.state == 'enabled' and acquirer._stripe_onboarding_is_ongoing():
                raise ValidationError(_(
                    "You cannot set the acquirer state to Enabled until your onboarding to Stripe "
                    "is completed."
                ))

    def _stripe_onboarding_is_ongoing(self):
        """ Return whether the acquirer is linked to an ongoing onboarding to Stripe Connect.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.
        Note: self.ensure_one()

        :return: Whether the acquirer is linked to an ongoing onboarding to Stripe Connect
        :rtype: bool
        """
        self.ensure_one()
        return False

    # === ACTION METHODS === #

    def action_stripe_connect_account(self, menu_id=None):
        """ Create a Stripe Connect account and redirect the user to the next onboarding step.

        If the acquirer is already enabled, close the current window. Otherwise, generate a Stripe
        Connect onboarding link and redirect the user to it. If provided, the menu id is included in
        the URL the user is redirected to when coming back on Odoo after the onboarding. If the link
        generation failed, redirect the user to the acquirer form.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.
        Note: self.ensure_one()

        :param int menu_id: The menu from which the user started the onboarding step, as an
                            `ir.ui.menu` id.
        :return: The next step action
        :rtype: dict
        """
        self.ensure_one()

        if self.state == 'enabled':
            self.company_id._mark_payment_onboarding_step_as_done()
            action = {'type': 'ir.actions.act_window_close'}
        else:
            # Account creation
            connected_account = self._stripe_fetch_or_create_connected_account()

            # Link generation
            menu_id = menu_id or self.env.ref('payment.payment_acquirer_menu').id
            account_link_url = self._stripe_create_account_link(connected_account['id'], menu_id)
            if account_link_url:
                action = {
                    'type': 'ir.actions.act_url',
                    'url': account_link_url,
                    'target': 'self',
                }
            else:
                # Fall back on the acquirer form when no onboarding link could be generated
                action = {
                    'type': 'ir.actions.act_window',
                    'model': 'payment.acquirer',
                    'views': [[False, 'form']],
                    'res_id': self.id,
                }

        return action

    def action_stripe_create_webhook(self):
        """ Create a webhook and return a feedback notification.

        Note: This action only works for instances using a public URL

        :return: The feedback notification
        :rtype: dict
        """
        self.ensure_one()

        if self.stripe_webhook_secret:
            message = _("Your Stripe Webhook is already set up.")
            notification_type = 'warning'
        elif not self.stripe_secret_key:
            message = _("You cannot create a Stripe Webhook if your Stripe Secret Key is not set.")
            notification_type = 'danger'
        else:
            webhook = self._stripe_make_request(
                'webhook_endpoints', payload={
                    'url': self._get_stripe_webhook_url(),
                    'enabled_events[]': const.WEBHOOK_HANDLED_EVENTS,
                    'api_version': const.API_VERSION,
                }
            )
            self.stripe_webhook_secret = webhook.get('secret')
            # Fixed wording: was "You Stripe Webhook was successfully set up!"
            message = _("Your Stripe Webhook was successfully set up!")
            notification_type = 'info'

        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'message': message,
                'sticky': False,
                'type': notification_type,
                'next': {'type': 'ir.actions.act_window_close'},  # Refresh the form to show the key
            }
        }

    def _get_stripe_webhook_url(self):
        """ Return the public URL Stripe should deliver webhook events to. """
        return url_join(self.get_base_url(), StripeController._webhook_url)

    # === BUSINESS METHODS - PAYMENT FLOW === #

    def _stripe_make_request(
        self, endpoint, payload=None, method='POST', offline=False, idempotency_key=None
    ):
        """ Make a request to Stripe API at the specified endpoint.

        Note: self.ensure_one()

        :param str endpoint: The endpoint to be reached by the request
        :param dict payload: The payload of the request
        :param str method: The HTTP method of the request
        :param bool offline: Whether the operation of the transaction being processed is 'offline'
        :param str idempotency_key: The idempotency key to pass in the request.
        :return The JSON-formatted content of the response
        :rtype: dict
        :raise: ValidationError if an HTTP error occurs
        """
        self.ensure_one()

        url = url_join('https://api.stripe.com/v1/', endpoint)
        headers = {
            'AUTHORIZATION': f'Bearer {stripe_utils.get_secret_key(self)}',
            'Stripe-Version': const.API_VERSION,  # SetupIntent requires a specific version.
            **self._get_stripe_extra_request_headers(),
        }
        if method == 'POST' and idempotency_key:
            headers['Idempotency-Key'] = idempotency_key
        try:
            response = requests.request(method, url, data=payload, headers=headers, timeout=60)
            # Stripe can send 4XX errors for payment failures (not only for badly-formed requests).
            # Check if an error code is present in the response content and raise only if not.
            # See https://stripe.com/docs/error-codes.
            # If the request originates from an offline operation, don't raise and return the resp.
            if not response.ok \
                    and not offline \
                    and 400 <= response.status_code < 500 \
                    and response.json().get('error'):  # The 'code' entry is sometimes missing
                try:
                    response.raise_for_status()
                except requests.exceptions.HTTPError:
                    _logger.exception("invalid API request at %s with data %s", url, payload)
                    error_msg = response.json().get('error', {}).get('message', '')
                    raise ValidationError(
                        "Stripe: " + _(
                            "The communication with the API failed.\n"
                            "Stripe gave us the following info about the problem:\n'%s'", error_msg
                        )
                    )
        except requests.exceptions.ConnectionError:
            _logger.exception("unable to reach endpoint at %s", url)
            raise ValidationError("Stripe: " + _("Could not establish the connection to the API."))
        return response.json()

    def _get_stripe_extra_request_headers(self):
        """ Return the extra headers for the Stripe API request.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.

        :return: The extra request headers.
        :rtype: dict
        """
        return {}

    def _get_default_payment_method_id(self):
        self.ensure_one()
        if self.provider != 'stripe':
            return super()._get_default_payment_method_id()
        return self.env.ref('payment_stripe.payment_method_stripe').id

    # === BUSINESS METHODS - STRIPE CONNECT ONBOARDING === #

    def _stripe_fetch_or_create_connected_account(self):
        """ Fetch the connected Stripe account and create one if not already done.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.

        :return: The connected account
        :rtype: dict
        """
        return self._stripe_make_proxy_request(
            'accounts', payload=self._stripe_prepare_connect_account_payload()
        )

    def _stripe_prepare_connect_account_payload(self):
        """ Prepare the payload for the creation of a connected account in Stripe format.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.
        Note: self.ensure_one()

        :return: The Stripe-formatted payload for the creation request
        :rtype: dict
        """
        self.ensure_one()

        return {
            'type': 'standard',
            'country': self.company_id.country_id.code,
            'email': self.company_id.email,
            'business_type': 'individual',
            'company[address][city]': self.company_id.city or '',
            'company[address][country]': self.company_id.country_id.code or '',
            'company[address][line1]': self.company_id.street or '',
            'company[address][line2]': self.company_id.street2 or '',
            'company[address][postal_code]': self.company_id.zip or '',
            'company[address][state]': self.company_id.state_id.name or '',
            'company[name]': self.company_id.name,
            'individual[address][city]': self.company_id.city or '',
            'individual[address][country]': self.company_id.country_id.code or '',
            'individual[address][line1]': self.company_id.street or '',
            'individual[address][line2]': self.company_id.street2 or '',
            'individual[address][postal_code]': self.company_id.zip or '',
            'individual[address][state]': self.company_id.state_id.name or '',
            'individual[email]': self.company_id.email or '',
            'business_profile[name]': self.company_id.name,
        }

    def _stripe_create_account_link(self, connected_account_id, menu_id):
        """ Create an account link and return its URL.

        An account link url is the beginning URL of Stripe Onboarding.
        This URL is only valid once, and can only be used once.

        Note: self.ensure_one()

        :param str connected_account_id: The id of the connected account.
        :param int menu_id: The menu from which the user started the onboarding step, as an
                            `ir.ui.menu` id
        :return: The account link URL
        :rtype: str
        """
        self.ensure_one()

        base_url = self.company_id.get_base_url()
        return_url = OnboardingController._onboarding_return_url
        refresh_url = OnboardingController._onboarding_refresh_url
        return_params = dict(acquirer_id=self.id, menu_id=menu_id)
        refresh_params = dict(**return_params, account_id=connected_account_id)

        account_link = self._stripe_make_proxy_request('account_links', payload={
            'account': connected_account_id,
            'return_url': f'{url_join(base_url, return_url)}?{url_encode(return_params)}',
            'refresh_url': f'{url_join(base_url, refresh_url)}?{url_encode(refresh_params)}',
            'type': 'account_onboarding',
        })
        return account_link['url']

    def _stripe_make_proxy_request(self, endpoint, payload=None, version=1):
        """ Make a request to the Stripe proxy at the specified endpoint.

        :param str endpoint: The proxy endpoint to be reached by the request
        :param dict payload: The payload of the request
        :param int version: The proxy version used
        :return The JSON-formatted content of the response
        :rtype: dict
        :raise: ValidationError if an HTTP error occurs
        """
        proxy_payload = {
            'jsonrpc': '2.0',
            'id': uuid.uuid4().hex,
            'method': 'call',
            'params': {
                'payload': payload,  # Stripe data.
                'proxy_data': self._stripe_prepare_proxy_data(stripe_payload=payload),
            },
        }
        url = url_join(const.PROXY_URL, f'{version}/{endpoint}')
        try:
            response = requests.post(url=url, json=proxy_payload, timeout=60)
            response.raise_for_status()
        except requests.exceptions.ConnectionError:
            _logger.exception("unable to reach endpoint at %s", url)
            raise ValidationError(_("Stripe Proxy: Could not establish the connection."))
        except requests.exceptions.HTTPError:
            _logger.exception("invalid API request at %s with data %s", url, payload)
            raise ValidationError(
                _("Stripe Proxy: An error occurred when communicating with the proxy.")
            )

        # Stripe proxy endpoints always respond with HTTP 200 as they implement JSON-RPC 2.0
        response_content = response.json()
        if response_content.get('error'):  # An exception was raised on the proxy
            error_data = response_content['error']['data']
            _logger.error("request forwarded with error: %s", error_data['message'])
            raise ValidationError(_("Stripe Proxy error: %(error)s", error=error_data['message']))

        return response_content.get('result', {})

    def _stripe_prepare_proxy_data(self, stripe_payload=None):
        """ Prepare the contextual data passed to the proxy when making a request.

        Note: This method serves as a hook for modules that would fully implement Stripe Connect.
        Note: self.ensure_one()

        :param dict stripe_payload: The part of the request payload to be forwarded to Stripe.
        :return: The proxy data.
        :rtype: dict
        """
        self.ensure_one()
        return {}
43.757653
17,153
2,457
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from werkzeug.urls import url_encode

from odoo import http
from odoo.http import request


class OnboardingController(http.Controller):

    _onboarding_return_url = '/payment/stripe/onboarding/return'
    _onboarding_refresh_url = '/payment/stripe/onboarding/refresh'

    @http.route(_onboarding_return_url, type='http', methods=['GET'], auth='user')
    def stripe_return_from_onboarding(self, acquirer_id, menu_id):
        """ Redirect the user to the acquirer form of the onboarded Stripe account.

        Stripe sends the user back to this route after (or during, if the user clicks a dedicated
        button) the onboarding.

        :param str acquirer_id: The acquirer linked to the Stripe account being onboarded, as a
                                `payment.acquirer` id
        :param str menu_id: The menu from which the user started the onboarding step, as an
                            `ir.ui.menu` id
        """
        acquirer = request.env['payment.acquirer'].browse(int(acquirer_id))
        acquirer.company_id._mark_payment_onboarding_step_as_done()

        # Prefer the dedicated onboarding action when available, else the generic acquirer action.
        target_action = request.env.ref(
            'payment_stripe.action_payment_acquirer_onboarding', raise_if_not_found=False
        ) or request.env.ref('payment.action_payment_acquirer')
        query_string = url_encode({
            'action': target_action.id,
            'id': acquirer_id,
            'menu_id': menu_id,
        })
        return request.redirect(f'/web?#{query_string}')

    @http.route(_onboarding_refresh_url, type='http', methods=['GET'], auth='user')
    def stripe_refresh_onboarding(self, acquirer_id, account_id, menu_id):
        """ Redirect the user to a new Stripe Connect onboarding link.

        Stripe sends the user back to this route when the onboarding link they used has expired.

        :param str acquirer_id: The acquirer linked to the Stripe account being onboarded, as a
                                `payment.acquirer` id
        :param str account_id: The id of the connected account
        :param str menu_id: The menu from which the user started the onboarding step, as an
                            `ir.ui.menu` id
        """
        acquirer = request.env['payment.acquirer'].browse(int(acquirer_id))
        fresh_link = acquirer._stripe_create_account_link(account_id, int(menu_id))
        return request.redirect(fresh_link, local=False)
52.276596
2,457
8,264
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import hashlib
import hmac
import json
import logging
import pprint
from datetime import datetime

from odoo import http
from odoo.exceptions import ValidationError
from odoo.http import request
from odoo.tools import consteq

from odoo.addons.payment_stripe import utils as stripe_utils

_logger = logging.getLogger(__name__)


class StripeController(http.Controller):
    # Return/notification endpoints registered with Stripe
    _checkout_return_url = '/payment/stripe/checkout_return'
    _validation_return_url = '/payment/stripe/validation_return'
    _webhook_url = '/payment/stripe/webhook'
    WEBHOOK_AGE_TOLERANCE = 10*60  # seconds; max accepted age of a signed webhook event

    @http.route(_checkout_return_url, type='http', auth='public', csrf=False)
    def stripe_return_from_checkout(self, **data):
        """ Process the data returned by Stripe after redirection for checkout.

        :param dict data: The GET params appended to the URL in `_stripe_create_checkout_session`
        """
        # Retrieve the tx and acquirer based on the tx reference included in the return url
        tx_sudo = request.env['payment.transaction'].sudo()._get_tx_from_feedback_data(
            'stripe', data
        )
        acquirer_sudo = tx_sudo.acquirer_id

        # Fetch the PaymentIntent, Charge and PaymentMethod objects from Stripe
        payment_intent = acquirer_sudo._stripe_make_request(
            f'payment_intents/{tx_sudo.stripe_payment_intent}', method='GET'
        )
        _logger.info("received payment_intents response:\n%s", pprint.pformat(payment_intent))
        self._include_payment_intent_in_feedback_data(payment_intent, data)

        # Handle the feedback data crafted with Stripe API objects
        request.env['payment.transaction'].sudo()._handle_feedback_data('stripe', data)

        # Redirect the user to the status page
        return request.redirect('/payment/status')

    @http.route(_validation_return_url, type='http', auth='public', csrf=False)
    def stripe_return_from_validation(self, **data):
        """ Process the data returned by Stripe after redirection for validation.

        :param dict data: The GET params appended to the URL in `_stripe_create_checkout_session`
        """
        # Retrieve the acquirer based on the tx reference included in the return url
        acquirer_sudo = request.env['payment.transaction'].sudo()._get_tx_from_feedback_data(
            'stripe', data
        ).acquirer_id

        # Fetch the Session, SetupIntent and PaymentMethod objects from Stripe
        checkout_session = acquirer_sudo._stripe_make_request(
            f'checkout/sessions/{data.get("checkout_session_id")}',
            payload={'expand[]': 'setup_intent.payment_method'},  # Expand all required objects
            method='GET'
        )
        _logger.info("received checkout/session response:\n%s", pprint.pformat(checkout_session))
        self._include_setup_intent_in_feedback_data(checkout_session.get('setup_intent', {}), data)

        # Handle the feedback data crafted with Stripe API objects
        request.env['payment.transaction'].sudo()._handle_feedback_data('stripe', data)

        # Redirect the user to the status page
        return request.redirect('/payment/status')

    @http.route(_webhook_url, type='json', auth='public')
    def stripe_webhook(self):
        """ Process the `checkout.session.completed` event sent by Stripe to the webhook.

        :return: An empty string to acknowledge the notification with an HTTP 200 response
        :rtype: str
        """
        event = json.loads(request.httprequest.data)
        _logger.info("event received:\n%s", pprint.pformat(event))
        try:
            if event['type'] == 'checkout.session.completed':
                checkout_session = event['data']['object']

                # Check the source and integrity of the event
                data = {'reference': checkout_session['client_reference_id']}
                tx_sudo = request.env['payment.transaction'].sudo()._get_tx_from_feedback_data(
                    'stripe', data
                )
                if self._verify_webhook_signature(
                    stripe_utils.get_webhook_secret(tx_sudo.acquirer_id)
                ):
                    # Fetch the PaymentIntent, Charge and PaymentMethod objects from Stripe
                    if checkout_session.get('payment_intent'):  # Can be None
                        payment_intent = tx_sudo.acquirer_id._stripe_make_request(
                            f'payment_intents/{tx_sudo.stripe_payment_intent}', method='GET'
                        )
                        _logger.info(
                            "received payment_intents response:\n%s", pprint.pformat(payment_intent)
                        )
                        self._include_payment_intent_in_feedback_data(payment_intent, data)
                    # Fetch the SetupIntent and PaymentMethod objects from Stripe
                    if checkout_session.get('setup_intent'):  # Can be None
                        setup_intent = tx_sudo.acquirer_id._stripe_make_request(
                            f'setup_intents/{checkout_session.get("setup_intent")}',
                            payload={'expand[]': 'payment_method'},
                            method='GET'
                        )
                        _logger.info(
                            "received setup_intents response:\n%s", pprint.pformat(setup_intent)
                        )
                        self._include_setup_intent_in_feedback_data(setup_intent, data)
                    # Handle the feedback data crafted with Stripe API objects as a regular feedback
                    request.env['payment.transaction'].sudo()._handle_feedback_data('stripe', data)
        except ValidationError:  # Acknowledge the notification to avoid getting spammed
            _logger.exception("unable to handle the event data; skipping to acknowledge")
        return ''

    @staticmethod
    def _include_payment_intent_in_feedback_data(payment_intent, data):
        # Enrich the feedback data with the PaymentIntent and, when present, its latest Charge
        # and the payment method details used for that charge.
        data.update({'payment_intent': payment_intent})
        if payment_intent.get('charges', {}).get('total_count', 0) > 0:
            charge = payment_intent['charges']['data'][0]  # Use the latest charge object
            data.update({
                'charge': charge,
                'payment_method': charge.get('payment_method_details'),
            })

    @staticmethod
    def _include_setup_intent_in_feedback_data(setup_intent, data):
        # Enrich the feedback data with the SetupIntent and its (expanded) payment method
        data.update({
            'setup_intent': setup_intent,
            'payment_method': setup_intent.get('payment_method')
        })

    def _verify_webhook_signature(self, webhook_secret):
        """ Check that the signature computed from the feedback matches the received one.

        See https://stripe.com/docs/webhooks/signatures#verify-manually.

        :param str webhook_secret: The secret webhook key of the acquirer handling the transaction
        :return: Whether the signatures match
        :rtype: str
        """
        if not webhook_secret:
            _logger.warning("ignored webhook event due to undefined webhook secret")
            return False

        notification_payload = request.httprequest.data.decode('utf-8')
        signature_entries = request.httprequest.headers.get('Stripe-Signature').split(',')
        signature_data = {k: v for k, v in [entry.split('=') for entry in signature_entries]}

        # Check the timestamp of the event to mitigate replay attacks
        event_timestamp = int(signature_data['t'])
        if datetime.utcnow().timestamp() - event_timestamp > self.WEBHOOK_AGE_TOLERANCE:
            _logger.warning("ignored webhook event due to age tolerance: %s", event_timestamp)
            return False

        # Compare signatures: HMAC-SHA256 over "<timestamp>.<payload>" keyed with the secret
        received_signature = signature_data['v1']
        signed_payload = f'{event_timestamp}.{notification_payload}'
        expected_signature = hmac.new(
            webhook_secret.encode('utf-8'), signed_payload.encode('utf-8'), hashlib.sha256
        ).hexdigest()
        # consteq performs a constant-time comparison to avoid timing attacks
        if not consteq(received_signature, expected_signature):
            _logger.warning("ignored event with invalid signature")
            return False

        return True
46.689266
8,264
5,917
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Web Editor', 'category': 'Hidden', 'description': """ Odoo Web Editor widget. ========================== """, 'depends': ['web'], 'data': [ 'security/ir.model.access.csv', 'data/editor_assets.xml', 'views/editor.xml', 'views/snippets.xml', ], 'assets': { #---------------------------------------------------------------------- # MAIN BUNDLES #---------------------------------------------------------------------- 'web.assets_qweb': [ 'web_editor/static/src/xml/*.xml', ], 'web_editor.assets_wysiwyg': [ # lib 'web_editor/static/lib/cropperjs/cropper.css', 'web_editor/static/lib/cropperjs/cropper.js', 'web_editor/static/lib/jquery-cropper/jquery-cropper.js', 'web_editor/static/lib/jQuery.transfo.js', 'web/static/lib/nearest/jquery.nearest.js', 'web_editor/static/lib/webgl-image-filter/webgl-image-filter.js', # odoo-editor 'web_editor/static/lib/odoo-editor/src/style.css', 'web_editor/static/lib/odoo-editor/src/OdooEditor.js', 'web_editor/static/lib/odoo-editor/src/utils/constants.js', 'web_editor/static/lib/odoo-editor/src/utils/sanitize.js', 'web_editor/static/lib/odoo-editor/src/utils/serialize.js', 'web_editor/static/lib/odoo-editor/src/utils/DOMPurify.js', 'web_editor/static/lib/odoo-editor/src/tablepicker/TablePicker.js', 'web_editor/static/lib/odoo-editor/src/powerbox/patienceDiff.js', 'web_editor/static/lib/odoo-editor/src/powerbox/Powerbox.js', 'web_editor/static/lib/odoo-editor/src/commands/align.js', 'web_editor/static/lib/odoo-editor/src/commands/commands.js', 'web_editor/static/lib/odoo-editor/src/commands/deleteBackward.js', 'web_editor/static/lib/odoo-editor/src/commands/deleteForward.js', 'web_editor/static/lib/odoo-editor/src/commands/enter.js', 'web_editor/static/lib/odoo-editor/src/commands/shiftEnter.js', 'web_editor/static/lib/odoo-editor/src/commands/shiftTab.js', 'web_editor/static/lib/odoo-editor/src/commands/tab.js', 
'web_editor/static/lib/odoo-editor/src/commands/toggleList.js', # utils 'web_editor/static/src/js/wysiwyg/PeerToPeer.js', # odoo utils ('include', 'web._assets_helpers'), 'web_editor/static/src/scss/bootstrap_overridden.scss', 'web/static/lib/bootstrap/scss/_variables.scss', # integration 'web_editor/static/src/scss/wysiwyg.scss', 'web_editor/static/src/scss/wysiwyg_iframe.scss', 'web_editor/static/src/scss/wysiwyg_snippets.scss', 'web_editor/static/src/js/wysiwyg/fonts.js', 'web_editor/static/src/js/base.js', 'web_editor/static/src/js/editor/image_processing.js', 'web_editor/static/src/js/editor/custom_colors.js', # widgets & plugins 'web_editor/static/src/js/wysiwyg/widgets/**/*', 'web_editor/static/src/js/editor/snippets.editor.js', 'web_editor/static/src/js/editor/toolbar.js', 'web_editor/static/src/js/editor/snippets.options.js', # Launcher 'web_editor/static/src/js/wysiwyg/wysiwyg.js', 'web_editor/static/src/js/wysiwyg/wysiwyg_iframe.js', ], 'web.assets_common': [ 'web_editor/static/lib/odoo-editor/src/base_style.css', 'web_editor/static/lib/vkbeautify/**/*', 'web_editor/static/src/js/common/**/*', 'web_editor/static/lib/odoo-editor/src/utils/utils.js', ], 'web.assets_backend': [ 'web_editor/static/src/js/wysiwyg/linkDialogCommand.js', 'web_editor/static/src/scss/web_editor.common.scss', 'web_editor/static/src/scss/web_editor.backend.scss', 'web_editor/static/src/js/wysiwyg/dialog.js', 'web_editor/static/src/js/frontend/loader.js', 'web_editor/static/src/js/backend/**/*', ], 'web.assets_frontend_minimal': [ 'web_editor/static/src/js/frontend/loader_loading.js', ], 'web.assets_frontend': [ 'web_editor/static/src/scss/web_editor.common.scss', 'web_editor/static/src/scss/web_editor.frontend.scss', 'web_editor/static/src/js/wysiwyg/dialog.js', 'web_editor/static/src/js/frontend/loader.js', ], #---------------------------------------------------------------------- # SUB BUNDLES #---------------------------------------------------------------------- 
'web._assets_primary_variables': [ 'web_editor/static/src/scss/web_editor.variables.scss', ], 'web._assets_secondary_variables': [ 'web_editor/static/src/scss/secondary_variables.scss', ], 'web._assets_backend_helpers': [ 'web_editor/static/src/scss/bootstrap_overridden_backend.scss', 'web_editor/static/src/scss/bootstrap_overridden.scss', ], 'web._assets_frontend_helpers': [ ('prepend', 'web_editor/static/src/scss/bootstrap_overridden.scss'), ], # ---------------------------------------------------------------------- # TESTS BUNDLES # ---------------------------------------------------------------------- 'web.qunit_suite_tests': [ ('include', 'web_editor.assets_wysiwyg'), 'web_editor/static/tests/**/*', ], }, 'auto_install': True, 'license': 'LGPL-3', }
41.669014
5,917
2,871
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo.tests import TransactionCase


class TestViews(TransactionCase):
    """Checks on qweb view recursion, oe_structure extraction and snippet naming."""

    def setUp(self):
        super().setUp()
        Views = self.env['ir.ui.view']
        # A plain qweb view, plus a second one that t-calls the first.
        self.first_view = Views.create({
            'name': 'Test View 1',
            'type': 'qweb',
            'arch': '<div>Hello World</div>',
            'key': 'web_editor.test_first_view',
        })
        self.second_view = Views.create({
            'name': 'Test View 2',
            'type': 'qweb',
            'arch': '<div><t t-call="web_editor.test_first_view"/></div>',
            'key': 'web_editor.test_second_view',
        })

    def test_infinite_inherit_loop(self):
        # Build a loop: A t-calls B while A also inherits from B.
        Views = self.env['ir.ui.view']
        self.second_view.write({'inherit_id': self.first_view.id})
        # Must not raise "RecursionError: maximum recursion depth exceeded".
        Views._views_get(self.first_view)

    def test_oe_structure_as_inherited_view(self):
        Views = self.env['ir.ui.view']
        base = Views.create({
            'name': 'Test View oe_structure',
            'type': 'qweb',
            'arch': """<xpath expr='//t[@t-call="web_editor.test_first_view"]' position='after'>
                <div class="oe_structure" id='oe_structure_test_view_oe_structure'/>
            </xpath>""",
            'key': 'web_editor.oe_structure_view',
            'inherit_id': self.second_view.id,
        })
        # The view extends another one, so it must be in extension mode.
        self.assertEqual(base.mode, 'extension')

        # Update the content of the oe_structure through the editor save path.
        value = '''<div class="oe_structure" id="oe_structure_test_view_oe_structure" data-oe-id="%s"
            data-oe-xpath="/div" data-oe-model="ir.ui.view" data-oe-field="arch">
            <p>Hello World!</p>
        </div>''' % base.id
        base.save(value=value, xpath='/xpath/div')

        # The save created exactly one child view holding the structure content.
        self.assertEqual(len(base.inherit_children_ids), 1)
        self.assertEqual(base.inherit_children_ids.mode, 'extension')
        self.assertIn(
            '<p>Hello World!</p>',
            base.inherit_children_ids.get_combined_arch(),
        )

    def test_find_available_name(self):
        Views = self.env['ir.ui.view']
        taken = ['Unrelated name']
        wanted = "Test name"

        # Unused name: returned unchanged.
        name = Views._find_available_name(wanted, taken)
        self.assertEqual(wanted, name)

        # Already taken: a " (2)" suffix is appended.
        taken.append(name)
        name = Views._find_available_name(wanted, taken)
        self.assertEqual('Test name (2)', name)

        # Taken again: the counter keeps incrementing.
        taken.append(name)
        name = Views._find_available_name(wanted, taken)
        self.assertEqual('Test name (3)', name)
37.285714
2,871
6,894
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import textwrap from lxml import etree, html from lxml.builder import E from odoo.tests import common from odoo.tests.common import BaseCase from odoo.addons.web_editor.models.ir_qweb import html_to_text class TestHTMLToText(BaseCase): def test_rawstring(self): self.assertEqual( "foobar", html_to_text(E.div("foobar"))) def test_br(self): self.assertEqual( "foo\nbar", html_to_text(E.div("foo", E.br(), "bar"))) self.assertEqual( "foo\n\nbar\nbaz", html_to_text(E.div( "foo", E.br(), E.br(), "bar", E.br(), "baz"))) def test_p(self): self.assertEqual( "foo\n\nbar\n\nbaz", html_to_text(E.div( "foo", E.p("bar"), "baz"))) self.assertEqual( "foo", html_to_text(E.div(E.p("foo")))) self.assertEqual( "foo\n\nbar", html_to_text(E.div("foo", E.p("bar")))) self.assertEqual( "foo\n\nbar", html_to_text(E.div(E.p("foo"), "bar"))) self.assertEqual( "foo\n\nbar\n\nbaz", html_to_text(E.div( E.p("foo"), E.p("bar"), E.p("baz"), ))) def test_div(self): self.assertEqual( "foo\nbar\nbaz", html_to_text(E.div( "foo", E.div("bar"), "baz" ))) self.assertEqual( "foo", html_to_text(E.div(E.div("foo")))) self.assertEqual( "foo\nbar", html_to_text(E.div("foo", E.div("bar")))) self.assertEqual( "foo\nbar", html_to_text(E.div(E.div("foo"), "bar"))) self.assertEqual( "foo\nbar\nbaz", html_to_text(E.div( "foo", E.div("bar"), E.div("baz") ))) def test_other_block(self): self.assertEqual( "foo\nbar\nbaz", html_to_text(E.div( "foo", E.section("bar"), "baz" ))) def test_inline(self): self.assertEqual( "foobarbaz", html_to_text(E.div("foo", E.span("bar"), "baz"))) def test_whitespace(self): self.assertEqual( "foo bar\nbaz", html_to_text(E.div( "foo\nbar", E.br(), "baz") )) self.assertEqual( "foo bar\nbaz", html_to_text(E.div( E.div(E.span("foo"), " bar"), "baz"))) class TestConvertBack(common.TransactionCase): def setUp(self): super(TestConvertBack, self).setUp() self.env = 
self.env(context={'inherit_branding': True}) def field_rountrip_result(self, field, value, expected): model = 'web_editor.converter.test' record = self.env[model].create({field: value}) t = etree.Element('t') e = etree.Element('span') t.append(e) field_value = 'record.%s' % field e.set('t-field', field_value) rendered = self.env['ir.qweb']._render(t, {'record': record}) element = html.fromstring(rendered, parser=html.HTMLParser(encoding='utf-8')) model = 'ir.qweb.field.' + element.get('data-oe-type', '') converter = self.env[model] if model in self.env else self.env['ir.qweb.field'] value_back = converter.from_html(model, record._fields[field], element) if isinstance(expected, bytes): expected = expected.decode('utf-8') self.assertEqual(value_back, expected) def field_roundtrip(self, field, value): self.field_rountrip_result(field, value, value) def test_integer(self): self.field_roundtrip('integer', 42) self.field_roundtrip('integer', 42000) def test_float(self): self.field_roundtrip('float', 42.567890) self.field_roundtrip('float', 324542.567890) def test_numeric(self): self.field_roundtrip('numeric', 42.77) def test_char(self): self.field_roundtrip('char', "foo bar") self.field_roundtrip('char', "ⒸⓄⓇⒼⒺ") def test_selection_str(self): self.field_roundtrip('selection_str', 'B') def test_text(self): self.field_roundtrip('text', textwrap.dedent("""\ You must obey the dance commander Givin' out the order for fun You must obey the dance commander You know that he's the only one Who gives the orders here, Alright Who gives the orders here, Alright It would be awesome If we could dance-a It would be awesome, yeah Let's take the chance-a It would be awesome, yeah Let's start the show Because you never know You never know You never know until you go""")) def test_m2o(self): """ the M2O field conversion (from html) is markedly different from others as it directly writes into the m2o and returns nothing at all. 
""" field = 'many2one' subrec1 = self.env['web_editor.converter.test.sub'].create({'name': "Foo"}) subrec2 = self.env['web_editor.converter.test.sub'].create({'name': "Bar"}) record = self.env['web_editor.converter.test'].create({field: subrec1.id}) t = etree.Element('t') e = etree.Element('span') t.append(e) field_value = 'record.%s' % field e.set('t-field', field_value) rendered = self.env['ir.qweb']._render(t, {'record': record}) element = html.fromstring(rendered, parser=html.HTMLParser(encoding='utf-8')) # emulate edition element.set('data-oe-many2one-id', str(subrec2.id)) element.text = "New content" model = 'ir.qweb.field.' + element.get('data-oe-type') converter = self.env[model] if model in self.env else self.env['ir.qweb.field'] value_back = converter.from_html('web_editor.converter.test', record._fields[field], element) self.assertIsNone( value_back, "the m2o converter should return None to avoid spurious" " or useless writes on the parent record") self.assertEqual( subrec1.name, "Foo", "element edition can't change directly the m2o record" ) self.assertEqual( record.many2one.name, "Bar", "element edition should have been change the m2o id" )
30.732143
6,884
357
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import odoo.tests


@odoo.tests.tagged("post_install", "-at_install")
class TestOdooEditor(odoo.tests.HttpCase):
    """Runs the odoo-editor JS test suite inside a headless browser."""

    def test_odoo_editor_suite(self):
        # The JS suite is long-running: allow up to 30 minutes.
        self.browser_js(
            '/web_editor/static/lib/odoo-editor/test/editor-test.html',
            "",
            "",
            timeout=1800,
        )
35.7
357
2,894
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import binascii

from odoo.addons.http_routing.models.ir_http import slug
import odoo.tests
from odoo.tests.common import HttpCase


@odoo.tests.tagged('-at_install', 'post_install')
class TestController(HttpCase):

    def _assert_shape_route(self, url, svg):
        """Fetch ``url`` with and without a color override and check the SVG."""
        # Without parameters the original SVG must come back untouched.
        res = self.url_open(url)
        self.assertEqual(200, res.status_code, 'Expect response')
        self.assertEqual(svg, res.content, 'Expect unchanged SVG')

        # Overriding c1 must patch that color in place (same payload length).
        res = self.url_open(url + '?c1=%23ABCDEF')
        self.assertEqual(200, res.status_code, 'Expect response')
        self.assertEqual(len(svg), len(res.content), 'Expect same length as original')
        self.assertTrue('ABCDEF' in str(res.content), 'Expect patched c1')
        self.assertTrue('3AADAA' not in str(res.content), 'Old c1 should not be there anymore')

    def test_01_illustration_shape(self):
        self.authenticate('admin', 'admin')
        # SVG with all replaceable colors.
        svg = b"""
<svg viewBox="0 0 400 400">
  <rect width="300" height="300" style="fill:#3AADAA;" />
  <rect x="20" y="20" width="300" height="300" style="fill:#7C6576;" />
  <rect x="40" y="40" width="300" height="300" style="fill:#F6F6F6;" />
  <rect x="60" y="60" width="300" height="300" style="fill:#FFFFFF;" />
  <rect x="80" y="80" width="300" height="300" style="fill:#383E45;" />
</svg>
"""
        # Need to bypass security check to write image with mimetype image/svg+xml
        ctx = {'binary_field_real_user': self.env['res.users'].sudo().browse([1])}
        attachment = self.env['ir.attachment'].sudo().with_context(ctx).create({
            'name': 'test.svg',
            'mimetype': 'image/svg+xml',
            'datas': binascii.b2a_base64(svg, newline=False),
            'public': True,
            'res_model': 'ir.ui.view',
            'res_id': 0,
        })

        # Shape illustration with slug.
        slug_url = '/web_editor/shape/illustration/%s' % slug(attachment)
        palette = 'c1=%233AADAA&c2=%237C6576&&c3=%23F6F6F6&&c4=%23FFFFFF&&c5=%23383E45'
        attachment['url'] = '%s?%s' % (slug_url, palette)
        self._assert_shape_route(slug_url, svg)

        # Shape illustration without slug.
        plain_url = '/web_editor/shape/illustration/noslug'
        attachment['url'] = plain_url
        self._assert_shape_route(plain_url, svg)
47.442623
2,894
2,052
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from lxml import etree

from odoo import models, api
from odoo.tools.translate import encode, xml_translate, html_translate


def edit_translation_mapping(data):
    """Return the editable <span> markup wrapping a translation term.

    ``data`` holds a translation row; the model is the part of ``name``
    before the comma, and the displayed value falls back to the source
    term when no translation exists yet.
    """
    model = data['name'].partition(',')[0]
    value = data['value'] or data['src']
    return (
        '<span data-oe-model="%s" data-oe-translation-id="%s" '
        'data-oe-translation-state="%s">%s</span>'
        % (model, data['id'], data['state'], value)
    )


class IrTranslation(models.Model):
    _inherit = 'ir.translation'

    @api.model
    def _get_terms_mapping(self, field, records):
        # In "edit translations" mode, wrap every term in an editable span
        # (creating the missing translation rows first) instead of
        # rendering the plain value.
        if self._context.get('edit_translations'):
            self.insert_missing(field, records)
            return edit_translation_mapping
        return super(IrTranslation, self)._get_terms_mapping(field, records)

    def save_html(self, value):
        """ Convert the HTML fragment ``value`` to XML if necessary, and write
            it as the value of translation ``self``.
        """
        assert len(self) == 1 and self.type == 'model_terms'
        mname, fname = self.name.split(',')
        field = self.env[mname]._fields[fname]
        if field.translate in (xml_translate, html_translate):
            # wrap value inside a div and parse it as HTML
            div = "<div>%s</div>" % encode(value)
            root = etree.fromstring(div, etree.HTMLParser(encoding='utf-8'))
            # root is html > body > div: serialize that div (as XML for
            # xml_translate, as HTML for html_translate) then discard the
            # surrounding "<div>" / "</div>" tags (5 and 6 bytes).
            method = 'xml' if field.translate == xml_translate else 'html'
            value = etree.tostring(root[0][0], encoding='utf-8', method=method)[5:-6]
        return self.write({'value': value})
44.608696
2,052
951
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import models
from odoo.http import request


class IrHttp(models.AbstractModel):
    _inherit = 'ir.http'

    @classmethod
    def _dispatch(cls):
        # Promote editor-related query-string flags into the request
        # context, without overriding flags already present there.
        ctx = dict(request.context)
        for flag in ('editable', 'edit_translations'):
            if flag in request.httprequest.args and flag not in ctx:
                ctx[flag] = True
        # Editing translations implies the translatable mode.
        if ctx.get('edit_translations') and 'translatable' not in ctx:
            ctx['translatable'] = True
        request.context = ctx
        return super(IrHttp, cls)._dispatch()

    @classmethod
    def _get_translation_frontend_modules_name(cls):
        # Expose web_editor terms to the frontend translation bundle.
        mods = super(IrHttp, cls)._get_translation_frontend_modules_name()
        return mods + ['web_editor']
36.576923
951
2,892
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from werkzeug.urls import url_quote

from odoo import api, models, fields, tools

SUPPORTED_IMAGE_MIMETYPES = ['image/gif', 'image/jpe', 'image/jpeg', 'image/jpg', 'image/png', 'image/svg+xml']
SUPPORTED_IMAGE_EXTENSIONS = ['.gif', '.jpe', '.jpeg', '.jpg', '.png', '.svg']


class IrAttachment(models.Model):
    _inherit = "ir.attachment"

    local_url = fields.Char("Attachment URL", compute='_compute_local_url')
    image_src = fields.Char(compute='_compute_image_src')
    image_width = fields.Integer(compute='_compute_image_size')
    image_height = fields.Integer(compute='_compute_image_size')
    original_id = fields.Many2one('ir.attachment', string="Original (unoptimized, unresized) attachment", index=True)

    def _compute_local_url(self):
        # URL attachments point at their own URL; binary ones get the
        # standard /web/image route with the checksum as cachebuster.
        for rec in self:
            rec.local_url = rec.url or '/web/image/%s?unique=%s' % (rec.id, rec.checksum)

    @api.depends('mimetype', 'url', 'name')
    def _compute_image_src(self):
        for rec in self:
            if rec.mimetype not in SUPPORTED_IMAGE_MIMETYPES:
                # Only supported images get a src.
                rec.image_src = False
            elif rec.type == 'url':
                rec.image_src = rec.url
            elif rec.url:
                # For attachments-by-url, unique is used as a cachebuster.
                # They currently do not leverage max-age headers.
                unique = rec.checksum[:8]
                sep = '&' if '?' in rec.url else '?'
                rec.image_src = '%s%sunique=%s' % (rec.url, sep, unique)
            else:
                # Adding unique in URLs for cache-control.
                unique = rec.checksum[:8]
                rec.image_src = '/web/image/%s-%s/%s' % (rec.id, unique, url_quote(rec.name))

    @api.depends('datas')
    def _compute_image_size(self):
        # Fall back to 0x0 whenever the payload is missing or not decodable
        # as an image.
        for rec in self:
            try:
                img = tools.base64_to_image(rec.datas)
                rec.image_width = img.width
                rec.image_height = img.height
            except Exception:
                rec.image_width = 0
                rec.image_height = 0

    def _get_media_info(self):
        """Return a dict with the values that we need on the media dialog."""
        self.ensure_one()
        wanted_fields = [
            'id', 'name', 'description', 'mimetype', 'checksum', 'url', 'type',
            'res_id', 'res_model', 'public', 'access_token', 'image_src',
            'image_width', 'image_height', 'original_id',
        ]
        return self._read_format(wanted_fields)[0]
44.492308
2,892
16,026
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import copy import logging import uuid from lxml import etree, html from odoo import api, models, _ from odoo.osv import expression from odoo.exceptions import AccessError, ValidationError _logger = logging.getLogger(__name__) EDITING_ATTRIBUTES = ['data-oe-model', 'data-oe-id', 'data-oe-field', 'data-oe-xpath', 'data-note-id'] class IrUiView(models.Model): _inherit = 'ir.ui.view' def _render(self, values=None, engine='ir.qweb', minimal_qcontext=False): if values and values.get('editable'): try: self.check_access_rights('write') self.check_access_rule('write') except AccessError: values['editable'] = False return super(IrUiView, self)._render(values=values, engine=engine, minimal_qcontext=minimal_qcontext) #------------------------------------------------------ # Save from html #------------------------------------------------------ @api.model def extract_embedded_fields(self, arch): return arch.xpath('//*[@data-oe-model != "ir.ui.view"]') @api.model def extract_oe_structures(self, arch): return arch.xpath('//*[hasclass("oe_structure")][contains(@id, "oe_structure")]') @api.model def get_default_lang_code(self): return False @api.model def save_embedded_field(self, el): Model = self.env[el.get('data-oe-model')] field = el.get('data-oe-field') model = 'ir.qweb.field.' + el.get('data-oe-type') converter = self.env[model] if model in self.env else self.env['ir.qweb.field'] try: value = converter.from_html(Model, Model._fields[field], el) except ValueError: raise ValidationError(_("Invalid field value for %s: %s", Model._fields[field].string, el.text_content().strip())) if value is not None: # TODO: batch writes? 
if not self.env.context.get('lang') and self.get_default_lang_code(): Model.browse(int(el.get('data-oe-id'))).with_context(lang=self.get_default_lang_code()).write({field: value}) else: Model.browse(int(el.get('data-oe-id'))).write({field: value}) def save_oe_structure(self, el): self.ensure_one() if el.get('id') in self.key: # Do not inherit if the oe_structure already has its own inheriting view return False arch = etree.Element('data') xpath = etree.Element('xpath', expr="//*[hasclass('oe_structure')][@id='{}']".format(el.get('id')), position="replace") arch.append(xpath) attributes = {k: v for k, v in el.attrib.items() if k not in EDITING_ATTRIBUTES} structure = etree.Element(el.tag, attrib=attributes) structure.text = el.text xpath.append(structure) for child in el.iterchildren(tag=etree.Element): structure.append(copy.deepcopy(child)) vals = { 'inherit_id': self.id, 'name': '%s (%s)' % (self.name, el.get('id')), 'arch': self._pretty_arch(arch), 'key': '%s_%s' % (self.key, el.get('id')), 'type': 'qweb', 'mode': 'extension', } vals.update(self._save_oe_structure_hook()) self.env['ir.ui.view'].create(vals) return True @api.model def _save_oe_structure_hook(self): return {} @api.model def _pretty_arch(self, arch): # remove_blank_string does not seem to work on HTMLParser, and # pretty-printing with lxml more or less requires stripping # whitespace: http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output # so serialize to XML, parse as XML (remove whitespace) then serialize # as XML (pretty print) arch_no_whitespace = etree.fromstring( etree.tostring(arch, encoding='utf-8'), parser=etree.XMLParser(encoding='utf-8', remove_blank_text=True)) return etree.tostring( arch_no_whitespace, encoding='unicode', pretty_print=True) @api.model def _are_archs_equal(self, arch1, arch2): # Note that comparing the strings would not be ok as attributes order # must not be relevant if arch1.tag != arch2.tag: return False if arch1.text != arch2.text: return 
False if arch1.tail != arch2.tail: return False if arch1.attrib != arch2.attrib: return False if len(arch1) != len(arch2): return False return all(self._are_archs_equal(arch1, arch2) for arch1, arch2 in zip(arch1, arch2)) @api.model def _get_allowed_root_attrs(self): return ['style', 'class'] def replace_arch_section(self, section_xpath, replacement, replace_tail=False): # the root of the arch section shouldn't actually be replaced as it's # not really editable itself, only the content truly is editable. self.ensure_one() arch = etree.fromstring(self.arch.encode('utf-8')) # => get the replacement root if not section_xpath: root = arch else: # ensure there's only one match [root] = arch.xpath(section_xpath) root.text = replacement.text # We need to replace some attrib for styles changes on the root element for attribute in self._get_allowed_root_attrs(): if attribute in replacement.attrib: root.attrib[attribute] = replacement.attrib[attribute] # Note: after a standard edition, the tail *must not* be replaced if replace_tail: root.tail = replacement.tail # replace all children del root[:] for child in replacement: root.append(copy.deepcopy(child)) return arch @api.model def to_field_ref(self, el): # filter out meta-information inserted in the document attributes = {k: v for k, v in el.attrib.items() if not k.startswith('data-oe-')} attributes['t-field'] = el.get('data-oe-expression') out = html.html_parser.makeelement(el.tag, attrib=attributes) out.tail = el.tail return out @api.model def to_empty_oe_structure(self, el): out = html.html_parser.makeelement(el.tag, attrib=el.attrib) out.tail = el.tail return out @api.model def _set_noupdate(self): self.sudo().mapped('model_data_id').write({'noupdate': True}) def save(self, value, xpath=None): """ Update a view section. 
The view section may embed fields to write Note that `self` record might not exist when saving an embed field :param str xpath: valid xpath to the tag to replace """ self.ensure_one() arch_section = html.fromstring( value, parser=html.HTMLParser(encoding='utf-8')) if xpath is None: # value is an embedded field on its own, not a view section self.save_embedded_field(arch_section) return for el in self.extract_embedded_fields(arch_section): self.save_embedded_field(el) # transform embedded field back to t-field el.getparent().replace(el, self.to_field_ref(el)) for el in self.extract_oe_structures(arch_section): if self.save_oe_structure(el): # empty oe_structure in parent view empty = self.to_empty_oe_structure(el) if el == arch_section: arch_section = empty else: el.getparent().replace(el, empty) new_arch = self.replace_arch_section(xpath, arch_section) old_arch = etree.fromstring(self.arch.encode('utf-8')) if not self._are_archs_equal(old_arch, new_arch): self._set_noupdate() self.write({'arch': self._pretty_arch(new_arch)}) @api.model def _view_get_inherited_children(self, view): if self._context.get('no_primary_children', False): original_hierarchy = self._context.get('__views_get_original_hierarchy', []) return view.inherit_children_ids.filtered(lambda extension: extension.mode != 'primary' or extension.id in original_hierarchy) return view.inherit_children_ids @api.model def _view_obj(self, view_id): if isinstance(view_id, str): return self.search([('key', '=', view_id)], limit=1) or self.env.ref(view_id) elif isinstance(view_id, int): return self.browse(view_id) # It can already be a view object when called by '_views_get()' that is calling '_view_obj' # for it's inherit_children_ids, passing them directly as object record. 
return view_id # Returns all views (called and inherited) related to a view # Used by translation mechanism, SEO and optional templates @api.model def _views_get(self, view_id, get_children=True, bundles=False, root=True, visited=None): """ For a given view ``view_id``, should return: * the view itself (starting from its top most parent) * all views inheriting from it, enabled or not - but not the optional children of a non-enabled child * all views called from it (via t-call) :returns recordset of ir.ui.view """ try: view = self._view_obj(view_id) except ValueError: _logger.warning("Could not find view object with view_id '%s'", view_id) return self.env['ir.ui.view'] if visited is None: visited = [] original_hierarchy = self._context.get('__views_get_original_hierarchy', []) while root and view.inherit_id: original_hierarchy.append(view.id) view = view.inherit_id views_to_return = view node = etree.fromstring(view.arch) xpath = "//t[@t-call]" if bundles: xpath += "| //t[@t-call-assets]" for child in node.xpath(xpath): try: called_view = self._view_obj(child.get('t-call', child.get('t-call-assets'))) except ValueError: continue if called_view and called_view not in views_to_return and called_view.id not in visited: views_to_return += self._views_get(called_view, get_children=get_children, bundles=bundles, visited=visited + views_to_return.ids) if not get_children: return views_to_return extensions = self._view_get_inherited_children(view) # Keep children in a deterministic order regardless of their applicability for extension in extensions.sorted(key=lambda v: v.id): # only return optional grandchildren if this child is enabled if extension.id not in visited: for ext_view in self._views_get(extension, get_children=extension.active, root=False, visited=visited + views_to_return.ids): if ext_view not in views_to_return: views_to_return += ext_view return views_to_return @api.model def get_related_views(self, key, bundles=False): """ Get inherit view's informations of 
the template ``key``. returns templates info (which can be active or not) ``bundles=True`` returns also the asset bundles """ user_groups = set(self.env.user.groups_id) View = self.with_context(active_test=False, lang=None) views = View._views_get(key, bundles=bundles) return views.filtered(lambda v: not v.groups_id or len(user_groups.intersection(v.groups_id))) # -------------------------------------------------------------------------- # Snippet saving # -------------------------------------------------------------------------- @api.model def _get_snippet_addition_view_key(self, template_key, key): return '%s.%s' % (template_key, key) @api.model def _snippet_save_view_values_hook(self): return {} def _find_available_name(self, name, used_names): attempt = 1 candidate_name = name while candidate_name in used_names: attempt += 1 candidate_name = f"{name} ({attempt})" return candidate_name @api.model def save_snippet(self, name, arch, template_key, snippet_key, thumbnail_url): """ Saves a new snippet arch so that it appears with the given name when using the given snippets template. 
:param name: the name of the snippet to save :param arch: the html structure of the snippet to save :param template_key: the key of the view regrouping all snippets in which the snippet to save is meant to appear :param snippet_key: the key (without module part) to identify the snippet from which the snippet to save originates :param thumbnail_url: the url of the thumbnail to use when displaying the snippet to save """ app_name = template_key.split('.')[0] snippet_key = '%s_%s' % (snippet_key, uuid.uuid4().hex) full_snippet_key = '%s.%s' % (app_name, snippet_key) # find available name current_website = self.env['website'].browse(self._context.get('website_id')) website_domain = current_website.website_domain() used_names = self.search(expression.AND([ [('name', '=like', '%s%%' % name)], website_domain ])).mapped('name') name = self._find_available_name(name, used_names) # html to xml to add '/' at the end of self closing tags like br, ... xml_arch = etree.tostring(html.fromstring(arch), encoding='utf-8') new_snippet_view_values = { 'name': name, 'key': full_snippet_key, 'type': 'qweb', 'arch': xml_arch, } new_snippet_view_values.update(self._snippet_save_view_values_hook()) self.create(new_snippet_view_values) custom_section = self.search([('key', '=', template_key)]) snippet_addition_view_values = { 'name': name + ' Block', 'key': self._get_snippet_addition_view_key(template_key, snippet_key), 'inherit_id': custom_section.id, 'type': 'qweb', 'arch': """ <data inherit_id="%s"> <xpath expr="//div[@id='snippet_custom']" position="attributes"> <attribute name="class" remove="d-none" separator=" "/> </xpath> <xpath expr="//div[@id='snippet_custom_body']" position="inside"> <t t-snippet="%s" t-thumbnail="%s"/> </xpath> </data> """ % (template_key, full_snippet_key, thumbnail_url), } snippet_addition_view_values.update(self._snippet_save_view_values_hook()) self.create(snippet_addition_view_values) @api.model def rename_snippet(self, name, view_id, template_key): 
snippet_view = self.browse(view_id) key = snippet_view.key.split('.')[1] custom_key = self._get_snippet_addition_view_key(template_key, key) snippet_addition_view = self.search([('key', '=', custom_key)]) if snippet_addition_view: snippet_addition_view.name = name + ' Block' snippet_view.name = name @api.model def delete_snippet(self, view_id, template_key): snippet_view = self.browse(view_id) key = snippet_view.key.split('.')[1] custom_key = self._get_snippet_addition_view_key(template_key, key) snippet_addition_view = self.search([('key', '=', custom_key)]) (snippet_addition_view | snippet_view).unlink()
40.367758
16,026
8,536
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import base64 import os import re import uuid from lxml import etree from odoo import models from odoo.tools import misc from odoo.addons.base.models.assetsbundle import EXTENSIONS _match_asset_file_url_regex = re.compile("^/(\w+)/(.+?)(\.custom\.(.+))?\.(\w+)$") class Assets(models.AbstractModel): _name = 'web_editor.assets' _description = 'Assets Utils' def get_all_custom_attachments(self, urls): """ Fetch all the ir.attachment records related to given URLs. Params: urls (str[]): list of urls Returns: ir.attachment(): attachment records related to the given URLs. """ return self._get_custom_attachment(urls, op='in') def get_asset_content(self, url, url_info=None, custom_attachments=None): """ Fetch the content of an asset (scss / js) file. That content is either the one of the related file on the disk or the one of the corresponding custom ir.attachment record. Params: url (str): the URL of the asset (scss / js) file/ir.attachment url_info (dict, optional): the related url info (see get_asset_info) (allows to optimize some code which already have the info and do not want this function to re-get it) custom_attachments (ir.attachment(), optional): the related custom ir.attachment records the function might need to search into (allows to optimize some code which already have that info and do not want this function to re-get it) Returns: utf-8 encoded content of the asset (scss / js) """ if url_info is None: url_info = self.get_asset_info(url) if url_info["customized"]: # If the file is already customized, the content is found in the # corresponding attachment attachment = None if custom_attachments is None: attachment = self._get_custom_attachment(url) else: attachment = custom_attachments.filtered(lambda r: r.url == url) return attachment and base64.b64decode(attachment.datas) or False # If the file is not yet customized, the content is found by reading # the local 
file with misc.file_open(url.strip('/'), 'rb', filter_ext=EXTENSIONS) as f: return f.read() def get_asset_info(self, url): """ Return information about an asset (scss / js) file/ir.attachment just by looking at its URL. Params: url (str): the url of the asset (scss / js) file/ir.attachment Returns: dict: module (str): the original asset's related app resource_path (str): the relative path to the original asset from the related app customized (bool): whether the asset is a customized one or not bundle (str): the name of the bundle the asset customizes (False if this is not a customized asset) """ m = _match_asset_file_url_regex.match(url) if not m: return False return { 'module': m.group(1), 'resource_path': "%s.%s" % (m.group(2), m.group(5)), 'customized': bool(m.group(3)), 'bundle': m.group(4) or False } def make_custom_asset_file_url(self, url, bundle_xmlid): """ Return the customized version of an asset URL, that is the URL the asset would have if it was customized. Params: url (str): the original asset's url bundle_xmlid (str): the name of the bundle the asset would customize Returns: str: the URL the given asset would have if it was customized in the given bundle """ parts = url.rsplit(".", 1) return "%s.custom.%s.%s" % (parts[0], bundle_xmlid, parts[1]) def reset_asset(self, url, bundle): """ Delete the potential customizations made to a given (original) asset. Params: url (str): the URL of the original asset (scss / js) file bundle (str): the name of the bundle in which the customizations to delete were made """ custom_url = self.make_custom_asset_file_url(url, bundle) # Simply delete the attachement which contains the modified scss/js file # and the xpath view which links it self._get_custom_attachment(custom_url).unlink() self._get_custom_asset(custom_url).unlink() def save_asset(self, url, bundle, content, file_type): """ Customize the content of a given asset (scss / js). 
Params: url (src): the URL of the original asset to customize (whether or not the asset was already customized) bundle (src): the name of the bundle in which the customizations will take effect content (src): the new content of the asset (scss / js) file_type (src): either 'scss' or 'js' according to the file being customized """ custom_url = self.make_custom_asset_file_url(url, bundle) datas = base64.b64encode((content or "\n").encode("utf-8")) # Check if the file to save had already been modified custom_attachment = self._get_custom_attachment(custom_url) if custom_attachment: # If it was already modified, simply override the corresponding # attachment content custom_attachment.write({"datas": datas}) else: # If not, create a new attachment to copy the original scss/js file # content, with its modifications new_attach = { 'name': url.split("/")[-1], 'type': "binary", 'mimetype': (file_type == 'js' and 'text/javascript' or 'text/scss'), 'datas': datas, 'url': custom_url, } new_attach.update(self._save_asset_hook()) self.env["ir.attachment"].create(new_attach) # Create an asset with the new attachment IrAsset = self.env['ir.asset'] new_asset = { 'path': custom_url, 'target': url, 'directive': 'replace', **self._save_asset_hook(), } target_asset = self._get_custom_asset(url) if target_asset: new_asset['name'] = target_asset.name + ' override' new_asset['bundle'] = target_asset.bundle new_asset['sequence'] = target_asset.sequence else: path_parts = '/'.join(os.path.split(custom_url)).split('/') new_asset['name'] = '%s: replace %s' % (bundle, path_parts[-1]) new_asset['bundle'] = IrAsset._get_related_bundle(url, bundle) IrAsset.create(new_asset) self.env["ir.qweb"].clear_caches() def _get_custom_attachment(self, custom_url, op='='): """ Fetch the ir.attachment record related to the given customized asset. 
Params: custom_url (str): the URL of the customized asset op (str, default: '='): the operator to use to search the records Returns: ir.attachment() """ assert op in ('in', '='), 'Invalid operator' return self.env["ir.attachment"].search([("url", op, custom_url)]) def _get_custom_asset(self, custom_url): """ Fetch the ir.asset record related to the given customized asset (the inheriting view which replace the original asset by the customized one). Params: custom_url (str): the URL of the customized asset Returns: ir.asset() """ url = custom_url[1:] if custom_url.startswith(('/', '\\')) else custom_url return self.env['ir.asset'].search([('path', 'like', url)]) def _save_asset_hook(self): """ Returns the additional values to use to write the DB on customized attachment and asset creation. Returns: dict """ return {}
36.169492
8,536
23,179
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. """ Web_editor-context rendering needs to add some metadata to rendered and allow to edit fields, as well as render a few fields differently. Also, adds methods to convert values back to Odoo models. """ import babel import base64 import io import itertools import json import logging import os import re import hashlib from datetime import datetime import pytz import requests from datetime import datetime from lxml import etree, html from PIL import Image as I from werkzeug import urls import odoo.modules from odoo import _, api, models, fields from odoo.tools import ustr, posix_to_ldml, pycompat from odoo.tools import html_escape as escape from odoo.tools.misc import get_lang, babel_locale_parse from odoo.addons.base.models import ir_qweb REMOTE_CONNECTION_TIMEOUT = 2.5 logger = logging.getLogger(__name__) class QWeb(models.AbstractModel): """ QWeb object for rendering editor stuff """ _inherit = 'ir.qweb' # compile directives def _compile_node(self, el, options, indent): snippet_key = options.get('snippet-key') if snippet_key == options['template'] \ or options.get('snippet-sub-call-key') == options['template']: # Get the path of element to only consider the first node of the # snippet template content (ignoring all ancestors t elements which # are not t-call ones) nb_real_elements_in_hierarchy = 0 node = el while node is not None and nb_real_elements_in_hierarchy < 2: if node.tag != 't' or 't-call' in node.attrib: nb_real_elements_in_hierarchy += 1 node = node.getparent() if nb_real_elements_in_hierarchy == 1: # The first node might be a call to a sub template sub_call = el.get('t-call') if sub_call: el.set('t-options', f"{{'snippet-key': '{snippet_key}', 'snippet-sub-call-key': '{sub_call}'}}") # If it already has a data-snippet it is a saved or an inherited snippet. # Do not override it. 
elif 'data-snippet' not in el.attrib: el.attrib['data-snippet'] = snippet_key.split('.', 1)[-1] return super()._compile_node(el, options, indent) def _compile_directive_snippet(self, el, options, indent): key = el.attrib.pop('t-snippet') el.set('t-call', key) el.set('t-options', "{'snippet-key': '" + key + "'}") View = self.env['ir.ui.view'].sudo() view_id = View.get_view_id(key) name = View.browse(view_id).name thumbnail = el.attrib.pop('t-thumbnail', "oe-thumbnail") # Forbid sanitize contains the specific reason: # - "true": always forbid # - "form": forbid if forms are sanitized forbid_sanitize = el.attrib.get('t-forbid-sanitize') div = '<div name="%s" data-oe-type="snippet" data-oe-thumbnail="%s" data-oe-snippet-id="%s" data-oe-keywords="%s" %s>' % ( escape(pycompat.to_text(name)), escape(pycompat.to_text(thumbnail)), escape(pycompat.to_text(view_id)), escape(pycompat.to_text(el.findtext('keywords'))), f'data-oe-forbid-sanitize="{forbid_sanitize}"' if forbid_sanitize else '', ) self._appendText(div, options) code = self._compile_node(el, options, indent) self._appendText('</div>', options) return code def _compile_directive_snippet_call(self, el, options, indent): key = el.attrib.pop('t-snippet-call') el.set('t-call', key) el.set('t-options', "{'snippet-key': '" + key + "'}") return self._compile_node(el, options, indent) def _compile_directive_install(self, el, options, indent): if self.user_has_groups('base.group_system'): module = self.env['ir.module.module'].search([('name', '=', el.attrib.get('t-install'))]) if not module or module.state == 'installed': return [] name = el.attrib.get('string') or 'Snippet' thumbnail = el.attrib.pop('t-thumbnail', 'oe-thumbnail') div = '<div name="%s" data-oe-type="snippet" data-module-id="%s" data-oe-thumbnail="%s"><section/></div>' % ( escape(pycompat.to_text(name)), module.id, escape(pycompat.to_text(thumbnail)) ) self._appendText(div, options) return [] def _compile_directive_tag(self, el, options, indent): if 
el.get('t-placeholder'): el.set('t-att-placeholder', el.attrib.pop('t-placeholder')) return super(QWeb, self)._compile_directive_tag(el, options, indent) # order and ignore def _directives_eval_order(self): directives = super(QWeb, self)._directives_eval_order() directives.insert(directives.index('call'), 'snippet') directives.insert(directives.index('call'), 'snippet-call') directives.insert(directives.index('call'), 'install') return directives #------------------------------------------------------ # QWeb fields #------------------------------------------------------ class Field(models.AbstractModel): _name = 'ir.qweb.field' _description = 'Qweb Field' _inherit = 'ir.qweb.field' @api.model def attributes(self, record, field_name, options, values): attrs = super(Field, self).attributes(record, field_name, options, values) field = record._fields[field_name] placeholder = options.get('placeholder') or getattr(field, 'placeholder', None) if placeholder: attrs['placeholder'] = placeholder if options['translate'] and field.type in ('char', 'text'): name = "%s,%s" % (record._name, field_name) domain = [('name', '=', name), ('res_id', '=', record.id), ('type', '=', 'model'), ('lang', '=', options.get('lang'))] translation = record.env['ir.translation'].search(domain, limit=1) attrs['data-oe-translation-state'] = translation and translation.state or 'to_translate' return attrs def value_from_string(self, value): return value @api.model def from_html(self, model, field, element): return self.value_from_string(element.text_content().strip()) class Integer(models.AbstractModel): _name = 'ir.qweb.field.integer' _description = 'Qweb Field Integer' _inherit = 'ir.qweb.field.integer' @api.model def from_html(self, model, field, element): lang = self.user_lang() value = element.text_content().strip() return int(value.replace(lang.thousands_sep or '', '')) class Float(models.AbstractModel): _name = 'ir.qweb.field.float' _description = 'Qweb Field Float' _inherit = 
'ir.qweb.field.float' @api.model def from_html(self, model, field, element): lang = self.user_lang() value = element.text_content().strip() return float(value.replace(lang.thousands_sep or '', '') .replace(lang.decimal_point, '.')) class ManyToOne(models.AbstractModel): _name = 'ir.qweb.field.many2one' _description = 'Qweb Field Many to One' _inherit = 'ir.qweb.field.many2one' @api.model def attributes(self, record, field_name, options, values): attrs = super(ManyToOne, self).attributes(record, field_name, options, values) if options.get('inherit_branding'): many2one = getattr(record, field_name) if many2one: attrs['data-oe-many2one-id'] = many2one.id attrs['data-oe-many2one-model'] = many2one._name return attrs @api.model def from_html(self, model, field, element): Model = self.env[element.get('data-oe-model')] id = int(element.get('data-oe-id')) M2O = self.env[field.comodel_name] field_name = element.get('data-oe-field') many2one_id = int(element.get('data-oe-many2one-id')) record = many2one_id and M2O.browse(many2one_id) if record and record.exists(): # save the new id of the many2one Model.browse(id).write({field_name: many2one_id}) # not necessary, but might as well be explicit about it return None class Contact(models.AbstractModel): _name = 'ir.qweb.field.contact' _description = 'Qweb Field Contact' _inherit = 'ir.qweb.field.contact' @api.model def attributes(self, record, field_name, options, values): attrs = super(Contact, self).attributes(record, field_name, options, values) if options.get('inherit_branding'): options.pop('template_options') # remove options not specific to this widget attrs['data-oe-contact-options'] = json.dumps(options) return attrs # helper to call the rendering of contact field @api.model def get_record_to_html(self, ids, options=None): return self.value_to_html(self.env['res.partner'].search([('id', '=', ids[0])]), options=options) class Date(models.AbstractModel): _name = 'ir.qweb.field.date' _description = 'Qweb Field Date' 
_inherit = 'ir.qweb.field.date' @api.model def attributes(self, record, field_name, options, values): attrs = super(Date, self).attributes(record, field_name, options, values) if options.get('inherit_branding'): attrs['data-oe-original'] = record[field_name] if record._fields[field_name].type == 'datetime': attrs = self.env['ir.qweb.field.datetime'].attributes(record, field_name, options, values) attrs['data-oe-type'] = 'datetime' return attrs lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env) locale = babel_locale_parse(lg.code) babel_format = value_format = posix_to_ldml(lg.date_format, locale=locale) if record[field_name]: date = fields.Date.from_string(record[field_name]) value_format = pycompat.to_text(babel.dates.format_date(date, format=babel_format, locale=locale)) attrs['data-oe-original-with-format'] = value_format return attrs @api.model def from_html(self, model, field, element): value = element.text_content().strip() if not value: return False lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env) date = datetime.strptime(value, lg.date_format) return fields.Date.to_string(date) class DateTime(models.AbstractModel): _name = 'ir.qweb.field.datetime' _description = 'Qweb Field Datetime' _inherit = 'ir.qweb.field.datetime' @api.model def attributes(self, record, field_name, options, values): attrs = super(DateTime, self).attributes(record, field_name, options, values) if options.get('inherit_branding'): value = record[field_name] lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env) locale = babel_locale_parse(lg.code) babel_format = value_format = posix_to_ldml('%s %s' % (lg.date_format, lg.time_format), locale=locale) tz = record.env.context.get('tz') or self.env.user.tz if isinstance(value, str): value = fields.Datetime.from_string(value) if value: # convert from UTC (server timezone) to user timezone value = fields.Datetime.context_timestamp(self.with_context(tz=tz), 
timestamp=value) value_format = pycompat.to_text(babel.dates.format_datetime(value, format=babel_format, locale=locale)) value = fields.Datetime.to_string(value) attrs['data-oe-original'] = value attrs['data-oe-original-with-format'] = value_format attrs['data-oe-original-tz'] = tz return attrs @api.model def from_html(self, model, field, element): value = element.text_content().strip() if not value: return False # parse from string to datetime lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env) dt = datetime.strptime(value, '%s %s' % (lg.date_format, lg.time_format)) # convert back from user's timezone to UTC tz_name = element.attrib.get('data-oe-original-tz') or self.env.context.get('tz') or self.env.user.tz if tz_name: try: user_tz = pytz.timezone(tz_name) utc = pytz.utc dt = user_tz.localize(dt).astimezone(utc) except Exception: logger.warning( "Failed to convert the value for a field of the model" " %s back from the user's timezone (%s) to UTC", model, tz_name, exc_info=True) # format back to string return fields.Datetime.to_string(dt) class Text(models.AbstractModel): _name = 'ir.qweb.field.text' _description = 'Qweb Field Text' _inherit = 'ir.qweb.field.text' @api.model def from_html(self, model, field, element): return html_to_text(element) class Selection(models.AbstractModel): _name = 'ir.qweb.field.selection' _description = 'Qweb Field Selection' _inherit = 'ir.qweb.field.selection' @api.model def from_html(self, model, field, element): value = element.text_content().strip() selection = field.get_description(self.env)['selection'] for k, v in selection: if isinstance(v, str): v = ustr(v) if value == v: return k raise ValueError(u"No value found for label %s in selection %s" % ( value, selection)) class HTML(models.AbstractModel): _name = 'ir.qweb.field.html' _description = 'Qweb Field HTML' _inherit = 'ir.qweb.field.html' @api.model def attributes(self, record, field_name, options, values=None): attrs = 
super().attributes(record, field_name, options, values) if options.get('inherit_branding'): field = record._fields[field_name] if field.sanitize: attrs['data-oe-sanitize'] = 1 if field.sanitize_form else 'allow_form' return attrs @api.model def from_html(self, model, field, element): content = [] if element.text: content.append(element.text) content.extend(html.tostring(child, encoding='unicode') for child in element.iterchildren(tag=etree.Element)) return '\n'.join(content) class Image(models.AbstractModel): """ Widget options: ``class`` set as attribute on the generated <img> tag """ _name = 'ir.qweb.field.image' _description = 'Qweb Field Image' _inherit = 'ir.qweb.field.image' local_url_re = re.compile(r'^/(?P<module>[^]]+)/static/(?P<rest>.+)$') @api.model def from_html(self, model, field, element): if element.find('img') is None: return False url = element.find('img').get('src') url_object = urls.url_parse(url) if url_object.path.startswith('/web/image'): fragments = url_object.path.split('/') query = url_object.decode_query() url_id = fragments[3].split('-')[0] # ir.attachment image urls: /web/image/<id>[-<checksum>][/...] if url_id.isdigit(): model = 'ir.attachment' oid = url_id field = 'datas' # url of binary field on model: /web/image/<model>/<id>/<field>[/...] 
else: model = query.get('model', fragments[3]) oid = query.get('id', fragments[4]) field = query.get('field', fragments[5]) item = self.env[model].browse(int(oid)) return item[field] if self.local_url_re.match(url_object.path): return self.load_local_url(url) return self.load_remote_url(url) def load_local_url(self, url): match = self.local_url_re.match(urls.url_parse(url).path) rest = match.group('rest') for sep in os.sep, os.altsep: if sep and sep != '/': rest.replace(sep, '/') path = odoo.modules.get_module_resource( match.group('module'), 'static', *(rest.split('/'))) if not path: return None try: with open(path, 'rb') as f: # force complete image load to ensure it's valid image data image = I.open(f) image.load() f.seek(0) return base64.b64encode(f.read()) except Exception: logger.exception("Failed to load local image %r", url) return None def load_remote_url(self, url): try: # should probably remove remote URLs entirely: # * in fields, downloading them without blowing up the server is a # challenge # * in views, may trigger mixed content warnings if HTTPS CMS # linking to HTTP images # implement drag & drop image upload to mitigate? req = requests.get(url, timeout=REMOTE_CONNECTION_TIMEOUT) # PIL needs a seekable file-like image so wrap result in IO buffer image = I.open(io.BytesIO(req.content)) # force a complete load of the image data to validate it image.load() except Exception: logger.exception("Failed to load remote image %r", url) return None # don't use original data in case weird stuff was smuggled in, with # luck PIL will remove some of it? 
out = io.BytesIO() image.save(out, image.format) return base64.b64encode(out.getvalue()) class Monetary(models.AbstractModel): _name = 'ir.qweb.field.monetary' _inherit = 'ir.qweb.field.monetary' @api.model def from_html(self, model, field, element): lang = self.user_lang() value = element.find('span').text.strip() return float(value.replace(lang.thousands_sep or '', '') .replace(lang.decimal_point, '.')) class Duration(models.AbstractModel): _name = 'ir.qweb.field.duration' _description = 'Qweb Field Duration' _inherit = 'ir.qweb.field.duration' @api.model def attributes(self, record, field_name, options, values): attrs = super(Duration, self).attributes(record, field_name, options, values) if options.get('inherit_branding'): attrs['data-oe-original'] = record[field_name] return attrs @api.model def from_html(self, model, field, element): value = element.text_content().strip() # non-localized value return float(value) class RelativeDatetime(models.AbstractModel): _name = 'ir.qweb.field.relative' _description = 'Qweb Field Relative' _inherit = 'ir.qweb.field.relative' # get formatting from ir.qweb.field.relative but edition/save from datetime class QwebView(models.AbstractModel): _name = 'ir.qweb.field.qweb' _description = 'Qweb Field qweb' _inherit = 'ir.qweb.field.qweb' def html_to_text(element): """ Converts HTML content with HTML-specified line breaks (br, p, div, ...) in roughly equivalent textual content. Used to replace and fixup the roundtripping of text and m2o: when using libxml 2.8.0 (but not 2.9.1) and parsing HTML with lxml.html.fromstring whitespace text nodes (text nodes composed *solely* of whitespace) are stripped out with no recourse, and fundamentally relying on newlines being in the text (e.g. inserted during user edition) is probably poor form anyway. 
-> this utility function collapses whitespace sequences and replaces nodes by roughly corresponding linebreaks * p are pre-and post-fixed by 2 newlines * br are replaced by a single newline * block-level elements not already mentioned are pre- and post-fixed by a single newline ought be somewhat similar (but much less high-tech) to aaronsw's html2text. the latter produces full-blown markdown, our text -> html converter only replaces newlines by <br> elements at this point so we're reverting that, and a few more newline-ish elements in case the user tried to add newlines/paragraphs into the text field :param element: lxml.html content :returns: corresponding pure-text output """ # output is a list of str | int. Integers are padding requests (in minimum # number of newlines). When multiple padding requests, fold them into the # biggest one output = [] _wrap(element, output) # remove any leading or tailing whitespace, replace sequences of # (whitespace)\n(whitespace) by a single newline, where (whitespace) is a # non-newline whitespace in this case return re.sub( r'[ \t\r\f]*\n[ \t\r\f]*', '\n', ''.join(_realize_padding(output)).strip()) _PADDED_BLOCK = set('p h1 h2 h3 h4 h5 h6'.split()) # https://developer.mozilla.org/en-US/docs/HTML/Block-level_elements minus p _MISC_BLOCK = set(( 'address article aside audio blockquote canvas dd dl div figcaption figure' ' footer form header hgroup hr ol output pre section tfoot ul video' ).split()) def _collapse_whitespace(text): """ Collapses sequences of whitespace characters in ``text`` to a single space """ return re.sub('\s+', ' ', text) def _realize_padding(it): """ Fold and convert padding requests: integers in the output sequence are requests for at least n newlines of padding. Runs thereof can be collapsed into the largest requests and converted to newlines. 
""" padding = 0 for item in it: if isinstance(item, int): padding = max(padding, item) continue if padding: yield '\n' * padding padding = 0 yield item # leftover padding irrelevant as the output will be stripped def _wrap(element, output, wrapper=''): """ Recursively extracts text from ``element`` (via _element_to_text), and wraps it all in ``wrapper``. Extracted text is added to ``output`` :type wrapper: basestring | int """ output.append(wrapper) if element.text: output.append(_collapse_whitespace(element.text)) for child in element: _element_to_text(child, output) output.append(wrapper) def _element_to_text(e, output): if e.tag == 'br': output.append('\n') elif e.tag in _PADDED_BLOCK: _wrap(e, output, 2) elif e.tag in _MISC_BLOCK: _wrap(e, output, 1) else: # inline _wrap(e, output) if e.tail: output.append(_collapse_whitespace(e.tail))
36.387755
23,179
1,266
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import models, fields class ConverterTest(models.Model): _name = 'web_editor.converter.test' _description = 'Web Editor Converter Test' # disable translation export for those brilliant field labels and values _translate = False char = fields.Char() integer = fields.Integer() float = fields.Float() numeric = fields.Float(digits=(16, 2)) many2one = fields.Many2one('web_editor.converter.test.sub') binary = fields.Binary(attachment=False) date = fields.Date() datetime = fields.Datetime() selection_str = fields.Selection([ ('A', "Qu'il n'est pas arrivé à Toronto"), ('B', "Qu'il était supposé arriver à Toronto"), ('C', "Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"), ('D', "La réponse D"), ], string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et " u"qu'il fait une escale technique à St Claude, on dit:") html = fields.Html() text = fields.Text() class ConverterTestSub(models.Model): _name = 'web_editor.converter.test.sub' _description = 'Web Editor Converter Subtest' name = fields.Char()
34
1,258
1,796
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import re from odoo.http import request from odoo.addons.bus.controllers.main import BusController from odoo.exceptions import AccessDenied class EditorCollaborationController(BusController): # --------------------------- # Extends BUS Controller Poll # --------------------------- def _poll(self, dbname, channels, last, options): if request.session.uid: # Do not alter original list. channels = list(channels) for channel in channels: if isinstance(channel, str): match = re.match(r'editor_collaboration:(\w+(?:\.\w+)*):(\w+):(\d+)', channel) if match: model_name = match[1] field_name = match[2] res_id = int(match[3]) # Verify access to the edition channel. if not request.env.user.has_group('base.group_user'): raise AccessDenied() document = request.env[model_name].browse([res_id]) document.check_access_rights('read') document.check_field_access_rights('read', [field_name]) document.check_access_rule('read') document.check_access_rights('write') document.check_field_access_rights('write', [field_name]) document.check_access_rule('write') channels.append((request.db, 'editor_collaboration', model_name, field_name, res_id)) return super(EditorCollaborationController, self)._poll(dbname, channels, last, options)
43.804878
1,796
34,412
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import io
import json
import logging
import re
import time
import requests
import werkzeug.urls
import werkzeug.wrappers
from PIL import Image, ImageFont, ImageDraw
from lxml import etree
from base64 import b64decode, b64encode
from math import floor

from odoo.http import request
from odoo import http, tools, _, SUPERUSER_ID
from odoo.addons.http_routing.models.ir_http import slug, unslug
from odoo.exceptions import UserError, ValidationError
from odoo.modules.module import get_resource_path
from odoo.tools.mimetypes import guess_mimetype
from odoo.tools.image import image_data_uri, base64_to_image
from odoo.addons.base.models.assetsbundle import AssetsBundle

from ..models.ir_attachment import SUPPORTED_IMAGE_EXTENSIONS, SUPPORTED_IMAGE_MIMETYPES

logger = logging.getLogger(__name__)
DEFAULT_LIBRARY_ENDPOINT = 'https://media-api.odoo.com'

diverging_history_regex = 'data-last-history-steps="([0-9,]*?)"'


def ensure_no_history_divergence(record, html_field_name, incoming_history_ids):
    """Raise ValidationError if the server-side value of ``html_field_name``
    carries a last-history-step id that is absent from ``incoming_history_ids``
    (i.e. someone else already saved a diverging version)."""
    server_history_matches = re.search(diverging_history_regex, record[html_field_name] or '')
    # Do not check old documents without data-last-history-steps.
    if server_history_matches:
        server_last_history_id = server_history_matches[1].split(',')[-1]
        if server_last_history_id not in incoming_history_ids:
            logger.warning('The document was already saved from someone with a different history for model %r, field %r with id %r.', record._name, html_field_name, record.id)
            raise ValidationError(_('The document was already saved from someone with a different history for model %r, field %r with id %r.', record._name, html_field_name, record.id))


def handle_history_divergence(record, html_field_name, vals):
    """Validate incoming collaborative-edition history ids in ``vals`` against
    the record, then rewrite the field value so only the latest id is kept."""
    # Do not handle history divergence if the field is not in the values.
    if html_field_name not in vals:
        return
    incoming_html = vals[html_field_name]
    incoming_history_matches = re.search(diverging_history_regex, incoming_html)
    # When there is no incoming history id, it means that the value does not
    # comes from the odoo editor or the collaboration was not activated. In
    # project, it could come from the collaboration pad. In that case, we do not
    # handle history divergences.
    if incoming_history_matches is None:
        return
    incoming_history_ids = incoming_history_matches[1].split(',')
    incoming_last_history_id = incoming_history_ids[-1]

    if record[html_field_name]:
        ensure_no_history_divergence(record, html_field_name, incoming_history_ids)

    # Save only the latest id.
    vals[html_field_name] = incoming_html[0:incoming_history_matches.start(1)] + incoming_last_history_id + incoming_html[incoming_history_matches.end(1):]


class Web_Editor(http.Controller):

    #------------------------------------------------------
    # convert font into picture
    #------------------------------------------------------
    @http.route([
        '/web_editor/font_to_img/<icon>',
        '/web_editor/font_to_img/<icon>/<color>',
        '/web_editor/font_to_img/<icon>/<color>/<int:size>',
        '/web_editor/font_to_img/<icon>/<color>/<int:width>x<int:height>',
        '/web_editor/font_to_img/<icon>/<color>/<int:size>/<int:alpha>',
        '/web_editor/font_to_img/<icon>/<color>/<int:width>x<int:height>/<int:alpha>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>/<int:size>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>/<int:width>x<int:height>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>/<int:width>x<int:height>/<int:alpha>',
    ], type='http', auth="none")
    def export_icon_to_png(self, icon, color='#000', bg=None, size=100, alpha=255, font='/web/static/lib/fontawesome/fonts/fontawesome-webfont.ttf', width=None, height=None):
        """ This method converts an unicode character to an image (using Font
            Awesome font by default) and is used only for mass mailing because
            custom fonts are not supported in mail.
            :param icon : decimal encoding of unicode character
            :param color : RGB code of the color
            :param bg : RGB code of the background color
            :param size : Pixels in integer
            :param alpha : transparency of the image from 0 to 255
            :param font : font path
            :param width : Pixels in integer
            :param height : Pixels in integer

            :returns PNG image converted from given font
        """
        size = max(width, height, 1) if width else size
        width = width or size
        height = height or size
        # Make sure we have at least size=1
        width = max(1, min(width, 512))
        height = max(1, min(height, 512))
        # Initialize font
        with tools.file_open(font.lstrip('/'), 'rb') as f:
            # NOTE(review): ImageDraw.textsize below is deprecated in recent
            # Pillow releases — behavior kept as-is for compatibility.
            font_obj = ImageFont.truetype(f, size)

        # if received character is not a number, keep old behaviour (icon is character)
        icon = chr(int(icon)) if icon.isdigit() else icon

        # Background standardization
        if bg is not None and bg.startswith('rgba'):
            bg = bg.replace('rgba', 'rgb')
            bg = ','.join(bg.split(',')[:-1])+')'

        # Convert the opacity value compatible with PIL Image color (0 to 255)
        # when color specifier is 'rgba'
        if color is not None and color.startswith('rgba'):
            *rgb, a = color.strip(')').split(',')
            opacity = str(floor(float(a) * 255))
            color = ','.join([*rgb, opacity]) + ')'

        # Determine the dimensions of the icon
        image = Image.new("RGBA", (width, height), color)
        draw = ImageDraw.Draw(image)

        boxw, boxh = draw.textsize(icon, font=font_obj)
        draw.text((0, 0), icon, font=font_obj)
        left, top, right, bottom = image.getbbox()

        # Create an alpha mask
        imagemask = Image.new("L", (boxw, boxh), 0)
        drawmask = ImageDraw.Draw(imagemask)
        drawmask.text((-left, -top), icon, font=font_obj, fill=255)

        # Create a solid color image and apply the mask
        if color.startswith('rgba'):
            color = color.replace('rgba', 'rgb')
            color = ','.join(color.split(',')[:-1])+')'
        iconimage = Image.new("RGBA", (boxw, boxh), color)
        iconimage.putalpha(imagemask)

        # Create output image
        outimage = Image.new("RGBA", (boxw, height), bg or (0, 0, 0, 0))
        outimage.paste(iconimage, (left, top), iconimage)

        # output image
        output = io.BytesIO()
        outimage.save(output, format="PNG")
        response = werkzeug.wrappers.Response()
        response.mimetype = 'image/png'
        response.data = output.getvalue()
        response.headers['Cache-Control'] = 'public, max-age=604800'
        response.headers['Access-Control-Allow-Origin'] = '*'
        response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
        response.headers['Connection'] = 'close'
        response.headers['Date'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime())
        # NOTE(review): Expires is computed 60 weeks ahead (604800*60) while
        # Cache-Control says one week — looks inconsistent; confirm intent.
        response.headers['Expires'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime(time.time()+604800*60))

        return response

    #------------------------------------------------------
    # Update a checklist in the editor on check/uncheck
    #------------------------------------------------------
    @http.route('/web_editor/checklist', type='json', auth='user')
    def update_checklist(self, res_model, res_id, filename, checklistId, checked, **kwargs):
        """Toggle the o_checked class of a checklist <li> stored in an html
        field and write the updated value back on the record."""
        record = request.env[res_model].browse(res_id)
        value = filename in record._fields and record[filename]
        htmlelem = etree.fromstring("<div>%s</div>" % value, etree.HTMLParser())
        checked = bool(checked)

        li = htmlelem.find(".//li[@id='checklist-id-" + str(checklistId) + "']")

        if li is None:
            return value

        classname = li.get('class', '')
        if ('o_checked' in classname) != checked:
            if checked:
                classname = '%s o_checked' % classname
            else:
                classname = re.sub(r"\s?o_checked\s?", '', classname)
            li.set('class', classname)
        else:
            return value

        # Strip the synthetic <div> wrapper added above ([5:-6]).
        value = etree.tostring(htmlelem[0][0], encoding='utf-8', method='html')[5:-6].decode("utf-8")
        record.write({filename: value})

        return value

    @http.route('/web_editor/attachment/add_data', type='json', auth='user', methods=['POST'], website=True)
    def add_data(self, name, data, is_image, quality=0, width=0, height=0, res_id=False, res_model='ir.ui.view', generate_access_token=False, **kwargs):
        """Create an attachment from base64 data and return its media info."""
        if is_image:
            format_error_msg = _("Uploaded image's format is not supported. Try with: %s", ', '.join(SUPPORTED_IMAGE_EXTENSIONS))
            try:
                data = tools.image_process(data, size=(width, height), quality=quality, verify_resolution=True)
                mimetype = guess_mimetype(b64decode(data))
                if mimetype not in SUPPORTED_IMAGE_MIMETYPES:
                    return {'error': format_error_msg}
            except UserError:
                # considered as an image by the browser file input, but not
                # recognized as such by PIL, eg .webp
                return {'error': format_error_msg}
            except ValueError as e:
                return {'error': e.args[0]}

        self._clean_context()
        attachment = self._attachment_create(name=name, data=data, res_id=res_id, res_model=res_model, generate_access_token=generate_access_token)
        return attachment._get_media_info()

    @http.route('/web_editor/attachment/add_url', type='json', auth='user', methods=['POST'], website=True)
    def add_url(self, url, res_id=False, res_model='ir.ui.view', **kwargs):
        """Create an url-type attachment and return its media info."""
        self._clean_context()
        attachment = self._attachment_create(url=url, res_id=res_id, res_model=res_model)
        return attachment._get_media_info()

    @http.route('/web_editor/attachment/remove', type='json', auth='user', website=True)
    def remove(self, ids, **kwargs):
        """ Removes a web-based image attachment if it is used by no view (template)

        Returns a dict mapping attachments which would not be removed (if any)
        mapped to the views preventing their removal
        """
        self._clean_context()
        Attachment = attachments_to_remove = request.env['ir.attachment']
        Views = request.env['ir.ui.view']

        # views blocking removal of the attachment
        removal_blocked_by = {}

        for attachment in Attachment.browse(ids):
            # in-document URLs are html-escaped, a straight search will not
            # find them
            url = tools.html_escape(attachment.local_url)
            views = Views.search([
                "|",
                ('arch_db', 'like', '"%s"' % url),
                ('arch_db', 'like', "'%s'" % url)
            ])

            if views:
                removal_blocked_by[attachment.id] = views.read(['name'])
            else:
                attachments_to_remove += attachment
        if attachments_to_remove:
            attachments_to_remove.unlink()
        return removal_blocked_by

    @http.route('/web_editor/get_image_info', type='json', auth='user', website=True)
    def get_image_info(self, src=''):
        """This route is used to determine the original of an attachment so that
        it can be used as a base to modify it again (crop/optimization/filters).
        """
        attachment = None
        id_match = re.search('^/web/image/([^/?]+)', src)
        if id_match:
            url_segment = id_match.group(1)
            number_match = re.match('^(\d+)', url_segment)
            if '.' in url_segment:  # xml-id
                attachment = request.env['ir.http']._xmlid_to_obj(request.env, url_segment)
            elif number_match:  # numeric id
                attachment = request.env['ir.attachment'].browse(int(number_match.group(1)))
        else:
            # Find attachment by url. There can be multiple matches because of default
            # snippet images referencing the same image in /static/, so we limit to 1
            attachment = request.env['ir.attachment'].search([
                '|', ('url', '=like', src), ('url', '=like', '%s?%%' % src),
                ('mimetype', 'in', SUPPORTED_IMAGE_MIMETYPES),
            ], limit=1)
        if not attachment:
            return {
                'attachment': False,
                'original': False,
            }
        return {
            'attachment': attachment.read(['id'])[0],
            'original': (attachment.original_id or attachment).read(['id', 'image_src', 'mimetype'])[0],
        }

    def _attachment_create(self, name='', data=False, url=False, res_id=False, res_model='ir.ui.view', generate_access_token=False):
        """Create and return a new attachment."""
        if name.lower().endswith('.bmp'):
            # Avoid mismatch between content type and mimetype, see commit msg
            name = name[:-4]

        if not name and url:
            name = url.split("/").pop()

        if res_model != 'ir.ui.view' and res_id:
            res_id = int(res_id)
        else:
            res_id = False

        attachment_data = {
            'name': name,
            'public': res_model == 'ir.ui.view',
            'res_id': res_id,
            'res_model': res_model,
        }

        if data:
            attachment_data['datas'] = data
        elif url:
            attachment_data.update({
                'type': 'url',
                'url': url,
            })
        else:
            raise UserError(_("You need to specify either data or url to create an attachment."))

        attachment = request.env['ir.attachment'].create(attachment_data)
        if generate_access_token:
            attachment.generate_access_token()

        return attachment

    def _clean_context(self):
        # avoid allowed_company_ids which may erroneously restrict based on website
        context = dict(request.context)
        context.pop('allowed_company_ids', None)
        request.context = context

    @http.route("/web_editor/get_assets_editor_resources", type="json", auth="user", website=True)
    def get_assets_editor_resources(self, key, get_views=True, get_scss=True, get_js=True, bundles=False, bundles_restriction=[], only_user_custom_files=True):
        """
        Transmit the resources the assets editor needs to work.

        Params:
            key (str): the key of the view the resources are related to

            get_views (bool, default=True):
                True if the views must be fetched

            get_scss (bool, default=True):
                True if the style must be fetched

            get_js (bool, default=True):
                True if the javascript must be fetched

            bundles (bool, default=False):
                True if the bundles views must be fetched

            bundles_restriction (list, default=[]):
                Names of the bundles in which to look for scss files
                (if empty, search in all of them)

            only_user_custom_files (bool, default=True):
                True if only user custom files must be fetched

        Returns:
            dict: views, scss, js
        """
        # Related views must be fetched if the user wants the views and/or the style
        views = request.env["ir.ui.view"].with_context(no_primary_children=True, __views_get_original_hierarchy=[]).get_related_views(key, bundles=bundles)
        views = views.read(['name', 'id', 'key', 'xml_id', 'arch', 'active', 'inherit_id'])

        scss_files_data_by_bundle = []
        js_files_data_by_bundle = []

        if get_scss:
            scss_files_data_by_bundle = self._load_resources('scss', views, bundles_restriction, only_user_custom_files)
        if get_js:
            js_files_data_by_bundle = self._load_resources('js', views, bundles_restriction, only_user_custom_files)

        return {
            'views': get_views and views or [],
            'scss': get_scss and scss_files_data_by_bundle or [],
            'js': get_js and js_files_data_by_bundle or [],
        }

    def _load_resources(self, file_type, views, bundles_restriction, only_user_custom_files):
        """Collect editable scss/js files grouped by bundle for the views."""
        AssetsUtils = request.env['web_editor.assets']

        files_data_by_bundle = []
        resources_type_info = {'t_call_assets_attribute': 't-js', 'mimetype': 'text/javascript'}
        if file_type == 'scss':
            resources_type_info = {'t_call_assets_attribute': 't-css', 'mimetype': 'text/scss'}

        # Compile regex outside of the loop
        # This will used to exclude library scss files from the result
        excluded_url_matcher = re.compile("^(.+/lib/.+)|(.+import_bootstrap.+\.scss)$")

        # First check the t-call-assets used in the related views
        url_infos = dict()
        for v in views:
            for asset_call_node in etree.fromstring(v["arch"]).xpath("//t[@t-call-assets]"):
                attr = asset_call_node.get(resources_type_info['t_call_assets_attribute'])
                if attr and not json.loads(attr.lower()):
                    continue
                asset_name = asset_call_node.get("t-call-assets")

                # Loop through bundle files to search for file info
                files_data = []
                for file_info in request.env["ir.qweb"]._get_asset_content(asset_name)[0]:
                    if file_info["atype"] != resources_type_info['mimetype']:
                        continue
                    url = file_info["url"]

                    # Exclude library files (see regex above)
                    if excluded_url_matcher.match(url):
                        continue

                    # Check if the file is customized and get bundle/path info
                    file_data = AssetsUtils.get_asset_info(url)
                    if not file_data:
                        continue

                    # Save info according to the filter (arch will be fetched later)
                    url_infos[url] = file_data

                    if '/user_custom_' in url \
                            or file_data['customized'] \
                            or file_type == 'scss' and not only_user_custom_files:
                        files_data.append(url)

                # scss data is returned sorted by bundle, with the bundles
                # names and xmlids
                if len(files_data):
                    files_data_by_bundle.append([asset_name, files_data])

        # Filter bundles/files:
        # - A file which appears in multiple bundles only appears in the
        #   first one (the first in the DOM)
        # - Only keep bundles with files which appears in the asked bundles
        #   and only keep those files
        for i in range(0, len(files_data_by_bundle)):
            bundle_1 = files_data_by_bundle[i]
            for j in range(0, len(files_data_by_bundle)):
                bundle_2 = files_data_by_bundle[j]
                # In unwanted bundles, keep only the files which are in wanted bundles too (web._helpers)
                if bundle_1[0] not in bundles_restriction and bundle_2[0] in bundles_restriction:
                    bundle_1[1] = [item_1 for item_1 in bundle_1[1] if item_1 in bundle_2[1]]
        for i in range(0, len(files_data_by_bundle)):
            bundle_1 = files_data_by_bundle[i]
            for j in range(i + 1, len(files_data_by_bundle)):
                bundle_2 = files_data_by_bundle[j]
                # In every bundle, keep only the files which were not found
                # in previous bundles
                bundle_2[1] = [item_2 for item_2 in bundle_2[1] if item_2 not in bundle_1[1]]

        # Only keep bundles which still have files and that were requested
        files_data_by_bundle = [
            data for data in files_data_by_bundle
            if (len(data[1]) > 0 and (not bundles_restriction or data[0] in bundles_restriction))
        ]

        # Fetch the arch of each kept file, in each bundle
        urls = []
        for bundle_data in files_data_by_bundle:
            urls += bundle_data[1]
        custom_attachments = AssetsUtils.get_all_custom_attachments(urls)

        for bundle_data in files_data_by_bundle:
            for i in range(0, len(bundle_data[1])):
                url = bundle_data[1][i]
                url_info = url_infos[url]

                content = AssetsUtils.get_asset_content(url, url_info, custom_attachments)

                bundle_data[1][i] = {
                    'url': "/%s/%s" % (url_info["module"], url_info["resource_path"]),
                    'arch': content,
                    'customized': url_info["customized"],
                }

        return files_data_by_bundle

    @http.route("/web_editor/save_asset", type="json", auth="user", website=True)
    def save_asset(self, url, bundle, content, file_type):
        """
        Save a given modification of a scss/js file.

        Params:
            url (str): the original url of the scss/js file which has to be
                modified

            bundle (str): the name of the bundle in which the scss/js file
                addition can be found

            content (str): the new content of the scss/js file

            file_type (str): 'scss' or 'js'
        """
        request.env['web_editor.assets'].save_asset(url, bundle, content, file_type)

    @http.route("/web_editor/reset_asset", type="json", auth="user", website=True)
    def reset_asset(self, url, bundle):
        """
        The reset_asset route is in charge of reverting all the changes that
        were done to a scss/js file.

        Params:
            url (str): the original URL of the scss/js file to reset

            bundle (str): the name of the bundle in which the scss/js file
                addition can be found
        """
        request.env['web_editor.assets'].reset_asset(url, bundle)

    @http.route("/web_editor/public_render_template", type="json", auth="public", website=True)
    def public_render_template(self, args):
        # args[0]: xml id of the template to render
        # args[1]: optional dict of rendering values, only trusted keys are supported
        len_args = len(args)
        assert len_args >= 1 and len_args <= 2, 'Need a xmlID and potential rendering values to render a template'

        trusted_value_keys = ('debug',)

        xmlid = args[0]
        values = len_args > 1 and args[1] or {}

        View = request.env['ir.ui.view']
        return View.render_public_asset(xmlid, {k: values[k] for k in values if k in trusted_value_keys})

    @http.route('/web_editor/modify_image/<model("ir.attachment"):attachment>', type="json", auth="user", website=True)
    def modify_image(self, attachment, res_model=None, res_id=None, name=None, data=None, original_id=None, mimetype=None):
        """
        Creates a modified copy of an attachment and returns its image_src to be
        inserted into the DOM.
        """
        fields = {
            'original_id': attachment.id,
            'datas': data,
            'type': 'binary',
            'res_model': res_model or 'ir.ui.view',
            'mimetype': mimetype or attachment.mimetype,
        }
        if fields['res_model'] == 'ir.ui.view':
            fields['res_id'] = 0
        elif res_id:
            fields['res_id'] = res_id
        if name:
            fields['name'] = name
        attachment = attachment.copy(fields)
        if attachment.url:
            # Don't keep url if modifying static attachment because static images
            # are only served from disk and don't fallback to attachments.
            if re.match(r'^/\w+/static/', attachment.url):
                attachment.url = None
            # Uniquify url by adding a path segment with the id before the name.
            # This allows us to keep the unsplash url format so it still reacts
            # to the unsplash beacon.
            else:
                url_fragments = attachment.url.split('/')
                url_fragments.insert(-1, str(attachment.id))
                attachment.url = '/'.join(url_fragments)
        if attachment.public:
            return attachment.image_src
        attachment.generate_access_token()
        return '%s?access_token=%s' % (attachment.image_src, attachment.access_token)

    def _get_shape_svg(self, module, *segments):
        """Read and return the text of a static svg shipped by ``module``."""
        shape_path = get_resource_path(module, 'static', *segments)
        if not shape_path:
            raise werkzeug.exceptions.NotFound()
        with tools.file_open(shape_path, 'r', filter_ext=('.svg',)) as file:
            return file.read()

    def _update_svg_colors(self, options, svg):
        """Replace the default shape palette colors in ``svg`` by the c1..c5
        colors given in ``options``; return (svg, remaining options)."""
        user_colors = []
        svg_options = {}
        default_palette = {
            '1': '#3AADAA',
            '2': '#7C6576',
            '3': '#F6F6F6',
            '4': '#FFFFFF',
            '5': '#383E45',
        }
        bundle_css = None
        regex_hex = r'#[0-9A-F]{6,8}'
        regex_rgba = r'rgba?\(\d{1,3}, ?\d{1,3}, ?\d{1,3}(?:, ?[0-9.]{1,4})?\)'
        for key, value in options.items():
            colorMatch = re.match('^c([1-5])$', key)
            if colorMatch:
                css_color_value = value
                # Check that color is hex or rgb(a) to prevent arbitrary injection
                if not re.match(r'(?i)^%s$|^%s$' % (regex_hex, regex_rgba), css_color_value.replace(' ', '')):
                    if re.match('^o-color-([1-5])$', css_color_value):
                        if not bundle_css:
                            bundle = 'web.assets_frontend'
                            files, _ = request.env["ir.qweb"]._get_asset_content(bundle)
                            asset = AssetsBundle(bundle, files)
                            bundle_css = asset.css().index_content
                        color_search = re.search(r'(?i)--%s:\s+(%s|%s)' % (css_color_value, regex_hex, regex_rgba), bundle_css)
                        if not color_search:
                            raise werkzeug.exceptions.BadRequest()
                        css_color_value = color_search.group(1)
                    else:
                        raise werkzeug.exceptions.BadRequest()
                user_colors.append([tools.html_escape(css_color_value), colorMatch.group(1)])
            else:
                svg_options[key] = value

        color_mapping = {default_palette[palette_number]: color for color, palette_number in user_colors}
        # create a case-insensitive regex to match all the colors to replace, eg: '(?i)(#3AADAA)|(#7C6576)'
        regex = '(?i)%s' % '|'.join('(%s)' % color for color in color_mapping.keys())

        def subber(match):
            key = match.group().upper()
            return color_mapping[key] if key in color_mapping else key
        return re.sub(regex, subber, svg), svg_options

    @http.route(['/web_editor/shape/<module>/<path:filename>'], type='http', auth="public", website=True)
    def shape(self, module, filename, **kwargs):
        """
        Returns a color-customized svg (background shape or illustration).
        """
        svg = None
        if module == 'illustration':
            attachment = request.env['ir.attachment'].sudo().browse(unslug(filename)[1])
            if (not attachment.exists()
                    or attachment.type != 'binary'
                    or not attachment.public
                    or not attachment.url.startswith(request.httprequest.path)):
                # Fallback to URL lookup to allow using shapes that were
                # imported from data files.
                attachment = request.env['ir.attachment'].sudo().search([
                    ('type', '=', 'binary'),
                    ('public', '=', True),
                    ('url', '=', request.httprequest.path),
                ], limit=1)
                if not attachment:
                    raise werkzeug.exceptions.NotFound()
            svg = b64decode(attachment.datas).decode('utf-8')
        else:
            svg = self._get_shape_svg(module, 'shapes', filename)

        svg, options = self._update_svg_colors(kwargs, svg)
        flip_value = options.get('flip', False)
        if flip_value == 'x':
            svg = svg.replace('<svg ', '<svg style="transform: scaleX(-1);" ', 1)
        elif flip_value == 'y':
            svg = svg.replace('<svg ', '<svg style="transform: scaleY(-1)" ', 1)
        elif flip_value == 'xy':
            svg = svg.replace('<svg ', '<svg style="transform: scale(-1)" ', 1)

        return request.make_response(svg, [
            ('Content-type', 'image/svg+xml'),
            ('Cache-control', 'max-age=%s' % http.STATIC_CACHE_LONG),
        ])

    @http.route(['/web_editor/image_shape/<string:img_key>/<module>/<path:filename>'], type='http', auth="public", website=True)
    def image_shape(self, module, filename, img_key, **kwargs):
        """Return an image-shape svg with the referenced image embedded."""
        svg = self._get_shape_svg(module, 'image_shapes', filename)
        _, _, image_base64 = request.env['ir.http'].binary_content(
            xmlid=img_key, model='ir.attachment', field='datas', default_mimetype='image/png')
        if not image_base64:
            image_base64 = b64encode(request.env['ir.http']._placeholder())

        image = base64_to_image(image_base64)
        width, height = tuple(str(size) for size in image.size)
        root = etree.fromstring(svg)
        root.attrib.update({'width': width, 'height': height})
        # Update default color palette on shape SVG.
        svg, _ = self._update_svg_colors(kwargs, etree.tostring(root, pretty_print=True).decode('utf-8'))
        # Add image in base64 inside the shape.
        uri = image_data_uri(image_base64)
        svg = svg.replace('<image xlink:href="', '<image xlink:href="%s' % uri)

        return request.make_response(svg, [
            ('Content-type', 'image/svg+xml'),
            ('Cache-control', 'max-age=%s' % http.STATIC_CACHE_LONG),
        ])

    @http.route(['/web_editor/media_library_search'], type='json', auth="user", website=True)
    def media_library_search(self, **params):
        """Proxy a search query to the configured media library endpoint."""
        ICP = request.env['ir.config_parameter'].sudo()
        endpoint = ICP.get_param('web_editor.media_library_endpoint', DEFAULT_LIBRARY_ENDPOINT)
        params['dbuuid'] = ICP.get_param('database.uuid')
        response = requests.post('%s/media-library/1/search' % endpoint, data=params)
        if response.status_code == requests.codes.ok and response.headers['content-type'] == 'application/json':
            return response.json()
        else:
            return {'error': response.status_code}

    @http.route('/web_editor/save_library_media', type='json', auth='user', methods=['POST'])
    def save_library_media(self, media):
        """
        Saves images from the media library as new attachments, making them
        dynamic SVGs if needed.

        media = {
            <media_id>: {
                'query': 'space separated search terms',
                'is_dynamic_svg': True/False,
                'dynamic_colors': maps color names to their color,
            }, ...
        }
        """
        attachments = []
        ICP = request.env['ir.config_parameter'].sudo()
        library_endpoint = ICP.get_param('web_editor.media_library_endpoint', DEFAULT_LIBRARY_ENDPOINT)

        media_ids = ','.join(media.keys())
        params = {
            'dbuuid': ICP.get_param('database.uuid'),
            'media_ids': media_ids,
        }
        response = requests.post('%s/media-library/1/download_urls' % library_endpoint, data=params)
        if response.status_code != requests.codes.ok:
            raise Exception(_("ERROR: couldn't get download urls from media library."))

        for id, url in response.json().items():
            req = requests.get(url)
            name = '_'.join([media[id]['query'], url.split('/')[-1]])
            # Need to bypass security check to write image with mimetype image/svg+xml
            # ok because svgs come from whitelisted origin
            context = {'binary_field_real_user': request.env['res.users'].sudo().browse([SUPERUSER_ID])}
            attachment = request.env['ir.attachment'].sudo().with_context(context).create({
                'name': name,
                'mimetype': req.headers['content-type'],
                'datas': b64encode(req.content),
                'public': True,
                'res_model': 'ir.ui.view',
                'res_id': 0,
            })
            if media[id]['is_dynamic_svg']:
                colorParams = werkzeug.urls.url_encode(media[id]['dynamic_colors'])
                attachment['url'] = '/web_editor/shape/illustration/%s?%s' % (slug(attachment), colorParams)
            attachments.append(attachment._get_media_info())

        return attachments

    @http.route("/web_editor/get_ice_servers", type='json', auth="user")
    def get_ice_servers(self):
        return request.env['mail.ice.server']._get_ice_servers()

    @http.route("/web_editor/bus_broadcast", type="json", auth="user")
    def bus_broadcast(self, model_name, field_name, res_id, bus_data):
        """Relay collaborative-edition data on the bus after checking that the
        user can both read and write the target field."""
        document = request.env[model_name].browse([res_id])

        document.check_access_rights('read')
        document.check_field_access_rights('read', [field_name])
        document.check_access_rule('read')
        document.check_access_rights('write')
        document.check_field_access_rights('write', [field_name])
        document.check_access_rule('write')

        channel = (request.db, 'editor_collaboration', model_name, field_name, int(res_id))
        bus_data.update({'model_name': model_name, 'field_name': field_name, 'res_id': res_id})
        request.env['bus.bus']._sendone(channel, 'editor_collaboration', bus_data)

    @http.route("/web_editor/ensure_common_history", type="json", auth="user")
    def ensure_common_history(self, model_name, field_name, res_id, history_ids):
        """Return the server value of the field when histories diverge,
        otherwise return nothing (implicit None)."""
        record = request.env[model_name].browse([res_id])
        try:
            ensure_no_history_divergence(record, field_name, history_ids)
        except ValidationError:
            return record[field_name]
46.128686
34,412
1,416
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Module manifest for the Partner Autocomplete IAP module.
{
    'name': "Partner Autocomplete",
    'summary': "Auto-complete partner companies' data",
    'version': '1.1',
    'description': """ Auto-complete partner companies' data """,
    'author': "Odoo SA",
    'category': 'Hidden/Tools',
    'depends': [
        'iap_mail',
    ],
    'data': [
        'security/ir.model.access.csv',
        'views/res_partner_views.xml',
        'views/res_company_views.xml',
        'views/res_config_settings_views.xml',
        'data/cron.xml',
    ],
    # Installed automatically as soon as all dependencies are present.
    'auto_install': True,
    'assets': {
        'web.assets_backend': [
            'partner_autocomplete/static/src/scss/partner_autocomplete.scss',
            'partner_autocomplete/static/src/js/partner_autocomplete_core.js',
            'partner_autocomplete/static/src/js/partner_autocomplete_fieldchar.js',
            'partner_autocomplete/static/src/js/partner_autocomplete_many2one.js',
            'partner_autocomplete/static/src/js/web_company_autocomplete.js',
        ],
        'web.tests_assets': [
            'partner_autocomplete/static/lib/**/*',
        ],
        'web.qunit_suite_tests': [
            'partner_autocomplete/static/tests/**/*',
        ],
        'web.assets_qweb': [
            'partner_autocomplete/static/src/xml/**/*',
        ],
    },
    'license': 'LGPL-3',
}
32.181818
1,416
5,419
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from contextlib import contextmanager
from unittest.mock import patch

from odoo import exceptions
from odoo.addons.iap.tools import iap_tools
from odoo.addons.partner_autocomplete.models.iap_autocomplete_api import IapAutocompleteEnrichAPI
from odoo.tests import common


class MockIAPPartnerAutocomplete(common.BaseCase):
    """ Mock PartnerAutocomplete IAP calls for testing purpose.

    Example of company_data {
      'partner_gid': 51580, 'website': 'mywebsite.be',
      'additional_info': {
        "name": "Mywebsite",
        "description": "Mywebsite is the largest of Belgium's custom companies and part of Mywebsite Group.",
        "facebook": "mywebsitebe", "twitter": "mywebsite", "linkedin": "company/mywebsite",
        "twitter_followers": 99999, "twitter_bio": "This is the official Twitter account of MyWebsite.",
        "industry_group": "Technology Hardware & Equipment", "sub_industry": "Computer Networking",
        "industry": "Communications Equipment",
        "sector": ["Information Technology", "Technology Hardware & Equipment"],
        "sector_primary": "Information Technology"
        "tech": ["Tealium", "Hotjar", "Google Analytics", "Instagram", "Facebook Advertiser",
                 "Facebook Connect", "Google Tag Manager", "Mandrill", "Bazaarvoice", "Mailgun", "Conversio"],
        "email": [], "crunchbase": "organization/mywebsite-group",
        "phone_numbers": ["+32 800 00 000", "+32 800 00 001", "+32 800 00 002"],
        "timezone": "Europe/Brussels", "timezone_url": "https://time.is/Brussels",
        "company_type": "private", "employees": 15000.0, "annual_revenue": 0.0,
        "estimated_annual_revenue": false, "founded_year": 0,
        "logo": "https://logo.clearbit.com/mywebsite.be"},
      'child_ids': [{
        'is_company': False, 'type': 'contact', 'city': False, 'email': False,
        'name': 'Client support - SMEs', 'street': 'False False', 'phone': '0800 00 500',
        'zip': False, 'country_id': False, 'state_id': False
      }, {
        'is_company': False, 'type': 'contact', 'city': False, 'email': False,
        'name': 'Client Support - Large Business', 'street': 'False False',
        'phone': '0800 00 501', 'zip': False, 'country_id': False, 'state_id': False
      }],
      'city': 'Brussel', 'vat': 'BE0202239951', 'email': False,
      'logo': 'https://logo.clearbit.com/mywebsite.be', 'name': 'Proximus',
      'zip': '1000', 'ignored': False, 'phone': '+32 800 00 800',
      'bank_ids': [{'acc_number': 'BE012012012', 'acc_holder_name': 'MyWebsite'},
                   {'acc_number': 'BE013013013', 'acc_holder_name': 'MyWebsite Online'}],
      'street': 'Rue Perdues 27', 'country_code': 'de', 'country_name': 'Germany',
      'state_id': False
    }
    """

    @classmethod
    def _init_mock_partner_autocomplete(cls):
        # Pre-fetch reference records used by the simulated IAP payload.
        cls.base_de = cls.env.ref('base.de')
        cls.base_be = cls.env.ref('base.be')
        cls.be_state_bw = cls.env['res.country.state'].create({'name': 'Béwééé dis', 'code': 'bw', 'country_id': cls.base_be.id})

    @contextmanager
    def mockPartnerAutocomplete(self, default_data=None, sim_error=None):
        """Patch IapAutocompleteEnrichAPI._contact_iap with a simulated payload.

        :param default_data: dict merged over the default simulated result
        :param sim_error: one of 'credit', 'jsonrpc_exception', 'token' to
            simulate the matching failure mode of the enrich action
        """
        def _contact_iap(local_endpoint, action, params, timeout):
            sim_result = {
                'partner_gid': '9876',
                'website': 'https://www.heinrich.de',
                'additional_info': {},
                'city': 'Mönchengladbach',
                'email': False,
                'logo': 'https://logo.clearbit.com/heinrichsroofing.com',
                'name': 'Heinrich',
                'zip': '41179',
                'ignored': False,
                'phone': '+49 0000 112233',
                'street': 'Mennrather Str. 123456',
                'country_code': self.base_de.code,
                'country_name': self.base_de.name,
                'state_id': False,
                'child_ids': [{
                    'is_company': False,
                    'type': 'contact',
                    'city': 'Orcq',
                    'name': 'Heinrich Support',
                }, {
                    'is_company': False,
                    'type': 'contact',
                    'email': '[email protected]',
                    'name': 'Heinrich Client Support',
                    'street': 'Rue des Bourlottes, 9',
                    'phone': '0456 00 11 22',
                    'zip': '1367',
                    'country_code': self.base_be.code,
                    'country_name': self.base_be.name,
                    'state_code': self.be_state_bw.code,
                    'state_name': self.be_state_bw.name,
                }],
            }
            if default_data:
                sim_result.update(default_data)
            # mock enrich only currently, to update further
            if action == 'enrich':
                if sim_error and sim_error == 'credit':
                    raise iap_tools.InsufficientCreditError('InsufficientCreditError')
                elif sim_error and sim_error == 'jsonrpc_exception':
                    raise exceptions.AccessError(
                        'The url that this service requested returned an error. Please contact the author of the app. The url it tried to contact was ' + local_endpoint
                    )
                elif sim_error and sim_error == 'token':
                    raise ValueError('No account token')
                return {'company_data': sim_result}

        try:
            with patch.object(IapAutocompleteEnrichAPI, '_contact_iap', side_effect=_contact_iap):
                yield
        finally:
            pass
55.244898
5,414
2,565
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo.tests import common
from odoo.addons.partner_autocomplete.tests.common import MockIAPPartnerAutocomplete


class TestResCompany(common.TransactionCase, MockIAPPartnerAutocomplete):
    """Tests for company enrichment and company-domain extraction."""

    @classmethod
    def setUpClass(cls):
        super(TestResCompany, cls).setUpClass()
        cls._init_mock_partner_autocomplete()

    def test_enrich(self):
        company = self.env['res.company'].create({'name': "Test Company 1"})

        # Without an email there is nothing to look up: enrich must fail.
        with self.mockPartnerAutocomplete():
            res = company._enrich()
            self.assertFalse(res)

        company.write({'email': '[email protected]'})
        with self.mockPartnerAutocomplete():
            # asserts are synchronized with default mock values
            res = company._enrich()
            self.assertTrue(res)
            self.assertEqual(company.country_id, self.env.ref('base.de'))
            self.assertEqual(len(company.partner_id.child_ids), 2)

    def test_extract_company_domain(self):
        company_1 = self.env['res.company'].create({'name': "Test Company 1"})

        # Website takes priority over email when both are set.
        company_1.website = 'http://www.info.proximus.be/faq/test'
        self.assertEqual(company_1._get_company_domain(), "proximus.be")

        company_1.email = '[email protected]'
        self.assertEqual(company_1._get_company_domain(), "waterlink.be")

        company_1.website = False
        company_1.email = False
        self.assertEqual(company_1._get_company_domain(), False)

        company_1.email = "at@"
        self.assertEqual(company_1._get_company_domain(), False)

        company_1.website = "http://superFalsyWebsiteName"
        self.assertEqual(company_1._get_company_domain(), False)

        company_1.website = "http://www.superwebsite.com"
        self.assertEqual(company_1._get_company_domain(), 'superwebsite.com')

        company_1.website = "http://superwebsite.com"
        self.assertEqual(company_1._get_company_domain(), 'superwebsite.com')

        company_1.website = "http://localhost:8069/%7Eguido/Python.html"
        self.assertEqual(company_1._get_company_domain(), False)

        company_1.website = "http://runbot.odoo.com"
        self.assertEqual(company_1._get_company_domain(), 'odoo.com')

        company_1.website = "http://www.example.com/biniou"
        self.assertEqual(company_1._get_company_domain(), False)

        company_1.website = "http://www.cwi.nl:80/%7Eguido/Python.html"
        self.assertEqual(company_1._get_company_domain(), "cwi.nl")
40.714286
2,565
531
py
PYTHON
15.0
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import models
from odoo.http import request


class Http(models.AbstractModel):
    _inherit = 'ir.http'

    def session_info(self):
        """ Add information about iap enrich to perform """
        session_info = super(Http, self).session_info()
        # Only admins get the flag; it tells the web client whether the
        # automatic company enrichment has not yet been performed.
        if session_info.get('is_admin'):
            session_info['iap_company_enrich'] = not request.env.user.company_id.iap_enrich_auto_done
        return session_info
33.1875
531
2,297
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import logging from odoo import api, models, exceptions, _ from odoo.addons.iap.tools import iap_tools from requests.exceptions import HTTPError _logger = logging.getLogger(__name__) class IapAutocompleteEnrichAPI(models.AbstractModel): _name = 'iap.autocomplete.api' _description = 'IAP Partner Autocomplete API' _DEFAULT_ENDPOINT = 'https://partner-autocomplete.odoo.com' @api.model def _contact_iap(self, local_endpoint, action, params, timeout=15): if self.env.registry.in_test_mode(): raise exceptions.ValidationError(_('Test mode')) account = self.env['iap.account'].get('partner_autocomplete') if not account.account_token: raise ValueError(_('No account token')) params.update({ 'db_uuid': self.env['ir.config_parameter'].sudo().get_param('database.uuid'), 'account_token': account.account_token, 'country_code': self.env.company.country_id.code, 'zip': self.env.company.zip, }) base_url = self.env['ir.config_parameter'].sudo().get_param('iap.partner_autocomplete.endpoint', self._DEFAULT_ENDPOINT) return iap_tools.iap_jsonrpc(base_url + local_endpoint + '/' + action, params=params, timeout=timeout) @api.model def _request_partner_autocomplete(self, action, params, timeout=15): """ Contact endpoint to get autocomplete data. :return tuple: results, error code """ try: results = self._contact_iap('/iap/partner_autocomplete', action, params, timeout=timeout) except exceptions.ValidationError: return False, 'Insufficient Credit' except (ConnectionError, HTTPError, exceptions.AccessError, exceptions.UserError) as exception: _logger.error('Autocomplete API error: %s', str(exception)) return False, str(exception) except iap_tools.InsufficientCreditError as exception: _logger.warning('Insufficient Credits for Autocomplete Service: %s', str(exception)) return False, 'Insufficient Credit' except ValueError: return False, 'No account token' return results, False
44.173077
2,297
1,358
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import logging from odoo import api, fields, models _logger = logging.getLogger(__name__) class ResPartnerAutocompleteSync(models.Model): _name = 'res.partner.autocomplete.sync' _description = 'Partner Autocomplete Sync' partner_id = fields.Many2one('res.partner', string="Partner", ondelete='cascade') synched = fields.Boolean('Is synched', default=False) @api.model def start_sync(self): to_sync_items = self.search([('synched', '=', False)]) for to_sync_item in to_sync_items: partner = to_sync_item.partner_id params = { 'partner_gid': partner.partner_gid, } if partner.vat and partner._is_vat_syncable(partner.vat): params['vat'] = partner.vat _, error = self.env['iap.autocomplete.api']._request_partner_autocomplete('update', params) if error: _logger.warning('Send Partner to sync failed: %s', str(error)) to_sync_item.write({'synched': True}) def add_to_queue(self, partner_id): to_sync = self.search([('partner_id', '=', partner_id)]) if not to_sync: to_sync = self.create({'partner_id': partner_id}) return to_sync
35.736842
1,358
5,388
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import json import logging import threading from odoo.addons.iap.tools import iap_tools from odoo import api, fields, models, tools, _ _logger = logging.getLogger(__name__) COMPANY_AC_TIMEOUT = 5 class ResCompany(models.Model): _name = 'res.company' _inherit = 'res.company' partner_gid = fields.Integer('Company database ID', related="partner_id.partner_gid", inverse="_inverse_partner_gid", store=True) iap_enrich_auto_done = fields.Boolean('Enrich Done') def _inverse_partner_gid(self): for company in self: company.partner_id.partner_gid = company.partner_gid @api.model_create_multi def create(self, vals_list): res = super().create(vals_list) if not getattr(threading.current_thread(), 'testing', False): res.iap_enrich_auto() return res def iap_enrich_auto(self): """ Enrich company. This method should be called by automatic processes and a protection is added to avoid doing enrich in a loop. """ if self.env.user._is_system(): for company in self.filtered(lambda company: not company.iap_enrich_auto_done): company._enrich() self.iap_enrich_auto_done = True return True def _enrich(self): """ This method calls the partner autocomplete service from IAP to enrich partner related fields of the company. :return bool: either done, either failed """ self.ensure_one() _logger.info("Starting enrich of company %s (%s)", self.name, self.id) company_domain = self._get_company_domain() if not company_domain: return False company_data = self.env['res.partner'].enrich_company(company_domain, False, self.vat, timeout=COMPANY_AC_TIMEOUT) if company_data.get('error'): return False additional_data = company_data.pop('additional_info', False) # Keep only truthy values that are not already set on the target partner # Erase image_1920 even if something is in it. 
Indeed as partner_autocomplete is probably installed as a # core app (mail -> iap -> partner_autocomplete auto install chain) it is unlikely that people already # updated their company logo. self.env['res.partner']._iap_replace_logo(company_data) company_data = {field: value for field, value in company_data.items() if field in self.partner_id._fields and value and (field == 'image_1920' or not self.partner_id[field])} # for company and childs: from state_id / country_id name_get like to IDs company_data.update(self._enrich_extract_m2o_id(company_data, ['state_id', 'country_id'])) if company_data.get('child_ids'): company_data['child_ids'] = [ dict(child_data, **self._enrich_extract_m2o_id(child_data, ['state_id', 'country_id'])) for child_data in company_data['child_ids'] ] # handle o2m values, e.g. {'bank_ids': ['acc_number': 'BE012012012', 'acc_holder_name': 'MyWebsite']} self._enrich_replace_o2m_creation(company_data) self.partner_id.write(company_data) if additional_data: template_values = json.loads(additional_data) template_values['flavor_text'] = _("Company auto-completed by Odoo Partner Autocomplete Service") self.partner_id.message_post_with_view( 'iap_mail.enrich_company', values=template_values, subtype_id=self.env.ref('mail.mt_note').id, ) return True def _enrich_extract_m2o_id(self, iap_data, m2o_fields): """ Extract m2O ids from data (because of res.partner._format_data_company) """ extracted_data = {} for m2o_field in m2o_fields: relation_data = iap_data.get(m2o_field) if relation_data and isinstance(relation_data, dict): extracted_data[m2o_field] = relation_data.get('id', False) return extracted_data def _enrich_replace_o2m_creation(self, iap_data): for o2m_field, values in iap_data.items(): if isinstance(values, list): commands = [( 0, 0, create_value ) for create_value in values if isinstance(create_value, dict)] if commands: iap_data[o2m_field] = commands else: iap_data.pop(o2m_field, None) return iap_data def _get_company_domain(self): """ 
Extract the company domain to be used by IAP services. The domain is extracted from the website or the email information. e.g: - www.info.proximus.be -> proximus.be - [email protected] -> proximus.be """ self.ensure_one() company_domain = tools.email_domain_extract(self.email) if self.email else False if company_domain and company_domain not in iap_tools._MAIL_DOMAIN_BLACKLIST: return company_domain company_domain = tools.url_domain_extract(self.website) if self.website else False if not company_domain or company_domain in ['localhost', 'example.com']: return False return company_domain
42.09375
5,388
814
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class ResConfigSettings(models.TransientModel): _inherit = 'res.config.settings' partner_autocomplete_insufficient_credit = fields.Boolean('Insufficient credit', compute="_compute_partner_autocomplete_insufficient_credit") def _compute_partner_autocomplete_insufficient_credit(self): self.partner_autocomplete_insufficient_credit = self.env['iap.account'].get_credits('partner_autocomplete') <= 0 def redirect_to_buy_autocomplete_credit(self): Account = self.env['iap.account'] return { 'type': 'ir.actions.act_url', 'url': Account.get_credits_url('partner_autocomplete'), 'target': '_new', }
38.761905
814
6,885
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import base64 import json import logging import requests from odoo import api, fields, models, tools, _ _logger = logging.getLogger(__name__) PARTNER_AC_TIMEOUT = 5 class ResPartner(models.Model): _name = 'res.partner' _inherit = 'res.partner' partner_gid = fields.Integer('Company database ID') additional_info = fields.Char('Additional info') @api.model def _iap_replace_location_codes(self, iap_data): country_code, country_name = iap_data.pop('country_code', False), iap_data.pop('country_name', False) state_code, state_name = iap_data.pop('state_code', False), iap_data.pop('state_name', False) country, state = None, None if country_code: country = self.env['res.country'].search([['code', '=ilike', country_code]]) if not country and country_name: country = self.env['res.country'].search([['name', '=ilike', country_name]]) if country: if state_code: state = self.env['res.country.state'].search([ ('country_id', '=', country.id), ('code', '=ilike', state_code) ], limit=1) if not state and state_name: state = self.env['res.country.state'].search([ ('country_id', '=', country.id), ('name', '=ilike', state_name) ], limit=1) else: _logger.info('Country code not found: %s', country_code) if country: iap_data['country_id'] = {'id': country.id, 'display_name': country.display_name} if state: iap_data['state_id'] = {'id': state.id, 'display_name': state.display_name} return iap_data @api.model def _iap_replace_logo(self, iap_data): if iap_data.get('logo'): try: iap_data['image_1920'] = base64.b64encode( requests.get(iap_data['logo'], timeout=PARTNER_AC_TIMEOUT).content ) except Exception: iap_data['image_1920'] = False finally: iap_data.pop('logo') # avoid keeping falsy images (may happen that a blank page is returned that leads to an incorrect image) if iap_data['image_1920']: try: tools.base64_to_image(iap_data['image_1920']) except Exception: iap_data.pop('image_1920') return 
iap_data @api.model def _format_data_company(self, iap_data): self._iap_replace_location_codes(iap_data) if iap_data.get('child_ids'): child_ids = [] for child in iap_data.get('child_ids'): child_ids.append(self._iap_replace_location_codes(child)) iap_data['child_ids'] = child_ids if iap_data.get('additional_info'): iap_data['additional_info'] = json.dumps(iap_data['additional_info']) return iap_data @api.model def autocomplete(self, query, timeout=15): suggestions, _ = self.env['iap.autocomplete.api']._request_partner_autocomplete('search', { 'query': query, }, timeout=timeout) if suggestions: results = [] for suggestion in suggestions: results.append(self._format_data_company(suggestion)) return results else: return [] @api.model def enrich_company(self, company_domain, partner_gid, vat, timeout=15): response, error = self.env['iap.autocomplete.api']._request_partner_autocomplete('enrich', { 'domain': company_domain, 'partner_gid': partner_gid, 'vat': vat, }, timeout=timeout) if response and response.get('company_data'): result = self._format_data_company(response.get('company_data')) else: result = {} if response and response.get('credit_error'): result.update({ 'error': True, 'error_message': 'Insufficient Credit' }) elif error: result.update({ 'error': True, 'error_message': error }) return result @api.model def read_by_vat(self, vat, timeout=15): vies_vat_data, _ = self.env['iap.autocomplete.api']._request_partner_autocomplete('search_vat', { 'vat': vat, }, timeout=timeout) if vies_vat_data: return [self._format_data_company(vies_vat_data)] else: return [] @api.model def _is_company_in_europe(self, country_code): country = self.env['res.country'].search([('code', '=ilike', country_code)]) if country: country_id = country.id europe = self.env.ref('base.europe') if not europe: europe = self.env["res.country.group"].search([('name', '=', 'Europe')], limit=1) if not europe or country_id not in europe.country_ids.ids: return False return True def 
_is_vat_syncable(self, vat): vat_country_code = vat[:2] partner_country_code = self.country_id.code if self.country_id else '' return self._is_company_in_europe(vat_country_code) and (partner_country_code == vat_country_code or not partner_country_code) def _is_synchable(self): already_synched = self.env['res.partner.autocomplete.sync'].search([('partner_id', '=', self.id), ('synched', '=', True)]) return self.is_company and self.partner_gid and not already_synched def _update_autocomplete_data(self, vat): self.ensure_one() if vat and self._is_synchable() and self._is_vat_syncable(vat): self.env['res.partner.autocomplete.sync'].sudo().add_to_queue(self.id) @api.model_create_multi def create(self, vals_list): partners = super(ResPartner, self).create(vals_list) if len(vals_list) == 1: partners._update_autocomplete_data(vals_list[0].get('vat', False)) if partners.additional_info: template_values = json.loads(partners.additional_info) template_values['flavor_text'] = _("Partner created by Odoo Partner Autocomplete Service") partners.message_post_with_view( 'iap_mail.enrich_company', values=template_values, subtype_id=self.env.ref('mail.mt_note').id, ) partners.write({'additional_info': False}) return partners def write(self, values): res = super(ResPartner, self).write(values) if len(self) == 1: self._update_autocomplete_data(values.get('vat', False)) return res
37.622951
6,885
1,172
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Israel - Accounting', 'version': '1.0', 'category': 'Accounting/Localizations/Account Charts', 'description': """ This is the latest basic Israelian localisation necessary to run Odoo in Israel: ================================================================================ This module consists of: - Generic Israel Chart of Accounts - Taxes and tax report - Multiple Fiscal positions """, 'website': 'http://www.odoo.com/accounting', 'depends': ['l10n_multilang'], 'data': [ 'data/account_chart_template_data.xml', 'data/account_tax_group_data.xml', 'data/account_account_tag.xml', 'data/account.account.template.csv', 'data/account.group.template.csv', 'data/account_tax_report_data.xml', 'data/account_tax_template_data.xml', 'data/fiscal_templates_data.xml', 'data/account_chart_template_post_data.xml', 'data/account_chart_template_configure_data.xml', ], 'demo': [ 'demo/demo_company.xml', ], 'license': 'LGPL-3', }
33.485714
1,172
586
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. { 'name': 'Base import module', 'description': """ Import a custom data module =========================== This module allows authorized users to import a custom data module (.xml files and static assests) for customization purpose. """, 'category': 'Hidden/Tools', 'depends': ['web'], 'installable': True, 'auto_install': False, 'data': [ 'security/ir.model.access.csv', 'views/base_import_module_view.xml' ], 'license': 'LGPL-3', }
27.904762
586
8,680
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import json from io import BytesIO from zipfile import ZipFile from odoo.tools import cloc from odoo.addons.base.tests import test_cloc VALID_XML = """ <templates id="template" xml:space="preserve"> <t t-name="stock_barcode.LineComponent"> <div t-if="line.picking_id and line.picking_id.origin" name="origin"> <i class="fa fa-fw fa-file" /> <span t-esc="line.picking_id.origin" /> </div> </t> </templates> """ class TestClocFields(test_cloc.TestClocCustomization): def create_studio_module(self): # Studio module does not exist at this stage, so we simulate it # Check for existing module in case the test run on an existing database if not self.env['ir.module.module'].search([('name', '=', 'studio_customization')]): self.env['ir.module.module'].create({ 'author': 'Odoo', 'imported': True, 'latest_version': '13.0.1.0.0', 'name': 'studio_customization', 'state': 'installed', 'summary': 'Studio Customization', }) def test_fields_from_import_module(self): """ Check that custom computed fields installed with an imported module is counted as customization """ self.env['ir.module.module'].create({ 'name': 'imported_module', 'state': 'installed', 'imported': True, }) f1 = self.create_field('x_imported_field') self.create_xml_id('imported_module', 'import_field', f1) cl = cloc.Cloc() cl.count_customization(self.env) self.assertEqual(cl.code.get('imported_module', 0), 1, 'Count fields with xml_id of imported module') def test_fields_from_studio(self): self.create_studio_module() f1 = self.create_field('x_field_count') self.create_xml_id('studio_customization', 'field_count', f1) cl = cloc.Cloc() cl.count_customization(self.env) self.assertEqual(cl.code.get('studio_customization', 0), 0, "Don't count field generated by studio") f2 = self.create_field('x_studio_manual_field') self.create_xml_id('studio_customization', 'manual_field', f2) cl = cloc.Cloc() cl.count_customization(self.env) 
self.assertEqual(cl.code.get('studio_customization', 0), 1, "Count manual field created via studio") def test_fields_module_name(self): """ Check that custom computed fields installed with an imported module is counted as customization """ self.env['ir.module.module'].create({ 'name': 'imported_module', 'state': 'installed', 'imported': True, }) f1 = self.create_field('x_imported_field') self.create_xml_id('imported_module', 'import_field', f1) self.create_xml_id('__export__', 'import_field', f1) sa = self.create_server_action("Test imported double xml_id") self.create_xml_id("imported_module", "first", sa) self.create_xml_id("__export__", "second", sa) cl = cloc.Cloc() cl.count_customization(self.env) self.assertEqual(cl.code.get('imported_module', 0), 3) def test_count_qweb_imported_module(self): self.env['ir.module.module'].create({ 'author': 'Odoo', 'imported': True, 'latest_version': '15.0.1.0.0', 'name': 'test_imported_module', 'state': 'installed', 'summary': 'Test imported module for cloc', }) self.create_studio_module() qweb_view = self.env['ir.ui.view'].create({ "name": "Qweb Test", "type": "qweb", "mode": "primary", "arch_base": "<html>\n <body>\n <t t-out=\"Hello World\"/>\n </body>\n</html>", }) self.create_xml_id('test_imported_module', "qweb_view_test", qweb_view) # Add qweb view from non imported module qweb_view = self.env['ir.ui.view'].create({ "name": "Qweb Test", "type": "qweb", "arch_base": "<html>\n <body>\n <t t-out=\"Hello World\"/>\n </body>\n</html>", }) self.create_xml_id("studio_customization", "qweb_view_test", qweb_view) # Add form view from module form_view = self.env['ir.ui.view'].create({ "name": "Test partner", "type": "form", "model": "res.partner", "arch_base": "<form>\n <field name=\"name\" \n invisible=\"1\" />\n</form>", }) self.create_xml_id("test_imported_module", "form_view_test", form_view) cl = cloc.Cloc() cl.count_customization(self.env) self.assertEqual(cl.code.get('test_imported_module', 0), 5, "Count only qweb view 
from imported module") self.assertEqual(cl.code.get('studio_customization', 0), 0, "Do not count from studio_customization module") def test_count_attachment_imported_module(self): manifest_content = json.dumps({ 'name': 'test_imported_module', 'description': 'Test', 'assets': { 'web.assets_backend': [ 'test_imported_module/static/src/js/test.js', 'test_imported_module/static/src/css/test.scss', ] }, 'license': 'LGPL-3', }) stream = BytesIO() with ZipFile(stream, 'w') as archive: archive.writestr('test_imported_module/__manifest__.py', manifest_content) archive.writestr('test_imported_module/static/src/js/test.js', test_cloc.JS_TEST) archive.writestr('test_imported_module/static/src/js/test.scss', test_cloc.SCSS_TEST) archive.writestr('test_imported_module/static/src/js/test.xml', VALID_XML) # Import test module self.env['ir.module.module'].import_zipfile(stream) cl = cloc.Cloc() cl.count_customization(self.env) self.assertEqual(cl.code.get('test_imported_module', 0), 35) def test_exclude_qweb(self): self.env['ir.module.module'].create({ 'author': 'Odoo', 'imported': True, 'latest_version': '15.0.1.0.0', 'name': 'test_imported_module', 'state': 'installed', 'summary': 'Test imported module for cloc', }) qweb_view = self.env['ir.ui.view'].create({ "name": "Qweb Test", "type": "qweb", "mode": "primary", "arch_base": "<html>\n <body>\n <t t-out=\"Hello World\"/>\n </body>\n</html>", }) self.create_xml_id('test_imported_module', "qweb_view_test", qweb_view) self.create_xml_id('__cloc_exclude__', "qweb_view_test", qweb_view) cl = cloc.Cloc() cl.count_customization(self.env) self.assertEqual(cl.code.get('test_imported_module', 0), 0, "Do not count view with cloc_exclude") def test_exclude_attachment_imported_module(self): manifest_content = json.dumps({ 'name': 'test_imported_module', 'description': 'Test', 'assets': { 'web.assets_backend': [ 'test_imported_module/static/src/js/test.js', 'test_imported_module/static/src/css/test.scss', ] }, 'license': 'LGPL-3', }) stream 
= BytesIO() with ZipFile(stream, 'w') as archive: archive.writestr('test_imported_module/__manifest__.py', manifest_content) archive.writestr('test_imported_module/static/src/js/test.js', test_cloc.JS_TEST) archive.writestr('test_imported_module/static/src/js/test.scss', test_cloc.SCSS_TEST) archive.writestr('test_imported_module/static/src/js/test.xml', VALID_XML) id_names = [ 'attachment_/test_imported_module/static/src/js/test_js', 'attachment_/test_imported_module/static/src/js/test_scss', 'attachment_/test_imported_module/static/src/js/test_xml', ] # Import test module self.env['ir.module.module'].import_zipfile(stream) # Create exclude xml_id for name in id_names: rec = self.env.ref(f'test_imported_module.{name}') self.create_xml_id('__cloc_exclude__', name, rec) cl = cloc.Cloc() cl.count_customization(self.env) self.assertEqual(cl.code.get('test_imported_module', 0), 0)
40.943396
8,680
16,190
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import base64 import json import os import tempfile from io import BytesIO from zipfile import ZipFile import odoo.tests from odoo.tests import new_test_user from unittest.mock import patch from odoo.addons import __path__ as __addons_path__ from odoo.tools import mute_logger @odoo.tests.tagged('post_install', '-at_install') class TestImportModule(odoo.tests.TransactionCase): def import_zipfile(self, files): archive = BytesIO() with ZipFile(archive, 'w') as zipf: for path, data in files: zipf.writestr(path, data) return self.env['ir.module.module'].import_zipfile(archive) def test_import_zip(self): """Assert the behaviors expected by the module import feature using a ZIP archive""" files = [ ('foo/__manifest__.py', b"{'data': ['data.xml', 'res.partner.csv', 'data.sql']}"), ('foo/data.xml', b""" <data> <record id="foo" model="res.partner"> <field name="name">foo</field> </record> </data> """), ('foo/res.partner.csv', b'"id","name"\n' \ b'bar,bar' ), ('foo/data.sql', b"INSERT INTO res_partner (active, name) VALUES (true, 'baz');"), ('foo/static/css/style.css', b".foo{color: black;}"), ('foo/static/js/foo.js', b"console.log('foo')"), ('bar/__manifest__.py', b"{'data': ['data.xml']}"), ('bar/data.xml', b""" <data> <record id="foo" model="res.country"> <field name="name">foo</field> </record> </data> """), ] self.import_zipfile(files) self.assertEqual(self.env.ref('foo.foo')._name, 'res.partner') self.assertEqual(self.env.ref('foo.foo').name, 'foo') self.assertEqual(self.env.ref('foo.bar')._name, 'res.partner') self.assertEqual(self.env.ref('foo.bar').name, 'bar') self.assertEqual(self.env['res.partner'].search_count([('name', '=', 'baz')]), 1) self.assertEqual(self.env.ref('bar.foo')._name, 'res.country') self.assertEqual(self.env.ref('bar.foo').name, 'foo') for path, data in files: if path.split('/')[1] == 'static': static_attachment = 
self.env['ir.attachment'].search([('url', '=', '/%s' % path)]) self.assertEqual(static_attachment.name, os.path.basename(path)) self.assertEqual(static_attachment.datas, base64.b64encode(data)) def test_import_zip_invalid_manifest(self): """Assert the expected behavior when import a ZIP module with an invalid manifest""" files = [ ('foo/__manifest__.py', b"foo") ] with mute_logger("odoo.addons.base_import_module.models.ir_module"): result = self.import_zipfile(files) self.assertIn("Error while importing module 'foo'", result[0]) def test_import_zip_data_not_in_manifest(self): """Assert a data file not mentioned in the manifest is not imported""" files = [ ('foo/__manifest__.py', b"{'data': ['foo.xml']}"), ('foo/foo.xml', b""" <data> <record id="foo" model="res.partner"> <field name="name">foo</field> </record> </data> """), ('foo/bar.xml', b""" <data> <record id="bar" model="res.partner"> <field name="name">bar</field> </record> </data> """), ] self.import_zipfile(files) self.assertEqual(self.env.ref('foo.foo').name, 'foo') self.assertFalse(self.env.ref('foo.bar', raise_if_not_found=False)) def test_import_zip_ignore_unexpected_data_extension(self): """Assert data files using an unexpected extensions are correctly ignored""" files = [ ('foo/__manifest__.py', b"{'data': ['res.partner.xls']}"), ('foo/res.partner.xls', b'"id","name"\n' \ b'foo,foo' ), ] with self.assertLogs('odoo.addons.base_import_module.models.ir_module') as log_catcher: self.import_zipfile(files) self.assertEqual(len(log_catcher.output), 1) self.assertIn('module foo: skip unsupported file res.partner.xls', log_catcher.output[0]) self.assertFalse(self.env.ref('foo.foo', raise_if_not_found=False)) def test_import_zip_extract_only_useful(self): """Assert only the data and static files are extracted of the ZIP archive during the module import""" files = [ ('foo/__manifest__.py', b"{'data': ['data.xml', 'res.partner.xls']}"), ('foo/data.xml', b""" <data> <record id="foo" model="res.partner"> <field 
name="name">foo</field> </record> </data> """), ('foo/res.partner.xls', b'"id","name"\n' \ b'foo,foo' ), ('foo/static/css/style.css', b".foo{color: black;}"), ('foo/foo.py', b"foo = 42"), ] extracted_files = [] addons_path = [] origin_import_module = type(self.env['ir.module.module'])._import_module def _import_module(self, *args, **kwargs): _module, path = args for root, _dirs, files in os.walk(path): for file in files: extracted_files.append(os.path.relpath(os.path.join(root, file), path)) addons_path.extend(__addons_path__) return origin_import_module(self, *args, **kwargs) with patch.object(type(self.env['ir.module.module']), '_import_module', _import_module): self.import_zipfile(files) self.assertIn( '__manifest__.py', extracted_files, "__manifest__.py must be in the extracted files") self.assertIn( 'data.xml', extracted_files, "data.xml must be in the extracted files as its in the manifest's data") self.assertIn( 'static/css/style.css', extracted_files, "style.css must be in the extracted files as its in the static folder") self.assertNotIn( 'res.partner.xls', extracted_files, "res.partner.xls must not be in the extracted files as it uses an unsupported extension of data file") self.assertNotIn( 'foo.py', extracted_files, "foo.py must not be in the extracted files as its not the manifest's data") self.assertFalse( set(addons_path).difference(__addons_path__), 'No directory must be added in the addons path during import') def test_import_module_addons_path(self): """Assert it's possible to import a module using directly `_import_module` without zip from the addons path""" files = [ ('foo/__manifest__.py', b"{'data': ['data.xml']}"), ('foo/data.xml', b""" <data> <record id="foo" model="res.partner"> <field name="name">foo</field> </record> </data> """), ('foo/static/css/style.css', b".foo{color: black;}"), ] with tempfile.TemporaryDirectory() as module_dir: for path, data in files: os.makedirs(os.path.join(module_dir, os.path.dirname(path)), exist_ok=True) with 
open(os.path.join(module_dir, path), 'wb') as fp: fp.write(data) try: __addons_path__.append(module_dir) self.env['ir.module.module']._import_module('foo', os.path.join(module_dir, 'foo')) finally: __addons_path__.remove(module_dir) self.assertEqual(self.env.ref('foo.foo')._name, 'res.partner') self.assertEqual(self.env.ref('foo.foo').name, 'foo') static_path, static_data = files[2] static_attachment = self.env['ir.attachment'].search([('url', '=', '/%s' % static_path)]) self.assertEqual(static_attachment.name, os.path.basename(static_path)) self.assertEqual(static_attachment.datas, base64.b64encode(static_data)) def test_import_and_uninstall_module(self): bundle = 'web.assets_backend' path = '/test_module/static/src/js/test.js' manifest_content = json.dumps({ 'name': 'Test Module', 'description': 'Test', 'assets': { 'web.assets_backend': [ 'test_module/static/src/js/test.js' ] }, 'license': 'LGPL-3', }) stream = BytesIO() with ZipFile(stream, 'w') as archive: archive.writestr('test_module/__manifest__.py', manifest_content) archive.writestr('test_module/static/src/js/test.js', "console.log('AAA');") # Import test module self.env['ir.module.module'].import_zipfile(stream) attachment = self.env['ir.attachment'].search([('url', '=', path)]) self.assertEqual(attachment.name, 'test.js') self.assertEqual(attachment.type, 'binary') self.assertEqual(attachment.raw, b"console.log('AAA');") asset = self.env['ir.asset'].search([('name', '=', f'test_module.{bundle}.{path}')]) self.assertEqual(asset.path, path) self.assertEqual(asset.bundle, bundle) self.assertEqual(asset.directive, 'append') self.assertEqual(asset.target, False) asset_data = self.env['ir.model.data'].search([('model', '=', 'ir.asset'), ('res_id', '=', asset.id)]) self.assertEqual(asset_data.module, 'test_module') self.assertEqual(asset_data.name, f'{bundle}_{path}'.replace(".", "_")) # Uninstall test module self.env['ir.module.module'].search([('name', '=', 'test_module')]).module_uninstall() attachment = 
self.env['ir.attachment'].search([('url', '=', path)]) self.assertEqual(len(attachment), 0) asset = self.env['ir.asset'].search([('name', '=', f'test_module.{bundle}.{path}')]) self.assertEqual(len(asset), 0) asset_data = self.env['ir.model.data'].search([('model', '=', 'ir.asset'), ('res_id', '=', asset.id)]) self.assertEqual(len(asset_data), 0) def test_import_and_update_module(self): self.test_user = new_test_user( self.env, login='Admin', groups='base.group_user,base.group_system', name='Admin', ) bundle = 'web.assets_backend' path = 'test_module/static/src/js/test.js' manifest_content = json.dumps({ 'name': 'Test Module', 'description': 'Test', 'assets': { bundle: [ path ] }, 'license': 'LGPL-3', }) stream = BytesIO() with ZipFile(stream, 'w') as archive: archive.writestr('test_module/__manifest__.py', manifest_content) archive.writestr(path, "console.log('AAA');") # Import test module self.env['ir.module.module'].with_user(self.test_user).import_zipfile(stream) attachment = self.env['ir.attachment'].search([('url', '=', f'/{path}')]) self.assertEqual(attachment.name, 'test.js') self.assertEqual(attachment.type, 'binary') self.assertEqual(attachment.raw, b"console.log('AAA');") asset = self.env['ir.asset'].search([('name', '=', f'test_module.{bundle}./{path}')]) self.assertEqual(asset.path, f'/{path}') self.assertEqual(asset.bundle, bundle) self.assertEqual(asset.directive, 'append') self.assertEqual(asset.target, False) asset_data = self.env['ir.model.data'].search([('model', '=', 'ir.asset'), ('res_id', '=', asset.id)]) self.assertEqual(asset_data.module, 'test_module') self.assertEqual(asset_data.name, f'{bundle}_/{path}'.replace(".", "_")) # Update test module stream = BytesIO() with ZipFile(stream, 'w') as archive: archive.writestr('test_module/__manifest__.py', manifest_content) archive.writestr(path, "console.log('BBB');") # Import test module self.env['ir.module.module'].with_user(self.test_user).import_zipfile(stream) attachment = 
self.env['ir.attachment'].search([('url', '=', f'/{path}')]) self.assertEqual(attachment.name, 'test.js') self.assertEqual(attachment.type, 'binary') self.assertEqual(attachment.raw, b"console.log('BBB');") asset = self.env['ir.asset'].search([('name', '=', f'test_module.{bundle}./{path}')]) self.assertEqual(asset.path, f'/{path}') self.assertEqual(asset.bundle, bundle) self.assertEqual(asset.directive, 'append') self.assertEqual(asset.target, False) asset_data = self.env['ir.model.data'].search([('model', '=', 'ir.asset'), ('res_id', '=', asset.id)]) self.assertEqual(asset_data.module, 'test_module') self.assertEqual(asset_data.name, f'{bundle}_/{path}'.replace(".", "_")) class TestImportModuleHttp(TestImportModule, odoo.tests.HttpCase): def test_import_module_icon(self): """Assert import a module with an icon result in the module displaying the icon in the apps menu, and with the base module icon if module without icon""" files = [ ('foo/__manifest__.py', b"{'name': 'foo'}"), ('foo/static/description/icon.png', b"foo_icon"), ('bar/__manifest__.py', b"{'name': 'bar'}"), ] self.import_zipfile(files) foo_icon_path, foo_icon_data = files[1] # Assert icon of module foo, which must be the icon provided in the zip self.assertEqual(self.url_open('/' + foo_icon_path).content, foo_icon_data) # Assert icon of module bar, which must be the icon of the base module as none was provided self.assertEqual(self.env.ref('base.module_bar').icon_image, self.env.ref('base.module_base').icon_image) def test_import_module_field_file(self): files = [ ('foo/__manifest__.py', b"{'data': ['data.xml']}"), ('foo/data.xml', b""" <data> <record id="logo" model="ir.attachment"> <field name="name">Company Logo</field> <field name="datas" type="base64" file="foo/static/src/img/content/logo.png"/> <field name="res_model">ir.ui.view</field> <field name="public" eval="True"/> </record> </data> """), ('foo/static/src/img/content/logo.png', b"foo_logo"), ] self.import_zipfile(files) logo_path, 
logo_data = files[2] self.assertEqual(base64.b64decode(self.env.ref('foo.logo').datas), logo_data) self.assertEqual(self.url_open('/' + logo_path).content, logo_data) def test_import_module_assets_http(self): asset_bundle = 'web_assets_backend' asset_path = '/foo/static/src/js/test.js' files = [ ('foo/__manifest__.py', json.dumps({ 'assets': { asset_bundle: [ asset_path, ] }, })), ('foo/static/src/js/test.js', b"foo_assets_backend"), ] self.import_zipfile(files) asset = self.env.ref('foo.web_assets_backend_/foo/static/src/js/test_js') self.assertEqual(asset.bundle, asset_bundle) self.assertEqual(asset.path, asset_path) asset_data = files[1][1] self.assertEqual(self.url_open(asset_path).content, asset_data)
43.756757
16,190
1,743
py
PYTHON
15.0
# -*- coding: utf-8 -*- import base64 from io import BytesIO from odoo import api, fields, models class BaseImportModule(models.TransientModel): """ Import Module """ _name = "base.import.module" _description = "Import Module" module_file = fields.Binary(string='Module .ZIP file', required=True, attachment=False) state = fields.Selection([('init', 'init'), ('done', 'done')], string='Status', readonly=True, default='init') import_message = fields.Text() force = fields.Boolean(string='Force init', help="Force init mode even if installed. (will update `noupdate='1'` records)") def import_module(self): self.ensure_one() IrModule = self.env['ir.module.module'] zip_data = base64.decodebytes(self.module_file) fp = BytesIO() fp.write(zip_data) res = IrModule.import_zipfile(fp, force=self.force) self.write({'state': 'done', 'import_message': res[0]}) context = dict(self.env.context, module_name=res[1]) # Return wizard otherwise it will close wizard and will not show result message to user. return { 'name': 'Import Module', 'view_mode': 'form', 'target': 'new', 'res_id': self.id, 'res_model': 'base.import.module', 'type': 'ir.actions.act_window', 'context': context, } def action_module_open(self): self.ensure_one() return { 'domain': [('name', 'in', self.env.context.get('module_name', []))], 'name': 'Modules', 'view_mode': 'tree,form', 'res_model': 'ir.module.module', 'view_id': False, 'type': 'ir.actions.act_window', }
37.891304
1,743
1,012
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from operator import itemgetter from odoo import api, models class IrUiView(models.Model): _inherit = 'ir.ui.view' @api.model def _validate_custom_views(self, model): # views from imported modules should be considered as custom views result = super(IrUiView, self)._validate_custom_views(model) self._cr.execute(""" SELECT max(v.id) FROM ir_ui_view v LEFT JOIN ir_model_data md ON (md.model = 'ir.ui.view' AND md.res_id = v.id) LEFT JOIN ir_module_module m ON (m.name = md.module) WHERE m.imported = true AND v.model = %s AND v.active = true GROUP BY coalesce(v.inherit_id, v.id) """, [model]) ids = (row[0] for row in self._cr.fetchall()) views = self.with_context(load_all_views=True).browse(ids) return views._check_xml() and result
36.142857
1,012
13,020
py
PYTHON
15.0
# -*- coding: utf-8 -*- import ast import base64 import logging import lxml import os import sys import tempfile import zipfile from collections import defaultdict from os.path import join as opj from odoo import api, fields, models, _ from odoo.exceptions import UserError from odoo.modules.module import MANIFEST_NAMES from odoo.tools import convert_csv_import, convert_sql_import, convert_xml_import, exception_to_unicode from odoo.tools import file_open, file_open_temporary_directory _logger = logging.getLogger(__name__) MAX_FILE_SIZE = 100 * 1024 * 1024 # in megabytes class IrModule(models.Model): _inherit = "ir.module.module" imported = fields.Boolean(string="Imported Module") def _get_modules_to_load_domain(self): # imported modules are not expected to be loaded as regular modules return super()._get_modules_to_load_domain() + [('imported', '=', False)] @api.depends('name') def _get_latest_version(self): imported_modules = self.filtered(lambda m: m.imported and m.latest_version) for module in imported_modules: module.installed_version = module.latest_version super(IrModule, self - imported_modules)._get_latest_version() def _import_module(self, module, path, force=False): known_mods = self.search([]) known_mods_names = {m.name: m for m in known_mods} installed_mods = [m.name for m in known_mods if m.state == 'installed'] terp = {} manifest_path = next((opj(path, name) for name in MANIFEST_NAMES if os.path.exists(opj(path, name))), None) if manifest_path: with file_open(manifest_path, 'rb', env=self.env) as f: terp.update(ast.literal_eval(f.read().decode())) if not terp: return False if not terp.get('icon'): icon_path = 'static/description/icon.png' module_icon = module if os.path.exists(opj(path, icon_path)) else 'base' terp['icon'] = opj('/', module_icon, icon_path) values = self.get_values_from_terp(terp) if 'version' in terp: values['latest_version'] = terp['version'] unmet_dependencies = set(terp.get('depends', [])).difference(installed_mods) if 
unmet_dependencies: if (unmet_dependencies == set(['web_studio']) and _is_studio_custom(path)): err = _("Studio customizations require Studio") else: err = _("Unmet module dependencies: \n\n - %s") % '\n - '.join( known_mods.filtered(lambda mod: mod.name in unmet_dependencies).mapped('shortdesc') ) raise UserError(err) elif 'web_studio' not in installed_mods and _is_studio_custom(path): raise UserError(_("Studio customizations require the Odoo Studio app.")) mod = known_mods_names.get(module) if mod: mod.write(dict(state='installed', **values)) mode = 'update' if not force else 'init' else: assert terp.get('installable', True), "Module not installable" self.create(dict(name=module, state='installed', imported=True, **values)) mode = 'init' for kind in ['data', 'init_xml', 'update_xml']: for filename in terp.get(kind, []): ext = os.path.splitext(filename)[1].lower() if ext not in ('.xml', '.csv', '.sql'): _logger.info("module %s: skip unsupported file %s", module, filename) continue _logger.info("module %s: loading %s", module, filename) noupdate = False if ext == '.csv' and kind in ('init', 'init_xml'): noupdate = True pathname = opj(path, filename) idref = {} with file_open(pathname, 'rb', env=self.env) as fp: if ext == '.csv': convert_csv_import(self.env.cr, module, pathname, fp.read(), idref, mode, noupdate) elif ext == '.sql': convert_sql_import(self.env.cr, fp) elif ext == '.xml': convert_xml_import(self.env.cr, module, fp, idref, mode, noupdate) path_static = opj(path, 'static') IrAttachment = self.env['ir.attachment'] if os.path.isdir(path_static): for root, dirs, files in os.walk(path_static): for static_file in files: full_path = opj(root, static_file) with file_open(full_path, 'rb', env=self.env) as fp: data = base64.b64encode(fp.read()) url_path = '/{}{}'.format(module, full_path.split(path)[1].replace(os.path.sep, '/')) if not isinstance(url_path, str): url_path = url_path.decode(sys.getfilesystemencoding()) filename = os.path.split(url_path)[1] values 
= dict( name=filename, url=url_path, res_model='ir.ui.view', type='binary', datas=data, ) attachment = IrAttachment.sudo().search([('url', '=', url_path), ('type', '=', 'binary'), ('res_model', '=', 'ir.ui.view')]) if attachment: attachment.write(values) else: attachment = IrAttachment.create(values) self.env['ir.model.data'].create({ 'name': f"attachment_{url_path}".replace('.', '_'), 'model': 'ir.attachment', 'module': module, 'res_id': attachment.id, }) IrAsset = self.env['ir.asset'] assets_vals = [] # Generate 'ir.asset' record values for each asset delared in the manifest for bundle, commands in terp.get('assets', {}).items(): for command in commands: directive, target, path = IrAsset._process_command(command) path = path if path.startswith('/') else '/' + path # Ensures a '/' at the start assets_vals.append({ 'name': f'{module}.{bundle}.{path}', 'directive': directive, 'target': target, 'path': path, 'bundle': bundle, }) # Look for existing assets existing_assets = { asset.name: asset for asset in IrAsset.search([('name', 'in', [vals['name'] for vals in assets_vals])]) } assets_to_create = [] # Update existing assets and generate the list of new assets values for values in assets_vals: if values['name'] in existing_assets: existing_assets[values['name']].write(values) else: assets_to_create.append(values) # Create new assets and attach 'ir.model.data' records to them created_assets = IrAsset.create(assets_to_create) self.env['ir.model.data'].create([{ 'name': f"{asset['bundle']}_{asset['path']}".replace(".", "_"), 'model': 'ir.asset', 'module': module, 'res_id': asset.id, } for asset in created_assets]) return True @api.model def import_zipfile(self, module_file, force=False): if not module_file: raise Exception(_("No file sent.")) if not zipfile.is_zipfile(module_file): raise UserError(_('Only zip files are supported.')) success = [] errors = dict() module_names = [] with zipfile.ZipFile(module_file, "r") as z: for zf in z.filelist: if zf.file_size > 
MAX_FILE_SIZE: raise UserError(_("File '%s' exceed maximum allowed file size", zf.filename)) with file_open_temporary_directory(self.env) as module_dir: manifest_files = [ file for file in z.filelist if file.filename.count('/') == 1 and file.filename.split('/')[1] in MANIFEST_NAMES ] module_data_files = defaultdict(list) for manifest in manifest_files: manifest_path = z.extract(manifest, module_dir) mod_name = manifest.filename.split('/')[0] try: with file_open(manifest_path, 'rb', env=self.env) as f: terp = ast.literal_eval(f.read().decode()) except Exception: continue for filename in terp.get('data', []) + terp.get('init_xml', []) + terp.get('update_xml', []): if os.path.splitext(filename)[1].lower() not in ('.xml', '.csv', '.sql'): continue module_data_files[mod_name].append('%s/%s' % (mod_name, filename)) for file in z.filelist: filename = file.filename mod_name = filename.split('/')[0] is_data_file = filename in module_data_files[mod_name] is_static = filename.startswith('%s/static' % mod_name) if is_data_file or is_static: z.extract(file, module_dir) dirs = [d for d in os.listdir(module_dir) if os.path.isdir(opj(module_dir, d))] for mod_name in dirs: module_names.append(mod_name) try: # assert mod_name.startswith('theme_') path = opj(module_dir, mod_name) if self._import_module(mod_name, path, force=force): success.append(mod_name) except Exception as e: _logger.exception('Error while importing module') errors[mod_name] = exception_to_unicode(e) r = ["Successfully imported module '%s'" % mod for mod in success] for mod, error in errors.items(): r.append("Error while importing module '%s'.\n\n %s \n Make sure those modules are installed and try again." % (mod, error)) return '\n'.join(r), module_names def module_uninstall(self): # Delete an ir_module_module record completely if it was an imported # one. The rationale behind this is that an imported module *cannot* be # reinstalled anyway, as it requires the data files. 
Any attempt to # install it again will simply fail without trace. # /!\ modules_to_delete must be calculated before calling super().module_uninstall(), # because when uninstalling `base_import_module` the `imported` column will no longer be # in the database but we'll still have an old registry that runs this code. modules_to_delete = self.filtered('imported') res = super().module_uninstall() if modules_to_delete: deleted_modules_names = modules_to_delete.mapped('name') assets_data = self.env['ir.model.data'].search([ ('model', '=', 'ir.asset'), ('module', 'in', deleted_modules_names), ]) assets = self.env['ir.asset'].search([('id', 'in', assets_data.mapped('res_id'))]) assets.unlink() _logger.info("deleting imported modules upon uninstallation: %s", ", ".join(deleted_modules_names)) modules_to_delete.unlink() return res def _is_studio_custom(path): """ Checks the to-be-imported records to see if there are any references to studio, which would mean that the module was created using studio Returns True if any of the records contains a context with the key studio in it, False if none of the records do """ filepaths = [] for level in os.walk(path): filepaths += [os.path.join(level[0], fn) for fn in level[2]] filepaths = [fp for fp in filepaths if fp.lower().endswith('.xml')] for fp in filepaths: root = lxml.etree.parse(fp).getroot() for record in root: # there might not be a context if it's a non-studio module try: # ast.literal_eval is like eval(), but safer # context is a string representing a python dict ctx = ast.literal_eval(record.get('context')) # there are no cases in which studio is false # so just checking for its existence is enough if ctx and ctx.get('studio'): return True except Exception: continue return False
45.524476
13,020
1,260
py
PYTHON
15.0
# -*- coding: utf-8 -*- import functools from odoo import _ from odoo.exceptions import AccessError from odoo.http import Controller, route, request, Response def webservice(f): @functools.wraps(f) def wrap(*args, **kw): try: return f(*args, **kw) except Exception as e: return Response(response=str(e), status=500) return wrap class ImportModule(Controller): def check_user(self, uid=None): if uid is None: uid = request.uid is_admin = request.env['res.users'].browse(uid)._is_admin() if not is_admin: raise AccessError(_("Only administrators can upload a module")) @route( '/base_import_module/login_upload', type='http', auth='none', methods=['POST'], csrf=False, save_session=False) @webservice def login_upload(self, login, password, db=None, force='', mod_file=None, **kw): if db and db != request.db: raise Exception(_("Could not select database '%s'", db)) uid = request.session.authenticate(request.db, login, password) self.check_user(uid) force = True if force == '1' else False return request.env['ir.module.module'].import_zipfile(mod_file, force=force)[0]
34.054054
1,260
611
py
PYTHON
15.0
# -*- coding: utf-8 -*- { 'name': 'Norway - E-Invoicing (EHF 3)', 'icon': '/l10n_no/static/description/icon.png', 'version': '0.1', 'category': 'Accounting/Localizations/EDI', 'summary': 'E-Invoicing, Universal Business Language (EHF 3)', 'description': """ EHF 3 is the Norwegian implementation of EN 16931 norm. """, 'depends': ['l10n_no', 'account_edi_ubl_bis3'], 'data': [ 'data/account_edi_data.xml', 'data/ehf_3_template.xml', ], 'post_init_hook': '_post_init_hook', 'installable': True, 'auto_install': True, 'license': 'LGPL-3', }
30.55
611
10,558
py
PYTHON
15.0
# -*- coding: utf-8 -*- from odoo.addons.account_edi.tests.common import AccountEdiTestCommon from odoo.tests import tagged from freezegun import freeze_time @tagged('post_install_l10n', 'post_install', '-at_install') class TestUBL(AccountEdiTestCommon): @classmethod def setUpClass(cls, chart_template_ref='l10n_no.no_chart_template', edi_format_ref='l10n_no_edi.edi_ehf_3'): super().setUpClass(chart_template_ref=chart_template_ref, edi_format_ref=edi_format_ref) if cls.env['ir.module.module'].search( [('name', '=', 'account_edi_ubl_cii'), ('state', '=', 'installed')], limit=1, ): cls.skipTest(cls, "EHF Tests skipped because account_edi_ubl_cii is installed.") cls.company_data['company'].partner_id.write({ 'street': 'Archefstraat 42', 'zip': '1000', 'city': 'Amsterdam', 'country_id': cls.env.ref('base.no').id, 'l10n_no_bronnoysund_number': '987654325', 'vat': 'NO987654325MVA', }) cls.partner_a.write({ 'l10n_no_bronnoysund_number': '864234232', 'country_id': cls.env.ref('base.no').id, 'vat': 'NO864234232MVA', }) bank_account = cls.env['res.partner.bank'].create({ 'acc_number': '86011117947', 'partner_id': cls.partner_a.id, }) cls.tax_sale_b.write({ 'amount': 15 }) cls.invoice = cls.env['account.move'].create({ 'partner_id': cls.partner_a.id, 'move_type': 'out_invoice', 'partner_bank_id': bank_account.id, 'invoice_date_due': '2020-12-16', 'invoice_line_ids': [ (0, 0, { 'product_id': cls.product_a.id, 'quantity': 150, 'price_unit': 250, 'discount': 10, 'tax_ids': [(6, 0, cls.tax_sale_a.ids)], }), (0, 0, { 'product_id': cls.product_b.id, 'quantity': 12, 'price_unit': 100, 'tax_ids': [(6, 0, cls.tax_sale_b.ids)], }), ] }) cls.expected_invoice_values = ''' <Invoice xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:Invoice-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2"> 
<cbc:CustomizationID>urn:cen.eu:en16931:2017#compliant#urn:fdc:peppol.eu:2017:poacc:billing:3.0</cbc:CustomizationID> <cbc:ProfileID>urn:fdc:peppol.eu:2017:poacc:billing:01:1.0</cbc:ProfileID> <cbc:ID>INV/2020/00001</cbc:ID> <cbc:IssueDate>2020-12-16</cbc:IssueDate> <cbc:DueDate>2020-12-16</cbc:DueDate> <cbc:InvoiceTypeCode>380</cbc:InvoiceTypeCode> <cbc:DocumentCurrencyCode>NOK</cbc:DocumentCurrencyCode> <cbc:BuyerReference>partner_a</cbc:BuyerReference> <cac:AccountingSupplierParty> <cac:Party> <cbc:EndpointID schemeID="0192">987654325</cbc:EndpointID> <cac:PartyName> <cbc:Name>company_1_data</cbc:Name> </cac:PartyName> <cac:PostalAddress> <cbc:StreetName>Archefstraat 42</cbc:StreetName> <cbc:CityName>Amsterdam</cbc:CityName> <cbc:PostalZone>1000</cbc:PostalZone> <cac:Country> <cbc:IdentificationCode>NO</cbc:IdentificationCode> </cac:Country> </cac:PostalAddress> <cac:PartyTaxScheme> <cbc:CompanyID>NO987654325MVA</cbc:CompanyID> <cac:TaxScheme> <cbc:ID>VAT</cbc:ID> </cac:TaxScheme> </cac:PartyTaxScheme> <cac:PartyTaxScheme> <cbc:CompanyID>Foretaksregisteret</cbc:CompanyID> <cac:TaxScheme> <cbc:ID>TAX</cbc:ID> </cac:TaxScheme> </cac:PartyTaxScheme> <cac:PartyLegalEntity> <cbc:RegistrationName>company_1_data</cbc:RegistrationName> </cac:PartyLegalEntity> <cac:Contact> <cbc:Name>company_1_data</cbc:Name> </cac:Contact> </cac:Party> </cac:AccountingSupplierParty> <cac:AccountingCustomerParty> <cac:Party> <cbc:EndpointID schemeID="0192">864234232</cbc:EndpointID> <cac:PartyName> <cbc:Name>partner_a</cbc:Name> </cac:PartyName> <cac:PostalAddress> <cac:Country> <cbc:IdentificationCode>NO</cbc:IdentificationCode> </cac:Country> </cac:PostalAddress> <cac:PartyTaxScheme> <cbc:CompanyID>NO864234232MVA</cbc:CompanyID> <cac:TaxScheme> <cbc:ID>VAT</cbc:ID> </cac:TaxScheme> </cac:PartyTaxScheme> <cac:PartyLegalEntity> <cbc:RegistrationName>partner_a</cbc:RegistrationName> </cac:PartyLegalEntity> <cac:Contact> <cbc:Name>partner_a</cbc:Name> </cac:Contact> </cac:Party> 
</cac:AccountingCustomerParty> <cac:PaymentMeans> <cbc:PaymentMeansCode>31</cbc:PaymentMeansCode> <cac:PayeeFinancialAccount> <cbc:ID>86011117947</cbc:ID> </cac:PayeeFinancialAccount> </cac:PaymentMeans> <cac:TaxTotal> <cbc:TaxAmount currencyID="NOK">8617.50</cbc:TaxAmount> <cac:TaxSubtotal> <cbc:TaxableAmount currencyID="NOK">33750.00</cbc:TaxableAmount> <cbc:TaxAmount currencyID="NOK">8437.50</cbc:TaxAmount> <cac:TaxCategory> <cbc:ID>S</cbc:ID> <cbc:Percent>25.0</cbc:Percent> <cac:TaxScheme> <cbc:ID>VAT</cbc:ID> </cac:TaxScheme> </cac:TaxCategory> </cac:TaxSubtotal><cac:TaxSubtotal> <cbc:TaxableAmount currencyID="NOK">1200.00</cbc:TaxableAmount> <cbc:TaxAmount currencyID="NOK">180.00</cbc:TaxAmount> <cac:TaxCategory> <cbc:ID>S</cbc:ID> <cbc:Percent>15.0</cbc:Percent> <cac:TaxScheme> <cbc:ID>VAT</cbc:ID> </cac:TaxScheme> </cac:TaxCategory> </cac:TaxSubtotal> </cac:TaxTotal> <cac:LegalMonetaryTotal> <cbc:LineExtensionAmount currencyID="NOK">34950.00</cbc:LineExtensionAmount> <cbc:TaxExclusiveAmount currencyID="NOK">34950.00</cbc:TaxExclusiveAmount> <cbc:TaxInclusiveAmount currencyID="NOK">43567.50</cbc:TaxInclusiveAmount> <cbc:PrepaidAmount currencyID="NOK">0.00</cbc:PrepaidAmount> <cbc:PayableAmount currencyID="NOK">43567.50</cbc:PayableAmount> </cac:LegalMonetaryTotal> <cac:InvoiceLine> <cbc:ID>1</cbc:ID> <cbc:Note>Discount (10.0 %)</cbc:Note> <cbc:InvoicedQuantity unitCode="ZZ">150.0</cbc:InvoicedQuantity> <cbc:LineExtensionAmount currencyID="NOK">33750.00</cbc:LineExtensionAmount> <cac:Item> <cbc:Description>product_a</cbc:Description> <cbc:Name>product_a</cbc:Name> <cac:ClassifiedTaxCategory> <cbc:ID>S</cbc:ID> <cbc:Percent>25.0</cbc:Percent> <cac:TaxScheme> <cbc:ID>VAT</cbc:ID> </cac:TaxScheme> </cac:ClassifiedTaxCategory> </cac:Item> <cac:Price> <cbc:PriceAmount currencyID="NOK">225.00</cbc:PriceAmount> <cbc:BaseQuantity>150.0</cbc:BaseQuantity> </cac:Price> </cac:InvoiceLine> <cac:InvoiceLine> <cbc:ID>2</cbc:ID> <cbc:InvoicedQuantity 
unitCode="ZZ">12.0</cbc:InvoicedQuantity> <cbc:LineExtensionAmount currencyID="NOK">1200.00</cbc:LineExtensionAmount> <cac:Item> <cbc:Description>product_b</cbc:Description> <cbc:Name>product_b</cbc:Name> <cac:ClassifiedTaxCategory> <cbc:ID>S</cbc:ID> <cbc:Percent>15.0</cbc:Percent> <cac:TaxScheme> <cbc:ID>VAT</cbc:ID> </cac:TaxScheme> </cac:ClassifiedTaxCategory> </cac:Item> <cac:Price> <cbc:PriceAmount currencyID="NOK">100.00</cbc:PriceAmount> <cbc:BaseQuantity>12.0</cbc:BaseQuantity> </cac:Price> </cac:InvoiceLine> </Invoice> ''' def test_ehf_import(self): invoice = self.env['account.move'].with_context(default_move_type='in_invoice').create({}) invoice_count = self.env['account.move'].search_count([]) self.update_invoice_from_file('l10n_no_edi', 'test_xml_file', 'ehf_test.xml', invoice) self.assertEqual(self.env['account.move'].search_count([]), invoice_count) self.assertRecordValues(invoice, [{ 'partner_id': self.partner_a.id, 'amount_total': 1801.78, 'amount_tax': 365.28, }]) @freeze_time('2020-12-16') def test_ehf_export(self): self.assert_generated_file_equal(self.invoice, self.expected_invoice_values)
45.904348
10,558
5,414
py
PYTHON
15.0
# -*- coding: utf-8 -*- from odoo import models, _ from odoo.addons.account_edi_ubl_bis3.models.account_edi_format import COUNTRY_EAS class AccountEdiFormat(models.Model): _inherit = 'account.edi.format' #################################################### # Import #################################################### def _is_ubl(self, filename, tree): """ OVERRIDE so that the generic ubl parser does not parse BIS3 any longer. """ is_ubl = super()._is_ubl(filename, tree) return is_ubl and not self._is_ehf_3(filename, tree) def _is_ehf_3(self, filename, tree): ns = self._get_bis3_namespaces() return tree.tag == '{urn:oasis:names:specification:ubl:schema:xsd:Invoice-2}Invoice' \ and 'peppol' in tree.findtext('./cbc:ProfileID', '', namespaces=ns) \ and tree.xpath( "./cac:AccountingSupplierParty/cac:Party/cac:PartyTaxScheme/cbc:CompanyID[text()='Foretaksregisteret']", namespaces=ns) is not None def _bis3_get_extra_partner_domains(self, tree): if self.code == 'ehf_3': ns = self._get_bis3_namespaces() bronnoysund = tree.xpath('./cac:AccountingSupplierParty/cac:Party/cbc:EndpointID[@schemeID="0192"]/text()', namespaces=ns) if bronnoysund: return [('l10n_no_bronnoysund_number', '=', bronnoysund[0])] return super()._bis3_get_extra_partner_domains(tree) #################################################### # Export #################################################### def _get_ehf_3_values(self, invoice): values = super()._get_bis3_values(invoice) for partner_vals in (values['customer_vals'], values['supplier_vals']): partner = partner_vals['partner'] if partner.country_code == 'NO': partner_vals.update( bis3_endpoint=partner.l10n_no_bronnoysund_number, bis3_endpoint_scheme='0192', ) return values def _export_ehf_3(self, invoice): self.ensure_one() # Create file content. 
xml_content = self.env.ref('l10n_no_edi.export_ehf_3_invoice')._render(self._get_ehf_3_values(invoice)) vat = invoice.company_id.partner_id.commercial_partner_id.vat xml_name = 'ehf-%s%s%s.xml' % (vat or '', '-' if vat else '', invoice.name.replace('/', '_')) return self.env['ir.attachment'].create({ 'name': xml_name, 'raw': xml_content.encode(), 'res_model': 'account.move', 'res_id': invoice.id, 'mimetype': 'application/xml' }) #################################################### # Account.edi.format override #################################################### def _check_move_configuration(self, invoice): errors = super()._check_move_configuration(invoice) if self.code != 'ehf_3' or self._is_account_edi_ubl_cii_available(): return errors supplier = invoice.company_id.partner_id.commercial_partner_id if supplier.country_code == 'NO' and not supplier.l10n_no_bronnoysund_number: errors.append(_("The supplier %r must have a Bronnoysund company registry.", supplier.display_name)) if supplier.country_code != 'NO' and supplier.country_code not in COUNTRY_EAS: errors.append(_("The supplier %r is from a country that is not supported for EHF (Bis3)", supplier.display_name)) customer = invoice.commercial_partner_id if customer.country_code == 'NO' and not customer.l10n_no_bronnoysund_number: errors.append(_("The customer %r must have a Bronnoysund company registry.", customer.display_name)) if customer.country_code != 'NO' and customer.country_code not in COUNTRY_EAS: errors.append(_("The customer %r is from a country that is not supported for EHF (Bis3)", customer.display_name)) return errors def _is_compatible_with_journal(self, journal): self.ensure_one() if self.code != 'ehf_3' or self._is_account_edi_ubl_cii_available(): return super()._is_compatible_with_journal(journal) return journal.type == 'sale' and journal.country_code == 'NO' def _post_invoice_edi(self, invoices): self.ensure_one() if self.code != 'ehf_3' or self._is_account_edi_ubl_cii_available(): return 
super()._post_invoice_edi(invoices) invoice = invoices # no batch ensure that there is only one invoice attachment = self._export_ehf_3(invoice) return {invoice: {'attachment': attachment}} def _create_invoice_from_xml_tree(self, filename, tree, journal=None): self.ensure_one() if self.code == 'ehf_3' and self._is_ehf_3(filename, tree) and not self._is_account_edi_ubl_cii_available(): return self._decode_bis3(tree, self.env['account.move']) return super()._create_invoice_from_xml_tree(filename, tree, journal=journal) def _update_invoice_from_xml_tree(self, filename, tree, invoice): self.ensure_one() if self.code == 'ehf_3' and self._is_ehf_3(filename, tree) and not self._is_account_edi_ubl_cii_available(): return self._decode_bis3(tree, invoice) return super()._update_invoice_from_xml_tree(filename, tree, invoice)
46.672414
5,414
720
py
PYTHON
15.0
{ 'name' : 'IM Bus', 'version': '1.0', 'category': 'Hidden', 'complexity': 'easy', 'description': "Instant Messaging Bus allow you to send messages to users, in live.", 'depends': ['base', 'web'], 'data': [ 'security/ir.model.access.csv', ], 'installable': True, 'assets': { 'web.assets_backend': [ 'bus/static/src/**/*', ], 'web.assets_frontend': [ 'bus/static/src/js/longpolling_bus.js', 'bus/static/src/js/crosstab_bus.js', 'bus/static/src/js/services/bus_service.js', ], 'web.qunit_suite_tests': [ 'bus/static/tests/*.js', ], }, 'license': 'LGPL-3', }
27.692308
720
1,919
py
PYTHON
15.0
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import odoo.tests from odoo.osv import expression @odoo.tests.tagged('post_install', '-at_install', 'assets_bundle') class BusWebTests(odoo.tests.HttpCase): def test_bundle_sends_bus(self): """ Tests two things: - Messages are posted to the bus when assets change i.e. their hash has been recomputed and differ from the attachment's - The interface deals with those bus messages by displaying one notification """ db_name = self.env.registry.db_name bundle_xml_ids = ('web.assets_common', 'web.assets_backend') domain = [] for bundle in bundle_xml_ids: domain = expression.OR([ domain, [('name', 'ilike', bundle + '%')] ]) # start from a clean slate self.env['ir.attachment'].search(domain).unlink() self.env.registry._clear_cache() sendones = [] def patched_sendone(self, channel, notificationType, message): """ Control API and number of messages posted to the bus linked to bundle_changed events """ if notificationType == 'bundle_changed': sendones.append((channel, message)) self.patch(type(self.env['bus.bus']), '_sendone', patched_sendone) self.authenticate('admin', 'admin') self.url_open('/web') # One sendone for each asset bundle and for each CSS / JS self.assertEqual( len(sendones), 4, 'Received %s' % '\n'.join('%s - %s' % (tmp[0], tmp[1]) for tmp in sendones) ) for (channel, message) in sendones: self.assertEqual(channel, 'broadcast') self.assertEqual(len(message), 1) self.assertTrue(isinstance(message.get('server_version'), str))
37.627451
1,919
423
py
PYTHON
15.0
# Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo.tests import HttpCase class TestBusController(HttpCase): def test_health(self): response = self.url_open('/longpolling/health') self.assertEqual(response.status_code, 200) payload = response.json() self.assertEqual(payload['status'], 'pass') self.assertNotIn('session_id', response.cookies)
35.25
423
3,310
py
PYTHON
15.0
# -*- coding: utf-8 -*-
import datetime
import time

from psycopg2 import OperationalError

from odoo import api, fields, models
from odoo import tools
from odoo.addons.bus.models.bus import TIMEOUT
from odoo.service.model import PG_CONCURRENCY_ERRORS_TO_RETRY
from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT

DISCONNECTION_TIMER = TIMEOUT + 5
AWAY_TIMER = 1800  # 30 minutes


class BusPresence(models.Model):
    """ User presence: status is 'online', 'away' or 'offline'.

    Conceptually a one2one with res_users, but kept as a separate table to
    avoid database concurrency errors on res_users. Because 'update' runs at
    every poll, a user with several open tabs can trigger concurrent writes;
    those serialization errors are expected and are muted / logged away.
    """
    _name = 'bus.presence'
    _description = 'User Presence'
    _log_access = False

    user_id = fields.Many2one('res.users', 'Users', ondelete='cascade')
    last_poll = fields.Datetime('Last Poll', default=lambda self: fields.Datetime.now())
    last_presence = fields.Datetime('Last Presence', default=lambda self: fields.Datetime.now())
    status = fields.Selection([('online', 'Online'), ('away', 'Away'), ('offline', 'Offline')], 'IM Status', default='offline')

    def init(self):
        # At most one presence row per user; partial index so NULL user_id
        # rows (e.g. guest identities) are not constrained.
        self.env.cr.execute("CREATE UNIQUE INDEX IF NOT EXISTS bus_presence_user_unique ON %s (user_id) WHERE user_id IS NOT NULL" % self._table)

    @api.model
    def update(self, inactivity_period, identity_field, identity_value):
        """ Updates the last_poll and last_presence of the current user
            :param inactivity_period: duration in milliseconds
        """
        # Called from _poll(); the cursor is closed right after this method
        # returns (see bus/controllers/main.py), hence the explicit commit.
        try:
            # Hide transaction serialization errors: the presence update is
            # not essential, and the errors come from presence.write() only.
            with tools.mute_logger('odoo.sql_db'):
                self._update(inactivity_period=inactivity_period, identity_field=identity_field, identity_value=identity_value)
                # commit on success
                self.env.cr.commit()
        except OperationalError as e:
            if e.pgcode in PG_CONCURRENCY_ERRORS_TO_RETRY:
                # concurrent poll already updated the row: ignore
                return self.env.cr.rollback()
            raise

    @api.model
    def _update(self, inactivity_period, identity_field, identity_value):
        presence = self.search([(identity_field, '=', identity_value)], limit=1)
        # last_presence = now minus the client-reported inactivity
        last_presence = datetime.datetime.now() - datetime.timedelta(milliseconds=inactivity_period)
        values = {
            'last_poll': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
        }
        if not presence:
            # first poll for this identity: create its presence row
            values[identity_field] = identity_value
            values['last_presence'] = last_presence
            self.create(values)
        else:
            # never move last_presence backwards; always refresh last_poll
            if presence.last_presence < last_presence:
                values['last_presence'] = last_presence
            presence.write(values)
45.972222
3,310
8,411
py
PYTHON
15.0
# -*- coding: utf-8 -*-
import datetime
import json
import logging
import random
import select
import threading
import time

from psycopg2 import InterfaceError

import odoo
import odoo.service.server as servermod
from odoo import api, fields, models, SUPERUSER_ID
from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT
from odoo.tools import date_utils

_logger = logging.getLogger(__name__)

# longpolling timeout connection
TIMEOUT = 50

#----------------------------------------------------------
# Bus
#----------------------------------------------------------
def json_dump(v):
    # compact JSON so stored channel keys compare byte-for-byte
    return json.dumps(v, separators=(',', ':'), default=date_utils.json_default)

def hashable(key):
    # lists cannot be dict keys / set members: turn them into tuples
    if isinstance(key, list):
        key = tuple(key)
    return key

def channel_with_db(dbname, channel):
    # Normalize a channel spec: a record -> (db, model, id),
    # a plain string -> (db, string), anything else passes through.
    if isinstance(channel, models.Model):
        return (dbname, channel._name, channel.id)
    if isinstance(channel, str):
        return (dbname, channel)
    return channel

class ImBus(models.Model):
    """Persistent message queue backing the longpolling bus."""

    _name = 'bus.bus'
    _description = 'Communication Bus'

    channel = fields.Char('Channel')
    message = fields.Char('Message')

    @api.autovacuum
    def _gc_messages(self):
        # drop messages older than twice the poll timeout: no poller can
        # still be waiting for them
        timeout_ago = datetime.datetime.utcnow() - datetime.timedelta(seconds=TIMEOUT * 2)
        domain = [('create_date', '<', timeout_ago.strftime(DEFAULT_SERVER_DATETIME_FORMAT))]
        return self.sudo().search(domain).unlink()

    @api.model
    def _sendmany(self, notifications):
        """Store several (target, type, payload) notifications and wake pollers."""
        channels = set()
        values = []
        for target, notification_type, message in notifications:
            channel = channel_with_db(self.env.cr.dbname, target)
            channels.add(channel)
            values.append({
                'channel': json_dump(channel),
                'message': json_dump({
                    'type': notification_type,
                    'payload': message,
                })
            })
        self.sudo().create(values)
        if channels:
            # We have to wait until the notifications are commited in database.
            # When calling `NOTIFY imbus`, some concurrent threads will be
            # awakened and will fetch the notification in the bus table. If the
            # transaction is not commited yet, there will be nothing to fetch,
            # and the longpolling will return no notification.
            @self.env.cr.postcommit.add
            def notify():
                with odoo.sql_db.db_connect('postgres').cursor() as cr:
                    cr.execute("notify imbus, %s", (json_dump(list(channels)),))

    @api.model
    def _sendone(self, channel, notification_type, message):
        self._sendmany([[channel, notification_type, message]])

    @api.model
    def _poll(self, channels, last=0, options=None):
        # first poll (last == 0): replay the recent 'buffer' of messages;
        # otherwise return everything newer than the last seen id
        if last == 0:
            timeout_ago = datetime.datetime.utcnow() - datetime.timedelta(seconds=TIMEOUT)
            domain = [('create_date', '>', timeout_ago.strftime(DEFAULT_SERVER_DATETIME_FORMAT))]
        else:
            domain = [('id', '>', last)]
        channels = [json_dump(channel_with_db(self.env.cr.dbname, c)) for c in channels]
        domain.append(('channel', 'in', channels))
        notifications = self.sudo().search_read(domain)
        # shape the rows for the client: id + decoded message
        return [{
            'id': notif['id'],
            'message': json.loads(notif['message']),
        } for notif in notifications]

#----------------------------------------------------------
# Dispatcher
#----------------------------------------------------------

class ImDispatch:
    """Routes postgres 'imbus' notifications to waiting poll threads/greenlets."""

    def __init__(self):
        self.channels = {}      # hashable(channel) -> set of waiting events
        self.started = False    # listener thread/greenlet lazily started
        self.Event = None       # threading.Event or gevent Event, set in start()

    def poll(self, dbname, channels, last, options=None, timeout=None):
        channels = [channel_with_db(dbname, channel) for channel in channels]
        if timeout is None:
            timeout = TIMEOUT
        if options is None:
            options = {}
        # Dont hang ctrl-c for a poll request; we need to bypass private
        # attribute access because we dont know, before starting the thread,
        # that it will handle a longpolling request
        if not odoo.evented:
            current = threading.current_thread()
            current._daemonic = True
            # rename the thread to avoid tests waiting for a longpolling
            current.name = f"openerp.longpolling.request.{current.ident}"

        registry = odoo.registry(dbname)

        # immediatly returns if past notifications exist
        with registry.cursor() as cr:
            env = api.Environment(cr, SUPERUSER_ID, {})
            notifications = env['bus.bus']._poll(channels, last, options)

        # immediatly returns in peek mode
        if options.get('peek'):
            return dict(notifications=notifications, channels=channels)

        # or wait for future ones
        if not notifications:
            if not self.started:
                # Lazy start of events listener
                self.start()

            event = self.Event()
            for channel in channels:
                self.channels.setdefault(hashable(channel), set()).add(event)
            try:
                event.wait(timeout=timeout)
                with registry.cursor() as cr:
                    env = api.Environment(cr, SUPERUSER_ID, {})
                    notifications = env['bus.bus']._poll(channels, last, options)
            except Exception:
                # timeout
                pass
            finally:
                # gc pointers to event
                for channel in channels:
                    channel_events = self.channels.get(hashable(channel))
                    if channel_events and event in channel_events:
                        channel_events.remove(event)
        return notifications

    def loop(self):
        """ Dispatch postgres notifications to the relevant polling threads/greenlets """
        _logger.info("Bus.loop listen imbus on db postgres")
        with odoo.sql_db.db_connect('postgres').cursor() as cr:
            conn = cr._cnx
            cr.execute("listen imbus")
            cr.commit()
            while not stop_event.is_set():
                if select.select([conn], [], [], TIMEOUT) == ([], [], []):
                    # select timed out with nothing to read: loop again
                    pass
                else:
                    conn.poll()
                    channels = []
                    while conn.notifies:
                        channels.extend(json.loads(conn.notifies.pop().payload))
                    # dispatch to local threads/greenlets
                    events = set()
                    for channel in channels:
                        events.update(self.channels.pop(hashable(channel), set()))
                    for event in events:
                        event.set()

    def wakeup_workers(self):
        """ Wake up all http workers that are waiting for an event; useful on
        server shutdown, when they can't receive any more messages. """
        for events in self.channels.values():
            for event in events:
                event.set()

    def run(self):
        # keep the listener alive: restart loop() after transient errors,
        # but exit quietly once the server is stopping
        while not stop_event.is_set():
            try:
                self.loop()
            except Exception as exc:
                if isinstance(exc, InterfaceError) and stop_event.is_set():
                    # the connection was closed as part of shutdown
                    continue
                _logger.exception("Bus.loop error, sleep and retry")
                time.sleep(TIMEOUT)

    def start(self):
        if odoo.evented:
            # gevent mode
            import gevent.event  # pylint: disable=import-outside-toplevel
            self.Event = gevent.event.Event
            gevent.spawn(self.run)
        else:
            # threaded mode
            self.Event = threading.Event
            threading.Thread(name=f"{__name__}.Bus", target=self.run, daemon=True).start()
        self.started = True
        return self

# Partially undo a2ed3d3d5bdb6025a1ba14ad557a115a86413e65
# IMDispatch has a lazy start, so we could initialize it anyway
# And this avoids the Bus unavailable error messages
dispatch = ImDispatch()
stop_event = threading.Event()
if servermod.server:
    servermod.server.on_stop(stop_event.set)
    servermod.server.on_stop(dispatch.wakeup_workers)
36.729258
8,411