size (int64) | ext | lang | branch | content | avg_line_length (float64) | max_line_length (int64)
---|---|---|---|---|---|---
245 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
{
'name': "test read_group",
'description': "Tests for read_group",
'category': 'Hidden/Tests',
'version': '0.1',
'depends': ['base'],
'data': ['ir.model.access.csv'],
'license': 'LGPL-3',
}
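# For reference, an 'ir.model.access.csv' file conventionally starts with the
# header below; the access row shown is a hypothetical sketch, not this
# module's actual data:
#
#     id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
#     access_test_read_group,access_test_read_group,model_test_read_group_on_selection,base.group_user,1,1,1,1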
| 20.416667 | 245 |
7,222 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
from odoo.tests import common
class TestGroupOnSelection(common.TransactionCase):
def setUp(self):
super(TestGroupOnSelection, self).setUp()
self.Model = self.env['test_read_group.on_selection']
def test_none(self):
self.Model.create({'value': 1})
self.Model.create({'value': 2})
self.Model.create({'value': 3})
groups = self.Model.read_group([], fields=['state', 'value'], groupby=['state'])
self.assertEqual(groups, [
{
'state': 'a',
'state_count': 0,
'value': False,
'__domain': [('state', '=', 'a')],
},
{
'state': 'b',
'state_count': 0,
'value': False,
'__domain': [('state', '=', 'b')],
},
{
'state': False,
'state_count': 3,
'value': 6,
'__domain': [('state', '=', False)],
},
])
def test_partial(self):
self.Model.create({'state': 'a', 'value': 1})
self.Model.create({'state': 'a', 'value': 2})
self.Model.create({'value': 3})
groups = self.Model.read_group([], fields=['state', 'value'], groupby=['state'])
self.assertEqual(groups, [
{
'state': 'a',
'state_count': 2,
'value': 3,
'__domain': [('state', '=', 'a')],
},
{
'state': 'b',
'state_count': 0,
'value': False,
'__domain': [('state', '=', 'b')],
},
{
'state': False,
'state_count': 1,
'value': 3,
'__domain': [('state', '=', False)],
},
])
def test_full(self):
self.Model.create({'state': 'a', 'value': 1})
self.Model.create({'state': 'b', 'value': 2})
self.Model.create({'value': 3})
groups = self.Model.read_group([], fields=['state', 'value'], groupby=['state'])
self.assertEqual(groups, [
{
'state': 'a',
'state_count': 1,
'value': 1,
'__domain': [('state', '=', 'a')],
},
{
'state': 'b',
'state_count': 1,
'value': 2,
'__domain': [('state', '=', 'b')],
},
{
'state': False,
'state_count': 1,
'value': 3,
'__domain': [('state', '=', False)],
},
])
@common.tagged("test_read_group_selection")
class TestSelectionReadGroup(common.TransactionCase):
def setUp(self):
super().setUp()
self.Model = self.env['test_read_group.on_selection']
def test_static_group_expand(self):
        # This test verifies that, when grouping by a Selection field with
        # group_expand=True:
        # - the groups are returned in the same order as the options are
        #   declared in the field definition;
        # - empty groups are included, i.e. all groups are returned, even
        #   those with no records assigned to them (a wanted side-effect of
        #   the implementation);
        # - the false group, i.e. records without the Selection field set,
        #   comes last.
self.Model.create([
{"value": 1, "static_expand": "a"},
{"value": 2, "static_expand": "c"},
{"value": 3},
])
groups = self.Model.read_group(
[],
fields=["static_expand", "value"],
groupby=["static_expand"],
)
self.assertEqual(groups, [
{
'static_expand': 'c',
'static_expand_count': 1,
'value': 2,
'__domain': [('static_expand', '=', 'c')],
},
{
'static_expand': 'b',
'static_expand_count': 0,
'value': 0,
'__domain': [('static_expand', '=', 'b')],
},
{
'static_expand': 'a',
'static_expand_count': 1,
'value': 1,
'__domain': [('static_expand', '=', 'a')],
},
{
'static_expand': False,
'static_expand_count': 1,
'value': 3,
'__domain': [('static_expand', '=', False)],
},
])
def test_dynamic_group_expand(self):
        # Same as the test above, but with a Selection field whose options
        # are dynamic: the result of read_group when grouping by this field
        # can therefore change from one call to the next.
self.Model.create([
{"value": 1, "dynamic_expand": "a"},
{"value": 2, "dynamic_expand": "c"},
{"value": 3},
])
groups = self.Model.read_group(
[],
fields=["dynamic_expand", "value"],
groupby=["dynamic_expand"],
)
self.assertEqual(groups, [
{
'dynamic_expand': 'c',
'dynamic_expand_count': 1,
'value': 2,
'__domain': [('dynamic_expand', '=', 'c')],
},
{
'dynamic_expand': 'b',
'dynamic_expand_count': 0,
'value': 0,
'__domain': [('dynamic_expand', '=', 'b')],
},
{
'dynamic_expand': 'a',
'dynamic_expand_count': 1,
'value': 1,
'__domain': [('dynamic_expand', '=', 'a')],
},
{
'dynamic_expand': False,
'dynamic_expand_count': 1,
'value': 3,
'__domain': [('dynamic_expand', '=', False)],
},
])
def test_no_group_expand(self):
        # if group_expand is not defined on a Selection field, read_group should
        # return only the non-empty groups, in alphabetical order (PostgreSQL ordering)
self.Model.create([
{"value": 1, "no_expand": "a"},
{"value": 2, "no_expand": "c"},
{"value": 3},
])
groups = self.Model.read_group(
[],
fields=["no_expand", "value"],
groupby=["no_expand"],
)
self.assertEqual(groups, [
{
'no_expand': 'a',
'no_expand_count': 1,
'value': 1,
'__domain': [('no_expand', '=', 'a')],
},
{
'no_expand': 'c',
'no_expand_count': 1,
'value': 2,
'__domain': [('no_expand', '=', 'c')],
},
{
'no_expand': False,
'no_expand_count': 1,
'value': 3,
'__domain': [('no_expand', '=', False)],
},
])
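# A minimal sketch of the model these tests assume (hypothetical definitions;
# the real ones live in the module's models.py). Note that for
# test_static_group_expand to hold, static_expand must declare its options in
# the order c, b, a:
#
#     class OnSelection(models.Model):
#         _name = 'test_read_group.on_selection'
#         state = fields.Selection([('a', "A"), ('b', "B")], group_expand=...)  # expanded even when empty, see test_none
#         static_expand = fields.Selection([('c', "C"), ('b', "B"), ('a', "A")], group_expand=True)
#         dynamic_expand = fields.Selection(lambda self: [...], group_expand=True)
#         no_expand = fields.Selection([('a', "A"), ('b', "B"), ('c', "C")])
#         value = fields.Integer()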
| 32.38565 | 7,222 |
1,592 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
from odoo.tests import common
from odoo import Command
class TestAutoJoin(common.TransactionCase):
""" Test what happens when grouping with a domain using a one2many field with auto_join. """
def test_auto_join(self):
model = self.env['test_read_group.order']
records = model.create([{
'line_ids': [Command.create({'value': 1}), Command.create({'value': 2})],
}, {
'line_ids': [Command.create({'value': 1})],
}])
domain1 = [('id', 'in', records.ids), ('line_ids.value', '=', 1)]
domain2 = [('id', 'in', records.ids), ('line_ids.value', '>', 0)]
# reference results
self.assertEqual(len(model.search(domain1)), 2)
self.assertEqual(len(model.search(domain2)), 2)
result1 = model.read_group(domain1, [], [])
self.assertEqual(len(result1), 1)
self.assertEqual(result1[0]['__count'], 2)
result2 = model.read_group(domain2, [], [])
self.assertEqual(len(result2), 1)
self.assertEqual(result2[0]['__count'], 2)
# same requests, with auto_join
self.patch(type(model).line_ids, 'auto_join', True)
self.assertEqual(len(model.search(domain1)), 2)
self.assertEqual(len(model.search(domain2)), 2)
result1 = model.read_group(domain1, [], [])
self.assertEqual(len(result1), 1)
self.assertEqual(result1[0]['__count'], 2)
result2 = model.read_group(domain2, [], [])
self.assertEqual(len(result2), 1)
self.assertEqual(result2[0]['__count'], 2)
| 36.181818 | 1,592 |
8,958 | py | PYTHON | 15.0 |
""" Test read_group grouping with many2many fields """
from odoo.fields import Command
from odoo.tests import common
@common.tagged('test_m2m_read_group')
class TestM2MGrouping(common.TransactionCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.users = cls.env['test_read_group.user'].create([
{'name': 'Mario'},
{'name': 'Luigi'},
])
cls.tasks = cls.env['test_read_group.task'].create([
{ # both users
'name': "Super Mario Bros.",
'user_ids': [Command.set(cls.users.ids)],
},
{ # mario only
'name': "Paper Mario",
'user_ids': [Command.set(cls.users[0].ids)],
},
{ # luigi only
'name': "Luigi's Mansion",
'user_ids': [Command.set(cls.users[1].ids)],
},
{ # no user
'name': 'Donkey Kong',
'user_ids': [Command.set([])],
},
])
def test_base_users(self):
# group users
user_by_tasks = self.users.read_group(
domain=[],
fields=['name:array_agg'],
groupby=['task_ids'],
)
self.assertEqual(user_by_tasks, [
{ # first task: both users
'task_ids': (self.tasks[0].id, "Super Mario Bros."),
'task_ids_count': 2,
'name': ['Mario', 'Luigi'],
'__domain': [('task_ids', '=', self.tasks[0].id)],
},
{ # second task: Mario only
'task_ids': (self.tasks[1].id, "Paper Mario"),
'task_ids_count': 1,
'name': ['Mario'],
'__domain': [('task_ids', '=', self.tasks[1].id)],
},
{ # third task: Luigi only
'task_ids': (self.tasks[2].id, "Luigi's Mansion"),
'task_ids_count': 1,
'name': ['Luigi'],
'__domain': [('task_ids', '=', self.tasks[2].id)],
},
])
def test_base_tasks(self):
# consider the simplest case first: one task with two users
task_by_users = self.tasks.read_group(
domain=[('id', '=', self.tasks[0].id)],
fields=['name:array_agg'],
groupby=['user_ids'],
)
self.assertEqual(task_by_users, [
{ # task of Mario
'user_ids': (self.users[0].id, "Mario"),
'user_ids_count': 1,
'name': ["Super Mario Bros."],
'__domain': ['&', ('user_ids', '=', self.users[0].id), ('id', '=', self.tasks[0].id)],
},
{ # task of Luigi
'user_ids': (self.users[1].id, "Luigi"),
'user_ids_count': 1,
'name': ["Super Mario Bros."],
'__domain': ['&', ('user_ids', '=', self.users[1].id), ('id', '=', self.tasks[0].id)],
},
])
# now consider the full case: all tasks, with all user combinations
task_by_users = self.tasks.read_group(
domain=[],
fields=['name:array_agg'],
groupby=['user_ids'],
)
self.assertEqual(task_by_users, [
{ # tasks of Mario
'user_ids': (self.users[0].id, "Mario"),
'user_ids_count': 2,
'name': unordered(["Super Mario Bros.", "Paper Mario"]),
'__domain': [('user_ids', '=', self.users[0].id)],
},
{ # tasks of Luigi
'user_ids': (self.users[1].id, "Luigi"),
'user_ids_count': 2,
'name': unordered(["Super Mario Bros.", "Luigi's Mansion"]),
'__domain': [('user_ids', '=', self.users[1].id)],
},
{ # tasks of nobody
'user_ids': False,
'user_ids_count': 1,
'name': unordered(["Donkey Kong"]),
'__domain': [('user_ids', '=', False)],
},
])
# check that the domain returned by read_group is valid
tasks_from_domain = self.tasks.search(task_by_users[0]['__domain'])
self.assertEqual(tasks_from_domain, self.tasks[:2])
tasks_from_domain = self.tasks.search(task_by_users[1]['__domain'])
self.assertEqual(tasks_from_domain, self.tasks[0] + self.tasks[2])
tasks_from_domain = self.tasks.search(task_by_users[2]['__domain'])
self.assertEqual(tasks_from_domain, self.tasks[3])
def test_complex_case(self):
# group tasks with some ir.rule on users
users_model = self.env['ir.model']._get(self.users._name)
self.env['ir.rule'].create({
'name': "Only The Lone Wanderer allowed",
'model_id': users_model.id,
'domain_force': [('id', '=', self.users[0].id)],
})
# as superuser, ir.rule should not apply
expected = """
SELECT
min("test_read_group_task".id) AS id,
count("test_read_group_task".id) AS "user_ids_count",
array_agg("test_read_group_task"."name") AS "name",
"test_read_group_task__user_ids"."user_id" AS "user_ids"
FROM "test_read_group_task"
LEFT JOIN "test_read_group_task_user_rel" AS "test_read_group_task__user_ids"
ON ("test_read_group_task"."id" = "test_read_group_task__user_ids"."task_id")
GROUP BY "test_read_group_task__user_ids"."user_id"
ORDER BY "user_ids"
"""
with self.assertQueries([expected]):
as_admin = self.tasks.read_group(
domain=[],
fields=['name:array_agg'],
groupby=['user_ids'],
)
self.assertEqual(as_admin, [
{ # tasks of Mario
'user_ids': (self.users[0].id, "Mario"),
'user_ids_count': 2,
'name': unordered(["Super Mario Bros.", "Paper Mario"]),
'__domain': [('user_ids', '=', self.users[0].id)],
},
{ # tasks of Luigi
'user_ids': (self.users[1].id, "Luigi"),
'user_ids_count': 2,
'name': unordered(["Super Mario Bros.", "Luigi's Mansion"]),
'__domain': [('user_ids', '=', self.users[1].id)],
},
{ # tasks of nobody
'user_ids': False,
'user_ids_count': 1,
'name': unordered(["Donkey Kong"]),
'__domain': [('user_ids', '=', False)],
},
])
# as demo user, ir.rule should apply
tasks = self.tasks.with_user(self.browse_ref('base.user_demo'))
# warming up various caches; this avoids extra queries
tasks.read_group(domain=[], fields=['name:array_agg'], groupby=['user_ids'])
expected = """
SELECT
min("test_read_group_task".id) AS id,
count("test_read_group_task".id) AS "user_ids_count",
array_agg("test_read_group_task"."name") AS "name",
"test_read_group_task__user_ids"."user_id" AS "user_ids"
FROM "test_read_group_task"
LEFT JOIN "test_read_group_task_user_rel" AS "test_read_group_task__user_ids"
ON (
"test_read_group_task"."id" = "test_read_group_task__user_ids"."task_id"
AND "test_read_group_task__user_ids"."user_id" IN (
SELECT "test_read_group_user".id
FROM "test_read_group_user"
WHERE ("test_read_group_user"."id" = %s)
)
)
GROUP BY "test_read_group_task__user_ids"."user_id"
ORDER BY "user_ids"
"""
with self.assertQueries([expected]):
as_demo = tasks.read_group(
domain=[],
fields=['name:array_agg'],
groupby=['user_ids'],
)
self.assertEqual(as_demo, [
{ # tasks of Mario
'user_ids': (self.users[0].id, "Mario"),
'user_ids_count': 2,
'name': unordered(['Super Mario Bros.', 'Paper Mario']),
'__domain': [('user_ids', '=', self.users[0].id)],
},
{ # tasks of Luigi and no user
'user_ids': False,
'user_ids_count': 2,
'name': unordered(["Luigi's Mansion", 'Donkey Kong']),
'__domain': [('user_ids', '=', False)],
},
])
class unordered(list):
""" A list where equality is interpreted without ordering. """
__slots__ = ()
def __eq__(self, other):
return sorted(self) == sorted(other)
def __ne__(self, other):
return sorted(self) != sorted(other)
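# Example: unordered(["a", "b"]) == ["b", "a"] evaluates to True, which is what
# lets the assertions above compare array_agg results regardless of row order.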
| 39.289474 | 8,958 |
8,866 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
from odoo.tests import common
class TestGroupBooleans(common.TransactionCase):
def setUp(self):
super(TestGroupBooleans, self).setUp()
self.Model = self.env['test_read_group.aggregate.boolean']
def test_no_value(self):
groups = self.Model.read_group(
domain=[],
fields=['key', 'bool_and', 'bool_or', 'bool_array'],
groupby=['key'],
)
self.assertEqual([], groups)
def test_agg_and(self):
# and(true, true)
self.Model.create({
'key': 1,
'bool_and': True
})
self.Model.create({
'key': 1,
'bool_and': True
})
# and(true, false)
self.Model.create({'key': 2, 'bool_and': True})
self.Model.create({'key': 2, 'bool_and': False})
# and(false, false)
self.Model.create({'key': 3, 'bool_and': False})
self.Model.create({'key': 3, 'bool_and': False})
groups = self.Model.read_group(
domain=[],
fields=['key', 'bool_and'],
groupby=['key'],
)
self.assertEqual([
{
'key_count': 2,
'__domain': [('key', '=', 1)],
'key': 1,
'bool_and': True
},
{
'key_count': 2,
'__domain': [('key', '=', 2)],
'key': 2,
'bool_and': False
},
{
'key_count': 2,
'__domain': [('key', '=', 3)],
'key': 3,
'bool_and': False
},
], groups)
def test_agg_or(self):
# or(true, true)
self.Model.create({
'key': 1,
'bool_or': True
})
self.Model.create({
'key': 1,
'bool_or': True
})
# or(true, false)
self.Model.create({'key': 2, 'bool_or': True})
self.Model.create({'key': 2, 'bool_or': False})
# or(false, false)
self.Model.create({'key': 3, 'bool_or': False})
self.Model.create({'key': 3, 'bool_or': False})
groups = self.Model.read_group(
domain=[],
fields=['key', 'bool_or'],
groupby=['key'],
)
self.assertEqual([
{
'key_count': 2,
'__domain': [('key', '=', 1)],
'key': 1,
'bool_or': True
},
{
'key_count': 2,
'__domain': [('key', '=', 2)],
'key': 2,
'bool_or': True
},
{
'key_count': 2,
'__domain': [('key', '=', 3)],
'key': 3,
'bool_or': False
},
], groups)
def test_agg_array(self):
# array(true, true)
self.Model.create({
'key': 1,
'bool_array': True
})
self.Model.create({
'key': 1,
'bool_array': True
})
# array(true, false)
self.Model.create({'key': 2, 'bool_array': True})
self.Model.create({'key': 2, 'bool_array': False})
# array(false, false)
self.Model.create({'key': 3, 'bool_array': False})
self.Model.create({'key': 3, 'bool_array': False})
groups = self.Model.read_group(
domain=[],
fields=['key', 'bool_array'],
groupby=['key'],
)
self.assertEqual([
{
'key_count': 2,
'__domain': [('key', '=', 1)],
'key': 1,
'bool_array': [True, True]
},
{
'key_count': 2,
'__domain': [('key', '=', 2)],
'key': 2,
'bool_array': [True, False]
},
{
'key_count': 2,
'__domain': [('key', '=', 3)],
'key': 3,
'bool_array': [False, False]
},
], groups)
def test_group_by_aggregable(self):
self.Model.create({'bool_and': False, 'key': 1, 'bool_array': True})
self.Model.create({'bool_and': False, 'key': 2, 'bool_array': True})
self.Model.create({'bool_and': False, 'key': 2, 'bool_array': True})
self.Model.create({'bool_and': True, 'key': 2, 'bool_array': True})
self.Model.create({'bool_and': True, 'key': 3, 'bool_array': True})
self.Model.create({'bool_and': True, 'key': 3, 'bool_array': True})
groups = self.Model.read_group(
domain=[],
fields=['key', 'bool_and', 'bool_array'],
groupby=['bool_and', 'key'],
lazy=False
)
self.assertEqual([
{
'bool_and': False,
'key': 1,
'bool_array': [True],
'__count': 1,
'__domain': ['&', ('bool_and', '=', False), ('key', '=', 1)]
},
{
'bool_and': False,
'key': 2,
'bool_array': [True, True],
'__count': 2,
'__domain': ['&', ('bool_and', '=', False), ('key', '=', 2)]
},
{
'bool_and': True,
'key': 2,
'bool_array': [True],
'__count': 1,
'__domain': ['&', ('bool_and', '=', True), ('key', '=', 2)]
},
{
'bool_and': True,
'key': 3,
'bool_array': [True, True],
'__count': 2,
'__domain': ['&', ('bool_and', '=', True), ('key', '=', 3)]
}
], groups)
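# A sketch of how such boolean aggregates are typically declared (hypothetical
# field definitions, assuming the standard group_operator field attribute; the
# real model lives in the module's models.py):
#
#     class AggregateBoolean(models.Model):
#         _name = 'test_read_group.aggregate.boolean'
#         key = fields.Integer()
#         bool_and = fields.Boolean(group_operator='bool_and')
#         bool_or = fields.Boolean(group_operator='bool_or')
#         bool_array = fields.Boolean(group_operator='array_agg')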
class TestAggregate(common.TransactionCase):
def setUp(self):
super(TestAggregate, self).setUp()
self.foo = self.env['res.partner'].create({'name': 'Foo'})
self.bar = self.env['res.partner'].create({'name': 'Bar'})
self.Model = self.env['test_read_group.aggregate']
self.Model.create({'key': 1, 'value': 1, 'partner_id': False})
self.Model.create({'key': 1, 'value': 2, 'partner_id': self.foo.id})
self.Model.create({'key': 1, 'value': 3, 'partner_id': self.foo.id})
self.Model.create({'key': 1, 'value': 4, 'partner_id': self.bar.id})
def test_agg_default(self):
""" test default aggregation on fields """
fields = ['key', 'value', 'partner_id']
groups = self.Model.read_group([], fields, ['key'])
self.assertEqual(groups, [{
'key': 1,
'value': 10,
'key_count': 4,
'__domain': [('key', '=', 1)],
}])
def test_agg_explicit(self):
""" test explicit aggregation on fields """
fields = ['key', 'value:max', 'partner_id']
groups = self.Model.read_group([], fields, ['key'])
self.assertEqual(groups, [{
'key': 1,
'value': 4,
'key_count': 4,
'__domain': [('key', '=', 1)],
}])
fields = ['key', 'value', 'partner_id:array_agg']
groups = self.Model.read_group([], fields, ['key'])
self.assertEqual(groups, [{
'key': 1,
'value': 10,
'partner_id': [None, self.foo.id, self.foo.id, self.bar.id],
'key_count': 4,
'__domain': [('key', '=', 1)],
}])
fields = ['key', 'value', 'partner_id:count']
groups = self.Model.read_group([], fields, ['key'])
self.assertEqual(groups, [{
'key': 1,
'value': 10,
'partner_id': 3,
'key_count': 4,
'__domain': [('key', '=', 1)],
}])
fields = ['key', 'value', 'partner_id:count_distinct']
groups = self.Model.read_group([], fields, ['key'])
self.assertEqual(groups, [{
'key': 1,
'value': 10,
'partner_id': 2,
'key_count': 4,
'__domain': [('key', '=', 1)],
}])
def test_agg_multi(self):
""" test multiple aggregation on fields """
fields = ['key', 'value_min:min(value)', 'value_max:max(value)', 'partner_id']
groups = self.Model.read_group([], fields, ['key'])
self.assertEqual(groups, [{
'key': 1,
'value_min': 1,
'value_max': 4,
'key_count': 4,
'__domain': [('key', '=', 1)],
}])
fields = ['key', 'ids:array_agg(id)']
groups = self.Model.read_group([], fields, ['key'])
self.assertEqual(groups, [{
'key': 1,
'ids': self.Model.search([]).ids,
'key_count': 4,
'__domain': [('key', '=', 1)],
}])
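# The aggregate specifications exercised above come in two forms (inferred from
# these tests rather than stated as a formal grammar): 'field:agg' overrides
# the default aggregate of an existing field, while 'alias:agg(field)' stores
# agg(field) under a new key in the result dicts.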
| 31.439716 | 8,866 |
2,095 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
from odoo.tests import common
class TestEmptyDate(common.TransactionCase):
""" Test what happens when grouping on date fields and getting a "false"
grouping value
"""
def setUp(self):
super(TestEmptyDate, self).setUp()
self.Model = self.env['test_read_group.on_date']
def test_empty_only(self):
self.Model.create({'value': 1})
self.Model.create({'value': 2})
self.Model.create({'value': 3})
gb = self.Model.read_group([], ['date', 'value'], ['date'], lazy=False)
self.assertEqual(gb, [{
'__count': 3,
'__domain': [('date', '=', False)],
'__range': {'date': False},
'date': False,
'value': 6
}])
def test_empty_by_span(self):
self.Model.create({'value': 1})
self.Model.create({'value': 2})
self.Model.create({'value': 3})
gb = self.Model.read_group([], ['date', 'value'], ['date:quarter'], lazy=False)
self.assertEqual(gb, [{
'__count': 3,
'__domain': [('date', '=', False)],
'__range': {'date': False},
'date:quarter': False,
'value': 6
}])
def test_mixed(self):
self.Model.create({'date': False, 'value': 1})
self.Model.create({'date': False, 'value': 2})
self.Model.create({'date': '1916-12-18', 'value': 3})
self.Model.create({'date': '1916-12-18', 'value': 4})
gb = self.Model.read_group([], ['date', 'value'], ['date'], lazy=False)
self.assertSequenceEqual(sorted(gb, key=lambda r: r['date'] or ''), [{
'__count': 2,
'__domain': [('date', '=', False)],
'__range': {'date': False},
'date': False,
'value': 3,
}, {
'__count': 2,
'__domain': ['&', ('date', '>=', '1916-12-01'), ('date', '<', '1917-01-01')],
'__range': {'date': {'from': '1916-12-01', 'to': '1917-01-01'}},
'date': 'December 1916',
'value': 7,
}])
| 33.253968 | 2,095 |
40,884 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
"""Test for fill temporal."""
from odoo.tests import common
class TestFillTemporal(common.TransactionCase):
"""Test for fill temporal.
    This feature is mainly used in the graph view. For more information, read the
    documentation of the model's '_read_group_fill_temporal' method.
"""
def setUp(self):
super(TestFillTemporal, self).setUp()
self.Model = self.env['test_read_group.fill_temporal']
def test_date_range_and_flag(self):
"""Simple date range test, the flag is also tested.
One of the most simple test. It must verify that dates 'holes' are filled
only when the fill_temporal flag is set.
"""
self.Model.create({'date': '1916-08-18', 'value': 2})
self.Model.create({'date': '1916-10-19', 'value': 3})
self.Model.create({'date': '1916-12-19', 'value': 5})
expected = [{
'__domain': ['&', ('date', '>=', '1916-08-01'), ('date', '<', '1916-09-01')],
'__range': {'date': {'from': '1916-08-01', 'to': '1916-09-01'}},
'date': 'August 1916',
'date_count': 1,
'value': 2
}, {
'__domain': ['&', ('date', '>=', '1916-09-01'), ('date', '<', '1916-10-01')],
'__range': {'date': {'from': '1916-09-01', 'to': '1916-10-01'}},
'date': 'September 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-10-01'), ('date', '<', '1916-11-01')],
'__range': {'date': {'from': '1916-10-01', 'to': '1916-11-01'}},
'date': 'October 1916',
'date_count': 1,
'value': 3
}, {
'__domain': ['&', ('date', '>=', '1916-11-01'), ('date', '<', '1916-12-01')],
'__range': {'date': {'from': '1916-11-01', 'to': '1916-12-01'}},
'date': 'November 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-12-01'), ('date', '<', '1917-01-01')],
'__range': {'date': {'from': '1916-12-01', 'to': '1917-01-01'}},
'date': 'December 1916',
'date_count': 1,
'value': 5
}]
groups = self.Model.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, [group for group in expected if group['date_count']])
model_fill = self.Model.with_context(fill_temporal=True)
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_date_range_with_context_timezone(self):
"""Test if date are date_trunced correctly by pgres.
This test was added in attempt to fix a bug appearing with babel that
we use to translate the dates. Typically after a daylight saving, A
whole year was displayed in a graph like this (APR missing and OCT
appearing twice) :
JAN FEB MAR MAY JUN JUL AUG SEP OCT OCT NOV
^^^ ^^^
"""
self.Model.create({'date': '1915-01-01', 'value': 3})
self.Model.create({'date': '1916-01-01', 'value': 5})
expected = [{
'__domain': ['&', ('date', '>=', '1915-01-01'), ('date', '<', '1915-02-01')],
'__range': {'date': {'from': '1915-01-01', 'to': '1915-02-01'}},
'date': 'January 1915',
'date_count': 1,
'value': 3
}, {
'__domain': ['&', ('date', '>=', '1915-02-01'), ('date', '<', '1915-03-01')],
'__range': {'date': {'from': '1915-02-01', 'to': '1915-03-01'}},
'date': 'February 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-03-01'), ('date', '<', '1915-04-01')],
'__range': {'date': {'from': '1915-03-01', 'to': '1915-04-01'}},
'date': 'March 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-04-01'), ('date', '<', '1915-05-01')],
'__range': {'date': {'from': '1915-04-01', 'to': '1915-05-01'}},
'date': 'April 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-05-01'), ('date', '<', '1915-06-01')],
'__range': {'date': {'from': '1915-05-01', 'to': '1915-06-01'}},
'date': 'May 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-06-01'), ('date', '<', '1915-07-01')],
'__range': {'date': {'from': '1915-06-01', 'to': '1915-07-01'}},
'date': 'June 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-07-01'), ('date', '<', '1915-08-01')],
'__range': {'date': {'from': '1915-07-01', 'to': '1915-08-01'}},
'date': 'July 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-08-01'), ('date', '<', '1915-09-01')],
'__range': {'date': {'from': '1915-08-01', 'to': '1915-09-01'}},
'date': 'August 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-09-01'), ('date', '<', '1915-10-01')],
'__range': {'date': {'from': '1915-09-01', 'to': '1915-10-01'}},
'date': 'September 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-10-01'), ('date', '<', '1915-11-01')],
'__range': {'date': {'from': '1915-10-01', 'to': '1915-11-01'}},
'date': 'October 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-11-01'), ('date', '<', '1915-12-01')],
'__range': {'date': {'from': '1915-11-01', 'to': '1915-12-01'}},
'date': 'November 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1915-12-01'), ('date', '<', '1916-01-01')],
'__range': {'date': {'from': '1915-12-01', 'to': '1916-01-01'}},
'date': 'December 1915',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-01-01'), ('date', '<', '1916-02-01')],
'__range': {'date': {'from': '1916-01-01', 'to': '1916-02-01'}},
'date': 'January 1916',
'date_count': 1,
'value': 5
}]
# Time Zone UTC UTC DST
tzs = ["America/Anchorage", # −09:00 −08:00
"Europe/Brussels", # +01:00 +02:00
"Pacific/Kwajalein"] # +12:00 +12:00
for tz in tzs:
model_fill = self.Model.with_context(tz=tz, fill_temporal=True)
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_only_with_only_null_date(self):
"""We should have the same result when fill_temporal is set or not."""
self.Model.create({'date': False, 'value': 13})
self.Model.create({'date': False, 'value': 11})
self.Model.create({'date': False, 'value': 17})
expected = [{'__domain': [('date', '=', False)],
'__range': {'date': False},
'date_count': 3,
'value': 41,
'date': False}]
groups = self.Model.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
model_fill = self.Model.with_context(fill_temporal=True)
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_date_range_and_null_date(self):
"""Test data with null and non-null dates."""
self.Model.create({'date': '1916-08-19', 'value': 4})
self.Model.create({'date': False, 'value': 13})
self.Model.create({'date': '1916-10-18', 'value': 5})
self.Model.create({'date': '1916-08-18', 'value': 3})
self.Model.create({'date': '1916-10-19', 'value': 4})
self.Model.create({'date': False, 'value': 11})
expected = [{
'__domain': ['&', ('date', '>=', '1916-08-01'), ('date', '<', '1916-09-01')],
'__range': {'date': {'from': '1916-08-01', 'to': '1916-09-01'}},
'date': 'August 1916',
'date_count': 2,
'value': 7
}, {
'__domain': ['&', ('date', '>=', '1916-09-01'), ('date', '<', '1916-10-01')],
'__range': {'date': {'from': '1916-09-01', 'to': '1916-10-01'}},
'date': 'September 1916',
'date_count': 0,
'value': 0
}, {
'__domain': ['&', ('date', '>=', '1916-10-01'), ('date', '<', '1916-11-01')],
'__range': {'date': {'from': '1916-10-01', 'to': '1916-11-01'}},
'date': 'October 1916',
'date_count': 2,
'value': 9
}, {
'__domain': [('date', '=', False)],
'__range': {'date': False},
'date': False,
'date_count': 2,
'value': 24
}]
groups = self.Model.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, [group for group in expected if group['date_count']])
model_fill = self.Model.with_context(fill_temporal=True)
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_order_date_desc(self):
"""Test if changing Model._order has influence on the result."""
self.Model.create({'date': '1916-08-18', 'value': 3})
self.Model.create({'date': '1916-08-19', 'value': 4})
self.Model.create({'date': '1916-10-18', 'value': 5})
self.Model.create({'date': '1916-10-19', 'value': 4})
self.patch(type(self.Model), '_order', 'date desc')
expected = [{
'__domain': ['&', ('date', '>=', '1916-08-01'), ('date', '<', '1916-09-01')],
'__range': {'date': {'from': '1916-08-01', 'to': '1916-09-01'}},
'date': 'August 1916',
'date_count': 2,
'value': 7
}, {
'__domain': ['&', ('date', '>=', '1916-09-01'), ('date', '<', '1916-10-01')],
'__range': {'date': {'from': '1916-09-01', 'to': '1916-10-01'}},
'date': 'September 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-10-01'), ('date', '<', '1916-11-01')],
'__range': {'date': {'from': '1916-10-01', 'to': '1916-11-01'}},
'date': 'October 1916',
'date_count': 2,
'value': 9
}]
groups = self.Model.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, [group for group in expected if group['date_count']])
model_fill = self.Model.with_context(fill_temporal=True)
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_timestamp_without_timezone(self):
"""Test datetimes.
        Dates stored with a time component in the Odoo model are processed as
        timestamp without time zone by PostgreSQL.
"""
self.Model.create({'datetime': '1916-08-19 01:30:00', 'value': 7})
self.Model.create({'datetime': False, 'value': 13})
self.Model.create({'datetime': '1916-10-18 02:30:00', 'value': 5})
self.Model.create({'datetime': '1916-08-18 01:50:00', 'value': 3})
self.Model.create({'datetime': False, 'value': 11})
self.Model.create({'datetime': '1916-10-19 23:59:59', 'value': 2})
self.Model.create({'datetime': '1916-10-19', 'value': 19})
expected = [{
'__domain': ['&',
('datetime', '>=', '1916-08-01 00:00:00'),
('datetime', '<', '1916-09-01 00:00:00')],
'__range': {'datetime': {'from': '1916-08-01 00:00:00', 'to': '1916-09-01 00:00:00'}},
'datetime': 'August 1916',
'datetime_count': 2,
'value': 10
}, {
'__domain': ['&',
('datetime', '>=', '1916-09-01 00:00:00'),
('datetime', '<', '1916-10-01 00:00:00')],
'__range': {'datetime': {'from': '1916-09-01 00:00:00', 'to': '1916-10-01 00:00:00'}},
'datetime': 'September 1916',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-10-01 00:00:00'),
('datetime', '<', '1916-11-01 00:00:00')],
'__range': {'datetime': {'from': '1916-10-01 00:00:00', 'to': '1916-11-01 00:00:00'}},
'datetime': 'October 1916',
'datetime_count': 3,
'value': 26
}, {
'__domain': [('datetime', '=', False)],
'__range': {'datetime': False},
'datetime': False,
'datetime_count': 2,
'value': 24
}]
groups = self.Model.read_group([], fields=['datetime', 'value'], groupby=['datetime'])
self.assertEqual(groups, [group for group in expected if group['datetime_count']])
model_fill = self.Model.with_context(fill_temporal=True)
groups = model_fill.read_group([], fields=['datetime', 'value'], groupby=['datetime'])
self.assertEqual(groups, expected)
def test_with_datetimes_and_groupby_per_hour(self):
"""Test with datetimes and groupby per hour.
Test if datetimes are filled correctly when grouping by hours instead of
months.
"""
self.Model.create({'datetime': '1916-01-01 01:30:00', 'value': 2})
self.Model.create({'datetime': '1916-01-01 01:50:00', 'value': 8})
self.Model.create({'datetime': '1916-01-01 02:30:00', 'value': 3})
self.Model.create({'datetime': '1916-01-01 13:50:00', 'value': 5})
self.Model.create({'datetime': '1916-01-01 23:50:00', 'value': 7})
expected = [{
'__domain': ['&',
('datetime', '>=', '1916-01-01 01:00:00'),
('datetime', '<', '1916-01-01 02:00:00')],
'__range': {'datetime': {'from': '1916-01-01 01:00:00', 'to': '1916-01-01 02:00:00'}},
'datetime:hour': '01:00 01 Jan',
'datetime_count': 2,
'value': 10
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 02:00:00'),
('datetime', '<', '1916-01-01 03:00:00')],
'__range': {'datetime': {'from': '1916-01-01 02:00:00', 'to': '1916-01-01 03:00:00'}},
'datetime:hour': '02:00 01 Jan',
'datetime_count': 1,
'value': 3
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 03:00:00'),
('datetime', '<', '1916-01-01 04:00:00')],
'__range': {'datetime': {'from': '1916-01-01 03:00:00', 'to': '1916-01-01 04:00:00'}},
'datetime:hour': '03:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 04:00:00'),
('datetime', '<', '1916-01-01 05:00:00')],
'__range': {'datetime': {'from': '1916-01-01 04:00:00', 'to': '1916-01-01 05:00:00'}},
'datetime:hour': '04:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 05:00:00'),
('datetime', '<', '1916-01-01 06:00:00')],
'__range': {'datetime': {'from': '1916-01-01 05:00:00', 'to': '1916-01-01 06:00:00'}},
'datetime:hour': '05:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 06:00:00'),
('datetime', '<', '1916-01-01 07:00:00')],
'__range': {'datetime': {'from': '1916-01-01 06:00:00', 'to': '1916-01-01 07:00:00'}},
'datetime:hour': '06:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 07:00:00'),
('datetime', '<', '1916-01-01 08:00:00')],
'__range': {'datetime': {'from': '1916-01-01 07:00:00', 'to': '1916-01-01 08:00:00'}},
'datetime:hour': '07:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 08:00:00'),
('datetime', '<', '1916-01-01 09:00:00')],
'__range': {'datetime': {'from': '1916-01-01 08:00:00', 'to': '1916-01-01 09:00:00'}},
'datetime:hour': '08:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 09:00:00'),
('datetime', '<', '1916-01-01 10:00:00')],
'__range': {'datetime': {'from': '1916-01-01 09:00:00', 'to': '1916-01-01 10:00:00'}},
'datetime:hour': '09:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 10:00:00'),
('datetime', '<', '1916-01-01 11:00:00')],
'__range': {'datetime': {'from': '1916-01-01 10:00:00', 'to': '1916-01-01 11:00:00'}},
'datetime:hour': '10:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 11:00:00'),
('datetime', '<', '1916-01-01 12:00:00')],
'__range': {'datetime': {'from': '1916-01-01 11:00:00', 'to': '1916-01-01 12:00:00'}},
'datetime:hour': '11:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 12:00:00'),
('datetime', '<', '1916-01-01 13:00:00')],
'__range': {'datetime': {'from': '1916-01-01 12:00:00', 'to': '1916-01-01 13:00:00'}},
'datetime:hour': '12:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 13:00:00'),
('datetime', '<', '1916-01-01 14:00:00')],
'__range': {'datetime': {'from': '1916-01-01 13:00:00', 'to': '1916-01-01 14:00:00'}},
'datetime:hour': '01:00 01 Jan',
'datetime_count': 1,
'value': 5
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 14:00:00'),
('datetime', '<', '1916-01-01 15:00:00')],
'__range': {'datetime': {'from': '1916-01-01 14:00:00', 'to': '1916-01-01 15:00:00'}},
'datetime:hour': '02:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 15:00:00'),
('datetime', '<', '1916-01-01 16:00:00')],
'__range': {'datetime': {'from': '1916-01-01 15:00:00', 'to': '1916-01-01 16:00:00'}},
'datetime:hour': '03:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 16:00:00'),
('datetime', '<', '1916-01-01 17:00:00')],
'__range': {'datetime': {'from': '1916-01-01 16:00:00', 'to': '1916-01-01 17:00:00'}},
'datetime:hour': '04:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 17:00:00'),
('datetime', '<', '1916-01-01 18:00:00')],
'__range': {'datetime': {'from': '1916-01-01 17:00:00', 'to': '1916-01-01 18:00:00'}},
'datetime:hour': '05:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 18:00:00'),
('datetime', '<', '1916-01-01 19:00:00')],
'__range': {'datetime': {'from': '1916-01-01 18:00:00', 'to': '1916-01-01 19:00:00'}},
'datetime:hour': '06:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 19:00:00'),
('datetime', '<', '1916-01-01 20:00:00')],
'__range': {'datetime': {'from': '1916-01-01 19:00:00', 'to': '1916-01-01 20:00:00'}},
'datetime:hour': '07:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 20:00:00'),
('datetime', '<', '1916-01-01 21:00:00')],
'__range': {'datetime': {'from': '1916-01-01 20:00:00', 'to': '1916-01-01 21:00:00'}},
'datetime:hour': '08:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 21:00:00'),
('datetime', '<', '1916-01-01 22:00:00')],
'__range': {'datetime': {'from': '1916-01-01 21:00:00', 'to': '1916-01-01 22:00:00'}},
'datetime:hour': '09:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 22:00:00'),
('datetime', '<', '1916-01-01 23:00:00')],
'__range': {'datetime': {'from': '1916-01-01 22:00:00', 'to': '1916-01-01 23:00:00'}},
'datetime:hour': '10:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 23:00:00'),
('datetime', '<', '1916-01-02 00:00:00')],
'__range': {'datetime': {'from': '1916-01-01 23:00:00', 'to': '1916-01-02 00:00:00'}},
'datetime:hour': '11:00 01 Jan',
'datetime_count': 1,
'value': 7
}]
model_fill = self.Model.with_context(fill_temporal=True)
groups = model_fill.read_group([], fields=['datetime', 'value'], groupby=['datetime:hour'])
self.assertEqual(groups, expected)
def test_hour_with_timezones(self):
"""Test hour with timezones.
What we do here is similar to test_with_datetimes_and_groupby_per_hour
but with a timezone in the user context.
"""
self.Model.create({'datetime': '1915-12-31 22:30:00', 'value': 2})
self.Model.create({'datetime': '1916-01-01 03:30:00', 'value': 3})
expected = [{
'__domain': ['&',
('datetime', '>=', '1915-12-31 22:00:00'),
('datetime', '<', '1915-12-31 23:00:00')],
'__range': {'datetime': {'from': '1915-12-31 22:00:00', 'to': '1915-12-31 23:00:00'}},
'datetime:hour': '04:00 01 Jan',
'datetime_count': 1,
'value': 2
}, {
'__domain': ['&',
('datetime', '>=', '1915-12-31 23:00:00'),
('datetime', '<', '1916-01-01 00:00:00')],
'__range': {'datetime': {'from': '1915-12-31 23:00:00', 'to': '1916-01-01 00:00:00'}},
'datetime:hour': '05:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 00:00:00'),
('datetime', '<', '1916-01-01 01:00:00')],
'__range': {'datetime': {'from': '1916-01-01 00:00:00', 'to': '1916-01-01 01:00:00'}},
'datetime:hour': '06:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 01:00:00'),
('datetime', '<', '1916-01-01 02:00:00')],
'__range': {'datetime': {'from': '1916-01-01 01:00:00', 'to': '1916-01-01 02:00:00'}},
'datetime:hour': '07:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 02:00:00'),
('datetime', '<', '1916-01-01 03:00:00')],
'__range': {'datetime': {'from': '1916-01-01 02:00:00', 'to': '1916-01-01 03:00:00'}},
'datetime:hour': '08:00 01 Jan',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '1916-01-01 03:00:00'),
('datetime', '<', '1916-01-01 04:00:00')],
'__range': {'datetime': {'from': '1916-01-01 03:00:00', 'to': '1916-01-01 04:00:00'}},
'datetime:hour': '09:00 01 Jan',
'datetime_count': 1,
'value': 3
}]
model_fill = self.Model.with_context(tz='Asia/Hovd', fill_temporal=True)
groups = model_fill.read_group([], fields=['datetime', 'value'],
groupby=['datetime:hour'])
self.assertEqual(groups, expected)
def test_quarter_with_timezones(self):
"""Test quarter with timezones.
        We group a year by quarter and check that the result is consistent with the timezone.
"""
self.Model.create({'datetime': '2016-01-01 03:30:00', 'value': 2})
self.Model.create({'datetime': '2016-12-30 22:30:00', 'value': 3})
expected = [{
'__domain': ['&',
('datetime', '>=', '2015-12-31 17:00:00'),
('datetime', '<', '2016-03-31 16:00:00')],
'__range': {'datetime': {'from': '2015-12-31 17:00:00', 'to': '2016-03-31 16:00:00'}},
'datetime:quarter': 'Q1 2016',
'datetime_count': 1,
'value': 2
}, {
'__domain': ['&',
('datetime', '>=', '2016-03-31 16:00:00'),
('datetime', '<', '2016-06-30 16:00:00')],
'__range': {'datetime': {'from': '2016-03-31 16:00:00', 'to': '2016-06-30 16:00:00'}},
'datetime:quarter': 'Q2 2016',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '2016-06-30 16:00:00'),
('datetime', '<', '2016-09-30 17:00:00')],
'__range': {'datetime': {'from': '2016-06-30 16:00:00', 'to': '2016-09-30 17:00:00'}},
'datetime:quarter': 'Q3 2016',
'datetime_count': 0,
'value': False
}, {
'__domain': ['&',
('datetime', '>=', '2016-09-30 17:00:00'),
('datetime', '<', '2016-12-31 17:00:00')],
'__range': {'datetime': {'from': '2016-09-30 17:00:00', 'to': '2016-12-31 17:00:00'}},
'datetime:quarter': 'Q4 2016',
'datetime_count': 1,
'value': 3
}]
model_fill = self.Model.with_context(tz='Asia/Hovd', fill_temporal=True)
groups = model_fill.read_group([], fields=['datetime', 'value'],
groupby=['datetime:quarter'])
self.assertEqual(groups, expected)
def test_edge_fx_tz(self):
"""We test if different edge effect by using a different timezone from the user context
Suppose a user resident near Hovd, a city in Mongolia. he sells a product
at exacltly 4:00 AM on 1st January 2018. Using its context, that datetime
is previously converted to UTC time by the ORM so as being stored properly
inside the datebase. We are in winter time so 'Asia/Hovd' is UTC+7 :
'2018-01-01 04:00:00' --> '2017-12-31 21:00:00'
If that same user groups by datetime, we must ensure that the last
displayed date is in January and not in December.
"""
self.Model.create({'datetime': '2017-12-31 21:00:00', 'value': 42})
expected = [{
'__domain': ['&',
('datetime', '>=', '2017-12-31 17:00:00'),
('datetime', '<', '2018-01-31 17:00:00')],
'__range': {'datetime': {'from': '2017-12-31 17:00:00', 'to': '2018-01-31 17:00:00'}},
'datetime': 'January 2018',
'datetime_count': 1,
'value': 42
}]
model_fill = self.Model.with_context(tz='Asia/Hovd', fill_temporal=True)
groups = model_fill.read_group([], fields=['datetime', 'value'], groupby=['datetime'])
self.assertEqual(groups, expected)
def test_with_bounds(self):
"""Test the alternative dictionary format for the fill_temporal context key (fill_from, fill_to).
        We apply the fill_temporal logic only to a targeted portion of the result of a read_group.
        [fill_from, fill_to] are the inclusive bounds of this portion.
        Data outside those bounds will not be filtered out.
        Bounds are converted to the start of the period they belong to (depending on
        the granularity of the groupby), so any date within a period works as a bound
        for that period.
"""
self.Model.create({'date': '1916-02-15', 'value': 1})
self.Model.create({'date': '1916-06-15', 'value': 2})
self.Model.create({'date': '1916-11-15', 'value': 3})
expected = [{
'__domain': ['&', ('date', '>=', '1916-02-01'), ('date', '<', '1916-03-01')],
'__range': {'date': {'from': '1916-02-01', 'to': '1916-03-01'}},
'date': 'February 1916',
'date_count': 1,
'value': 1
}, {
'__domain': ['&', ('date', '>=', '1916-05-01'), ('date', '<', '1916-06-01')],
'__range': {'date': {'from': '1916-05-01', 'to': '1916-06-01'}},
'date': 'May 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-06-01'), ('date', '<', '1916-07-01')],
'__range': {'date': {'from': '1916-06-01', 'to': '1916-07-01'}},
'date': 'June 1916',
'date_count': 1,
'value': 2
}, {
'__domain': ['&', ('date', '>=', '1916-07-01'), ('date', '<', '1916-08-01')],
'__range': {'date': {'from': '1916-07-01', 'to': '1916-08-01'}},
'date': 'July 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-08-01'), ('date', '<', '1916-09-01')],
'__range': {'date': {'from': '1916-08-01', 'to': '1916-09-01'}},
'date': 'August 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-11-01'), ('date', '<', '1916-12-01')],
'__range': {'date': {'from': '1916-11-01', 'to': '1916-12-01'}},
'date': 'November 1916',
'date_count': 1,
'value': 3
}]
model_fill = self.Model.with_context(fill_temporal={"fill_from": '1916-05-15', "fill_to": '1916-08-15'})
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_upper_bound(self):
"""Test the alternative dictionary format for the fill_temporal context key (fill_to).
Same as with both bounds, but this time the first bound is the earliest group with data
(since only fill_to is set)
"""
self.Model.create({'date': '1916-02-15', 'value': 1})
expected = [{
'__domain': ['&', ('date', '>=', '1916-02-01'), ('date', '<', '1916-03-01')],
'__range': {'date': {'from': '1916-02-01', 'to': '1916-03-01'}},
'date': 'February 1916',
'date_count': 1,
'value': 1
}, {
'__domain': ['&', ('date', '>=', '1916-03-01'), ('date', '<', '1916-04-01')],
'__range': {'date': {'from': '1916-03-01', 'to': '1916-04-01'}},
'date': 'March 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-04-01'), ('date', '<', '1916-05-01')],
'__range': {'date': {'from': '1916-04-01', 'to': '1916-05-01'}},
'date': 'April 1916',
'date_count': 0,
'value': False
}]
model_fill = self.Model.with_context(fill_temporal={"fill_to": '1916-04-15'})
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_lower_bound(self):
"""Test the alternative dictionary format for the fill_temporal context key (fill_from).
        Same as with both bounds, but this time the second bound is the latest group with data
(since only fill_from is set)
"""
self.Model.create({'date': '1916-04-15', 'value': 1})
expected = [{
'__domain': ['&', ('date', '>=', '1916-02-01'), ('date', '<', '1916-03-01')],
'__range': {'date': {'from': '1916-02-01', 'to': '1916-03-01'}},
'date': 'February 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-03-01'), ('date', '<', '1916-04-01')],
'__range': {'date': {'from': '1916-03-01', 'to': '1916-04-01'}},
'date': 'March 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-04-01'), ('date', '<', '1916-05-01')],
'__range': {'date': {'from': '1916-04-01', 'to': '1916-05-01'}},
'date': 'April 1916',
'date_count': 1,
'value': 1
}]
model_fill = self.Model.with_context(fill_temporal={"fill_from": '1916-02-15'})
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_empty_context_key(self):
"""Test the alternative dictionary format for the fill_temporal context key.
        When the fill_temporal context key is set to an empty dictionary, it must be equivalent to True
"""
self.Model.create({'date': '1916-02-15', 'value': 1})
self.Model.create({'date': '1916-04-15', 'value': 2})
expected = [{
'__domain': ['&', ('date', '>=', '1916-02-01'), ('date', '<', '1916-03-01')],
'__range': {'date': {'from': '1916-02-01', 'to': '1916-03-01'}},
'date': 'February 1916',
'date_count': 1,
'value': 1
}, {
'__domain': ['&', ('date', '>=', '1916-03-01'), ('date', '<', '1916-04-01')],
'__range': {'date': {'from': '1916-03-01', 'to': '1916-04-01'}},
'date': 'March 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-04-01'), ('date', '<', '1916-05-01')],
'__range': {'date': {'from': '1916-04-01', 'to': '1916-05-01'}},
'date': 'April 1916',
'date_count': 1,
'value': 2
}]
model_fill = self.Model.with_context(fill_temporal={})
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_min_groups(self):
"""Test the alternative dictionary format for the fill_temporal context key (min_groups).
        We guarantee that at least a certain number of contiguous groups is returned, starting
        from the earliest group with data.
"""
self.Model.create({'date': '1916-02-15', 'value': 1})
expected = [{
'__domain': ['&', ('date', '>=', '1916-02-01'), ('date', '<', '1916-03-01')],
'__range': {'date': {'from': '1916-02-01', 'to': '1916-03-01'}},
'date': 'February 1916',
'date_count': 1,
'value': 1
}, {
'__domain': ['&', ('date', '>=', '1916-03-01'), ('date', '<', '1916-04-01')],
'__range': {'date': {'from': '1916-03-01', 'to': '1916-04-01'}},
'date': 'March 1916',
'date_count': 0,
'value': False
}]
model_fill = self.Model.with_context(fill_temporal={"min_groups": 2})
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
def test_with_bounds_and_min_groups(self):
"""Test the alternative dictionary format for the fill_temporal context key (fill_from, fill_to, min_groups).
        We guarantee that at least a certain number of contiguous groups is returned, starting
        from the fill_from bound. The fill_from bound takes precedence over the first group with
        data regarding min_groups (min_groups anchors itself on fill_from or, if not specified,
        on the first group with data).
        This number is not restricted by the fill_to bound, so, if necessary, the fill_temporal
        logic is applied beyond fill_to until min_groups is guaranteed.
        Groups outside the specified bounds are not counted as part of min_groups, unless added
        specifically to guarantee min_groups.
"""
self.Model.create({'date': '1916-02-15', 'value': 1})
self.Model.create({'date': '1916-06-15', 'value': 2})
self.Model.create({'date': '1916-11-15', 'value': 3})
expected = [{
'__domain': ['&', ('date', '>=', '1916-02-01'), ('date', '<', '1916-03-01')],
'__range': {'date': {'from': '1916-02-01', 'to': '1916-03-01'}},
'date': 'February 1916',
'date_count': 1,
'value': 1
}, {
'__domain': ['&', ('date', '>=', '1916-05-01'), ('date', '<', '1916-06-01')],
'__range': {'date': {'from': '1916-05-01', 'to': '1916-06-01'}},
'date': 'May 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-06-01'), ('date', '<', '1916-07-01')],
'__range': {'date': {'from': '1916-06-01', 'to': '1916-07-01'}},
'date': 'June 1916',
'date_count': 1,
'value': 2
}, {
'__domain': ['&', ('date', '>=', '1916-07-01'), ('date', '<', '1916-08-01')],
'__range': {'date': {'from': '1916-07-01', 'to': '1916-08-01'}},
'date': 'July 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-08-01'), ('date', '<', '1916-09-01')],
'__range': {'date': {'from': '1916-08-01', 'to': '1916-09-01'}},
'date': 'August 1916',
'date_count': 0,
'value': False
}, {
'__domain': ['&', ('date', '>=', '1916-11-01'), ('date', '<', '1916-12-01')],
'__range': {'date': {'from': '1916-11-01', 'to': '1916-12-01'}},
'date': 'November 1916',
'date_count': 1,
'value': 3
}]
model_fill = self.Model.with_context(fill_temporal={"fill_from": '1916-05-15', "fill_to": '1916-07-15', "min_groups": 4})
groups = model_fill.read_group([], fields=['date', 'value'], groupby=['date'])
self.assertEqual(groups, expected)
| 44.923077 | 40,880 |
631 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import BaseCase
from odoo.tools.rendering_tools import parse_inline_template
class TestParseInlineTemplate(BaseCase):
def test_no_expression(self):
text = 'a b c'
self.assertEqual(parse_inline_template(text), [('a b c', '')])
def test_expression1(self):
text = 'a {{b}}'
self.assertEqual(parse_inline_template(text), [('a ', 'b')])
def test_expression2(self):
text = 'a {{b}} c'
self.assertEqual(parse_inline_template(text), [('a ', 'b'), (' c', '')])
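    # By extrapolation from the cases above (not an asserted test), a template
    # with several expressions such as 'a {{b}} c {{d}}' would parse to
    # [('a ', 'b'), (' c ', 'd')].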
| 35.055556 | 631 |
6,613 | py | PYTHON | 15.0 |
import inspect
import logging
import time
import unittest
from .. import sql_db
_logger = logging.getLogger(__name__)
class OdooTestResult(unittest.result.TestResult):
"""
    This class is inspired by TextTestResult (https://github.com/python/cpython/blob/master/Lib/unittest/runner.py).
    Instead of using a stream, we use the logger, but replace "findCaller" in
    order to provide the information we have, based on the test object that is
    running.
"""
def __init__(self):
super().__init__()
self.time_start = None
self.queries_start = None
def __str__(self):
return f'{len(self.failures)} failed, {len(self.errors)} error(s) of {self.testsRun} tests'
def update(self, other):
""" Merges an other test result into this one, only updates contents
:type other: OdooTestResult
"""
self.failures.extend(other.failures)
self.errors.extend(other.errors)
self.testsRun += other.testsRun
self.skipped.extend(other.skipped)
self.expectedFailures.extend(other.expectedFailures)
self.unexpectedSuccesses.extend(other.unexpectedSuccesses)
self.shouldStop = self.shouldStop or other.shouldStop
def log(self, level, msg, *args, test=None, exc_info=None, extra=None, stack_info=False, caller_infos=None):
"""
``test`` is the running test case, ``caller_infos`` is
(fn, lno, func, sinfo) (logger.findCaller format), see logger.log for
the other parameters.
"""
test = test or self
while isinstance(test, unittest.case._SubTest) and test.test_case:
test = test.test_case
logger = logging.getLogger(test.__module__)
try:
caller_infos = caller_infos or logger.findCaller(stack_info)
except ValueError:
caller_infos = "(unknown file)", 0, "(unknown function)", None
(fn, lno, func, sinfo) = caller_infos
# using logger.log makes it difficult to spot-replace findCaller in
# order to provide useful location information (the problematic spot
# inside the test function), so use lower-level functions instead
if logger.isEnabledFor(level):
record = logger.makeRecord(logger.name, level, fn, lno, msg, args, exc_info, func, extra, sinfo)
logger.handle(record)
def getDescription(self, test):
if isinstance(test, unittest.case._SubTest):
return 'Subtest %s.%s %s' % (test.test_case.__class__.__qualname__, test.test_case._testMethodName, test._subDescription())
if isinstance(test, unittest.TestCase):
            # since we have the module name in the logger, this avoids duplicating module info in the log line
            # we only apply this for TestCase since we may receive error handlers or other special cases
return "%s.%s" % (test.__class__.__qualname__, test._testMethodName)
return str(test)
def startTest(self, test):
super().startTest(test)
self.log(logging.INFO, 'Starting %s ...', self.getDescription(test), test=test)
self.time_start = time.time()
self.queries_start = sql_db.sql_counter
def addError(self, test, err):
super().addError(test, err)
self.logError("ERROR", test, err)
def addFailure(self, test, err):
super().addFailure(test, err)
self.logError("FAIL", test, err)
def addSubTest(self, test, subtest, err):
        # since addSubTest does not call addFailure or addError, we need to handle it here too
# https://github.com/python/cpython/blob/3.7/Lib/unittest/result.py#L136
if err is not None:
if issubclass(err[0], test.failureException):
flavour = "FAIL"
else:
flavour = "ERROR"
self.logError(flavour, subtest, err)
super().addSubTest(test, subtest, err)
def addSkip(self, test, reason):
super().addSkip(test, reason)
self.log(logging.INFO, 'skipped %s', self.getDescription(test), test=test)
def addUnexpectedSuccess(self, test):
super().addUnexpectedSuccess(test)
self.log(logging.ERROR, 'unexpected success for %s', self.getDescription(test), test=test)
def logError(self, flavour, test, error):
err = self._exc_info_to_string(error, test)
caller_infos = self.getErrorCallerInfo(error, test)
self.log(logging.INFO, '=' * 70, test=test, caller_infos=caller_infos) # keep this as info !!!!!!
self.log(logging.ERROR, "%s: %s\n%s", flavour, self.getDescription(test), err, test=test, caller_infos=caller_infos)
def getErrorCallerInfo(self, error, test):
"""
:param error: A tuple (exctype, value, tb) as returned by sys.exc_info().
:param test: A TestCase that created this error.
:returns: a tuple (fn, lno, func, sinfo) matching the logger findCaller format or None
"""
        # only test cases should be executed in odoo; this is merely a safeguard
if isinstance(test, unittest.suite._ErrorHolder):
return
if not isinstance(test, unittest.TestCase):
            _logger.warning('%r is not a TestCase', test)
return
_, _, error_traceback = error
# move upwards the subtest hierarchy to find the real test
while isinstance(test, unittest.case._SubTest) and test.test_case:
test = test.test_case
method_tb = None
file_tb = None
filename = inspect.getfile(type(test))
        # Note: since _ErrorCatcher was introduced, we could always take the
        # last frame, keeping the check on the test method for safety.
        # Falling back on the file (for cleanups) should always be correct;
        # a minimal working version would be:
        #
        #     infos_tb = error_traceback
        #     while infos_tb.tb_next:
        #         infos_tb = infos_tb.tb_next
        #
while error_traceback:
code = error_traceback.tb_frame.f_code
if code.co_name in (test._testMethodName, 'setUp', 'tearDown'):
method_tb = error_traceback
if code.co_filename == filename:
file_tb = error_traceback
error_traceback = error_traceback.tb_next
infos_tb = method_tb or file_tb
if infos_tb:
code = infos_tb.tb_frame.f_code
lineno = infos_tb.tb_lineno
filename = code.co_filename
method = test._testMethodName
return (filename, lineno, method, None)
| 41.85443 | 6,613 |
8,954 |
py
|
PYTHON
|
15.0
|
#!/usr/bin/env python3
import argparse
import logging.config
import os
import sys
import time
sys.path.append(os.path.abspath(os.path.join(__file__,'../../../')))
import odoo
from odoo.tools import config, topological_sort, unique
from odoo.netsvc import init_logger
from odoo.tests import standalone_tests
import odoo.tests.loader
_logger = logging.getLogger('odoo.tests.test_module_operations')
BLACKLIST = {
'auth_ldap', 'document_ftp', 'website_instantclick', 'pad',
'pad_project', 'note_pad', 'pos_cache', 'pos_blackbox_be', 'payment_test',
}
IGNORE = ('hw_', 'theme_', 'l10n_', 'test_', 'payment_')
def install(db_name, module_id, module_name):
with odoo.registry(db_name).cursor() as cr:
env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
module = env['ir.module.module'].browse(module_id)
module.button_immediate_install()
_logger.info('%s installed', module_name)
def uninstall(db_name, module_id, module_name):
with odoo.registry(db_name).cursor() as cr:
env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
module = env['ir.module.module'].browse(module_id)
module.button_immediate_uninstall()
_logger.info('%s uninstalled', module_name)
def cycle(db_name, module_id, module_name):
install(db_name, module_id, module_name)
uninstall(db_name, module_id, module_name)
install(db_name, module_id, module_name)
class CheckAddons(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
self.values = namespace
config._check_addons_path(self, option_string, values, self)
def parse_args():
parser = argparse.ArgumentParser(
description="Script for testing the install / uninstall / reinstall"
" cycle of Odoo modules. Prefer the 'cycle' subcommand to"
" running this without anything specified (this is the"
" default behaviour).")
parser.set_defaults(
func=test_cycle,
reinstall=True,
)
fake_commands = parser.add_mutually_exclusive_group()
parser.add_argument("--database", "-d", type=str, required=True,
help="The database to test (/ run the command on)")
parser.add_argument("--data-dir", "-D", dest="data_dir", type=str,
help="Directory where to store Odoo data"
)
parser.add_argument("--skip", "-s", type=str,
help="Comma-separated list of modules to skip (they will only be installed)")
parser.add_argument("--resume-at", "-r", type=str,
help="Skip modules (only install) up to the specified one in topological order")
parser.add_argument("--addons-path", "-p", type=str, action=CheckAddons,
help="Comma-separated list of paths to directories containing extra Odoo modules")
cmds = parser.add_subparsers(title="subcommands", metavar='')
cycle = cmds.add_parser(
'cycle', help="Full install/uninstall/reinstall cycle.",
description="Installs, uninstalls, and reinstalls all modules which are"
" not skipped or blacklisted, the database should have"
" 'base' installed (only).")
cycle.set_defaults(func=test_cycle)
fake_commands.add_argument(
"--uninstall", "-U", action=UninstallAction,
help="Comma-separated list of modules to uninstall/reinstall. Prefer the 'uninstall' subcommand."
)
uninstall = cmds.add_parser(
'uninstall', help="Uninstallation",
description="Uninstalls then (by default) reinstalls every specified "
"module. Modules which are not installed before running "
"are ignored.")
uninstall.set_defaults(func=test_uninstall)
uninstall.add_argument('uninstall', help="comma-separated list of modules to uninstall/reinstall")
uninstall.add_argument(
'-n', '--no-reinstall', dest='reinstall', action='store_false',
help="Skips reinstalling the module(s) after uninstalling."
)
fake_commands.add_argument("--standalone", action=StandaloneAction,
help="Launch standalone scripts tagged with @standalone. Accepts a list of "
"module names or tags separated by commas. 'all' will run all available scripts. Prefer the 'standalone' subcommand."
)
standalone = cmds.add_parser('standalone', help="Run scripts tagged with @standalone")
standalone.set_defaults(func=test_standalone)
standalone.add_argument('standalone', help="List of module names or tags separated by commas, 'all' will run all available scripts.")
return parser.parse_args()
class UninstallAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
namespace.func = test_uninstall
setattr(namespace, self.dest, values)
class StandaloneAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
namespace.func = test_standalone
setattr(namespace, self.dest, values)
def test_cycle(args):
""" Test full install/uninstall/reinstall cycle for all modules """
with odoo.registry(args.database).cursor() as cr:
env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
def valid(module):
return not (
module.name in BLACKLIST
or module.name.startswith(IGNORE)
or module.state in ('installed', 'uninstallable')
)
modules = env['ir.module.module'].search([]).filtered(valid)
# order modules in topological order
modules = modules.browse(topological_sort({
module.id: module.dependencies_id.depend_id.ids
for module in modules
}))
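    # topological_sort, illustratively: given {1: [], 2: [1], 3: [1, 2]}
    # (hypothetical ids mapped to their dependency ids), it returns an order
    # where every module comes after its dependencies, e.g. [1, 2, 3].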
modules_todo = [(module.id, module.name) for module in modules]
resume = args.resume_at
skip = set(args.skip.split(',')) if args.skip else set()
for module_id, module_name in modules_todo:
if module_name == resume:
resume = None
if resume or module_name in skip:
install(args.database, module_id, module_name)
else:
cycle(args.database, module_id, module_name)
def test_uninstall(args):
""" Tries to uninstall/reinstall one ore more modules"""
for module_name in args.uninstall.split(','):
with odoo.registry(args.database).cursor() as cr:
env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
module = env['ir.module.module'].search([('name', '=', module_name)])
module_id, module_state = module.id, module.state
if module_state == 'installed':
uninstall(args.database, module_id, module_name)
if args.reinstall:
install(args.database, module_id, module_name)
elif module_state:
_logger.warning("Module %r is not installed", module_name)
else:
_logger.warning("Module %r does not exist", module_name)
def test_standalone(args):
""" Tries to launch standalone scripts tagged with @post_testing """
# load the registry once for script discovery
registry = odoo.registry(args.database)
for module_name in registry._init_modules:
# import tests for loaded modules
odoo.tests.loader.get_test_modules(module_name)
# fetch and filter scripts to test
funcs = list(unique(
func
for tag in args.standalone.split(',')
for func in standalone_tests[tag]
))
start_time = time.time()
for index, func in enumerate(funcs, start=1):
with odoo.registry(args.database).cursor() as cr:
env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
_logger.info("Executing standalone script: %s (%d / %d)",
func.__name__, index, len(funcs))
try:
func(env)
except Exception:
_logger.error("Standalone script %s failed", func.__name__, exc_info=True)
_logger.info("%d standalone scripts executed in %.2fs" % (len(funcs), time.time() - start_time))
if __name__ == '__main__':
args = parse_args()
# handle paths option
if args.addons_path:
odoo.tools.config['addons_path'] = ','.join([args.addons_path, odoo.tools.config['addons_path']])
if args.data_dir:
odoo.tools.config['data_dir'] = args.data_dir
odoo.modules.module.initialize_sys_path()
init_logger()
logging.config.dictConfig({
'version': 1,
'incremental': True,
'disable_existing_loggers': False,
'loggers': {
'odoo.modules.loading': {'level': 'CRITICAL'},
'odoo.sql_db': {'level': 'CRITICAL'},
'odoo.models.unlink': {'level': 'WARNING'},
'odoo.addons.base.models.ir_model': {'level': "WARNING"},
}
})
try:
args.func(args)
except Exception:
_logger.error("%s tests failed", args.func.__name__[5:])
raise
| 39.27193 | 8,954 |
118,449 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
"""
The module :mod:`odoo.tests.common` provides unittest test cases and a few
helpers and classes to write tests.
"""
import base64
import collections
import difflib
import functools
import importlib
import inspect
import itertools
import json
import logging
import operator
import os
import pathlib
import platform
import pprint
import re
import shutil
import signal
import subprocess
import sys
import tempfile
import threading
import time
import unittest
import warnings
from collections import defaultdict
from contextlib import contextmanager, ExitStack
from datetime import datetime, date
from itertools import zip_longest as izip_longest
from unittest.mock import patch
from xmlrpc import client as xmlrpclib
import requests
import werkzeug.urls
from decorator import decorator
from lxml import etree, html
import odoo
from odoo import api
from odoo.models import BaseModel
from odoo.exceptions import AccessError
from odoo.modules.registry import Registry
from odoo.osv.expression import normalize_domain, TRUE_LEAF, FALSE_LEAF
from odoo.service import security
from odoo.sql_db import BaseCursor, Cursor
from odoo.tools import float_compare, single_email_re, profiler
from odoo.tools.misc import find_in_path
from odoo.tools.safe_eval import safe_eval
try:
import websocket
except ImportError:
# chrome headless tests will be skipped
websocket = None
_logger = logging.getLogger(__name__)
# The odoo library is supposed already configured.
ADDONS_PATH = odoo.tools.config['addons_path']
HOST = '127.0.0.1'
# Useless constant, tests are aware of the content of demo data
ADMIN_USER_ID = odoo.SUPERUSER_ID
def get_db_name():
db = odoo.tools.config['db_name']
# If the database name is not provided on the command-line,
# use the one on the thread (which means if it is provided on
# the command-line, this will break when installing another
# database from XML-RPC).
if not db and hasattr(threading.current_thread(), 'dbname'):
return threading.current_thread().dbname
return db
standalone_tests = defaultdict(list)
def standalone(*tags):
""" Decorator for standalone test functions. This is somewhat dedicated to
tests that install, upgrade or uninstall some modules, which is currently
forbidden in regular test cases. The function is registered under the given
``tags`` and the corresponding Odoo module name.
"""
def register(func):
# register func by odoo module name
if func.__module__.startswith('odoo.addons.'):
module = func.__module__.split('.')[2]
standalone_tests[module].append(func)
        # register func under the given arbitrary tags, if any
for tag in tags:
standalone_tests[tag].append(func)
standalone_tests['all'].append(func)
return func
return register
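# Illustrative sketch of how @standalone is meant to be used (hypothetical
# function name and tag); the decorated function receives an environment and
# can be launched through the 'standalone' subcommand of
# test_module_operations.py:
#
#     @standalone('my_tag')
#     def test_standalone_example(env):
#         env['ir.module.module'].update_list()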
# For backwards-compatibility - get_db_name() should be used instead
DB = get_db_name()
def new_test_user(env, login='', groups='base.group_user', context=None, **kwargs):
""" Helper function to create a new test user. It allows to quickly create
users given its login and groups (being a comma separated list of xml ids).
Kwargs are directly propagated to the create to further customize the
created user.
User creation uses a potentially customized environment using the context
parameter allowing to specify a custom context. It can be used to force a
specific behavior and/or simplify record creation. An example is to use
mail-related context keys in mail tests to speedup record creation.
Some specific fields are automatically filled to avoid issues
* groups_id: it is filled using groups function parameter;
* name: "login (groups)" by default as it is required;
* email: it is either the login (if it is a valid email) or a generated
string '[email protected]' (x being the first login letter). This is due
to email being required for most odoo operations;
"""
if not login:
raise ValueError('New users require at least a login')
if not groups:
raise ValueError('New users require at least user groups')
if context is None:
context = {}
groups_id = [(6, 0, [env.ref(g.strip()).id for g in groups.split(',')])]
create_values = dict(kwargs, login=login, groups_id=groups_id)
# automatically generate a name as "Login (groups)" to ease user comprehension
if not create_values.get('name'):
create_values['name'] = '%s (%s)' % (login, groups)
    # automatically give a password based on the login, padded with 'x' up to 8 characters
if not create_values.get('password'):
create_values['password'] = login + 'x' * (8 - len(login))
    # generate an email if not given, as most tests require an email
if 'email' not in create_values:
if single_email_re.match(login):
create_values['email'] = login
else:
            create_values['email'] = '%s.%s@example.com' % (login[0], login[0])
# ensure company_id + allowed company constraint works if not given at create
if 'company_id' in create_values and 'company_ids' not in create_values:
create_values['company_ids'] = [(4, create_values['company_id'])]
return env['res.users'].with_context(**context).create(create_values)
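# Minimal usage sketch for new_test_user (hypothetical login and groups),
# assuming a test where `self.env` is available:
#
#     user = new_test_user(self.env, login='jdoe',
#                          groups='base.group_user,base.group_partner_manager')
#     # -> name 'jdoe (base.group_user,base.group_partner_manager)',
#     #    password 'jdoexxxx', and a generated email since 'jdoe' is not a
#     #    valid email address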
class RecordCapturer:
def __init__(self, model, domain):
self._model = model
self._domain = domain
def __enter__(self):
self._before = self._model.search(self._domain)
self._after = None
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
if exc_type is None:
self._after = self._model.search(self._domain) - self._before
@property
def records(self):
if self._after is None:
return self._model.search(self._domain) - self._before
return self._after
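# Usage sketch for RecordCapturer (hypothetical model and domain): capture the
# records created by a block of code without knowing their ids in advance.
#
#     with RecordCapturer(self.env['res.partner'], []) as capture:
#         self.env['res.partner'].create({'name': 'New'})
#     new_partners = capture.records  # records created inside the block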
# ------------------------------------------------------------
# Main classes
# ------------------------------------------------------------
class OdooSuite(unittest.suite.TestSuite):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
from odoo.http import root
if not root._loaded:
root.load_addons()
root._loaded = True
if sys.version_info < (3, 8):
# Partial backport of bpo-24412, merged in CPython 3.8
def _handleClassSetUp(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if result._moduleSetUpFailed:
return
if getattr(currentClass, "__unittest_skip__", False):
return
try:
currentClass._classSetupFailed = False
except TypeError:
# test may actually be a function
# so its class will be a builtin-type
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
if setUpClass is not None:
unittest.suite._call_if_exists(result, '_setupStdout')
try:
setUpClass()
except Exception as e:
if isinstance(result, unittest.suite._DebugResult):
raise
currentClass._classSetupFailed = True
className = unittest.util.strclass(currentClass)
self._createClassOrModuleLevelException(result, e,
'setUpClass',
className)
finally:
unittest.suite._call_if_exists(result, '_restoreStdout')
if currentClass._classSetupFailed is True:
if hasattr(currentClass, 'doClassCleanups'):
currentClass.doClassCleanups()
if len(currentClass.tearDown_exceptions) > 0:
for exc in currentClass.tearDown_exceptions:
self._createClassOrModuleLevelException(
result, exc[1], 'setUpClass', className,
info=exc)
def _createClassOrModuleLevelException(self, result, exc, method_name, parent, info=None):
errorName = f'{method_name} ({parent})'
self._addClassOrModuleLevelException(result, exc, errorName, info)
def _addClassOrModuleLevelException(self, result, exception, errorName, info=None):
error = unittest.suite._ErrorHolder(errorName)
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None and isinstance(exception, unittest.case.SkipTest):
addSkip(error, str(exception))
else:
if not info:
result.addError(error, sys.exc_info())
else:
result.addError(error, info)
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if getattr(previousClass, '_classSetupFailed', False):
return
if getattr(result, '_moduleSetUpFailed', False):
return
if getattr(previousClass, "__unittest_skip__", False):
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
if tearDownClass is not None:
unittest.suite._call_if_exists(result, '_setupStdout')
try:
tearDownClass()
except Exception as e:
if isinstance(result, unittest.suite._DebugResult):
raise
className = unittest.util.strclass(previousClass)
self._createClassOrModuleLevelException(result, e,
'tearDownClass',
className)
finally:
unittest.suite._call_if_exists(result, '_restoreStdout')
if hasattr(previousClass, 'doClassCleanups'):
previousClass.doClassCleanups()
if len(previousClass.tearDown_exceptions) > 0:
for exc in previousClass.tearDown_exceptions:
className = unittest.util.strclass(previousClass)
self._createClassOrModuleLevelException(result, exc[1],
'tearDownClass',
className,
info=exc)
class MetaCase(type):
""" Metaclass of test case classes to assign default 'test_tags':
'standard', 'at_install' and the name of the module.
"""
def __init__(cls, name, bases, attrs):
super(MetaCase, cls).__init__(name, bases, attrs)
# assign default test tags
if cls.__module__.startswith('odoo.addons.'):
cls.test_tags = {'standard', 'at_install'}
cls.test_module = cls.__module__.split('.')[2]
cls.test_class = cls.__name__
cls.test_sequence = 0
def _normalize_arch_for_assert(arch_string, parser_method="xml"):
"""Takes some xml and normalize it to make it comparable to other xml
in particular, blank text is removed, and the output is pretty-printed
:param arch_string: the string representing an XML arch
:type arch_string: str
    :param parser_method: a string representing which lxml.Parser class to use
when normalizing both archs. Takes either "xml" or "html"
:type parser_method: str
:return: the normalized arch
:rtype str:
"""
Parser = None
if parser_method == 'xml':
Parser = etree.XMLParser
elif parser_method == 'html':
Parser = etree.HTMLParser
parser = Parser(remove_blank_text=True)
arch_string = etree.fromstring(arch_string, parser=parser)
return etree.tostring(arch_string, pretty_print=True, encoding='unicode')
class BaseCase(unittest.TestCase, metaclass=MetaCase):
""" Subclass of TestCase for Odoo-specific code. This class is abstract and
expects self.registry, self.cr and self.uid to be initialized by subclasses.
"""
_python_version = sys.version_info
if _python_version < (3, 8):
# Partial backport of bpo-24412, merged in CPython 3.8
_class_cleanups = []
@classmethod
def addClassCleanup(cls, function, *args, **kwargs):
"""Same as addCleanup, except the cleanup items are called even if
setUpClass fails (unlike tearDownClass). Backport of bpo-24412."""
cls._class_cleanups.append((function, args, kwargs))
@classmethod
def doClassCleanups(cls):
"""Execute all class cleanup functions. Normally called for you after tearDownClass.
Backport of bpo-24412."""
cls.tearDown_exceptions = []
while cls._class_cleanups:
function, args, kwargs = cls._class_cleanups.pop()
try:
function(*args, **kwargs)
except Exception as exc:
cls.tearDown_exceptions.append(sys.exc_info())
longMessage = True # more verbose error message by default: https://www.odoo.com/r/Vmh
warm = True # False during warm-up phase (see :func:`warmup`)
def __init__(self, methodName='runTest'):
super().__init__(methodName)
self.addTypeEqualityFunc(etree._Element, self.assertTreesEqual)
self.addTypeEqualityFunc(html.HtmlElement, self.assertTreesEqual)
def shortDescription(self):
return None
def cursor(self):
return self.registry.cursor()
@property
def uid(self):
""" Get the current uid. """
return self.env.uid
@uid.setter
def uid(self, user):
""" Set the uid by changing the test's environment. """
self.env = self.env(user=user)
def ref(self, xid):
""" Returns database ID for the provided :term:`external identifier`,
shortcut for ``_xmlid_lookup``
:param xid: fully-qualified :term:`external identifier`, in the form
:samp:`{module}.{identifier}`
:raise: ValueError if not found
:returns: registered id
"""
return self.browse_ref(xid).id
def browse_ref(self, xid):
""" Returns a record object for the provided
:term:`external identifier`
:param xid: fully-qualified :term:`external identifier`, in the form
:samp:`{module}.{identifier}`
:raise: ValueError if not found
:returns: :class:`~odoo.models.BaseModel`
"""
assert "." in xid, "this method requires a fully qualified parameter, in the following form: 'module.identifier'"
return self.env.ref(xid)
def patch(self, obj, key, val):
""" Do the patch ``setattr(obj, key, val)``, and prepare cleanup. """
patcher = patch.object(obj, key, val) # this is unittest.mock.patch
patcher.start()
self.addCleanup(patcher.stop)
@contextmanager
def with_user(self, login):
""" Change user for a given test, like with self.with_user() ... """
old_uid = self.uid
try:
user = self.env['res.users'].sudo().search([('login', '=', login)])
assert user, "Login %s not found" % login
# switch user
self.uid = user.id
self.env = self.env(user=self.uid)
yield
finally:
# back
self.uid = old_uid
self.env = self.env(user=self.uid)
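    # Usage sketch (hypothetical login): run part of a test as another user.
    #
    #     def test_as_demo(self):
    #         with self.with_user('demo'):
    #             self.env['res.partner'].create({'name': 'mine'})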
@contextmanager
def _assertRaises(self, exception, *, msg=None):
""" Context manager that clears the environment upon failure. """
with ExitStack() as init:
if hasattr(self, 'env'):
init.enter_context(self.env.cr.savepoint())
if issubclass(exception, AccessError):
# The savepoint() above calls flush(), which leaves the
# record cache with lots of data. This can prevent
# access errors to be detected. In order to avoid this
# issue, we clear the cache before proceeding.
self.env.cr.clear()
with ExitStack() as inner:
cm = inner.enter_context(super().assertRaises(exception, msg=msg))
# *moves* the cleanups from init to inner, this ensures the
# savepoint gets rolled back when `yield` raises `exception`,
# but still allows the initialisation to be protected *and* not
# interfered with by `assertRaises`.
inner.push(init.pop_all())
yield cm
def assertRaises(self, exception, func=None, *args, **kwargs):
if func:
with self._assertRaises(exception):
func(*args, **kwargs)
else:
return self._assertRaises(exception, **kwargs)
@contextmanager
def assertQueries(self, expected, flush=True):
""" Check the queries made by the current cursor. ``expected`` is a list
of strings representing the expected queries being made. Query strings
are matched against each other, ignoring case and whitespaces.
"""
Cursor_execute = Cursor.execute
actual_queries = []
def execute(self, query, params=None, log_exceptions=None):
actual_queries.append(query)
return Cursor_execute(self, query, params, log_exceptions)
def get_unaccent_wrapper(cr):
return lambda x: x
if flush:
self.env.user.flush()
self.env.cr.flush()
with patch('odoo.sql_db.Cursor.execute', execute):
with patch('odoo.osv.expression.get_unaccent_wrapper', get_unaccent_wrapper):
yield actual_queries
if flush:
self.env.user.flush()
self.env.cr.flush()
self.assertEqual(
len(actual_queries), len(expected),
"\n---- actual queries:\n%s\n---- expected queries:\n%s" % (
"\n".join(actual_queries), "\n".join(expected),
)
)
for actual_query, expect_query in zip(actual_queries, expected):
self.assertEqual(
"".join(actual_query.lower().split()),
"".join(expect_query.lower().split()),
"\n---- actual query:\n%s\n---- not like:\n%s" % (actual_query, expect_query),
)
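    # Usage sketch (hypothetical query and record): case and whitespace are
    # ignored when matching, so expected queries may be formatted freely.
    #
    #     with self.assertQueries(['''
    #         UPDATE "res_partner" SET "name" = %s WHERE "id" IN %s
    #     ''']):
    #         partner.name = 'Xavier'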
@contextmanager
def assertQueryCount(self, default=0, flush=True, **counters):
""" Context manager that counts queries. It may be invoked either with
one value, or with a set of named arguments like ``login=value``::
with self.assertQueryCount(42):
...
with self.assertQueryCount(admin=3, demo=5):
...
The second form is convenient when used with :func:`users`.
"""
if self.warm:
# mock random in order to avoid random bus gc
with patch('random.random', lambda: 1):
login = self.env.user.login
expected = counters.get(login, default)
if flush:
self.env.user.flush()
self.env.cr.flush()
count0 = self.cr.sql_log_count
yield
if flush:
self.env.user.flush()
self.env.cr.flush()
count = self.cr.sql_log_count - count0
if count != expected:
# add some info on caller to allow semi-automatic update of query count
frame, filename, linenum, funcname, lines, index = inspect.stack()[2]
filename = filename.replace('\\', '/')
if "/odoo/addons/" in filename:
filename = filename.rsplit("/odoo/addons/", 1)[1]
if count > expected:
msg = "Query count more than expected for user %s: %d > %d in %s at %s:%s"
# add a subtest in order to continue the test_method in case of failures
with self.subTest():
self.fail(msg % (login, count, expected, funcname, filename, linenum))
else:
logger = logging.getLogger(type(self).__module__)
msg = "Query count less than expected for user %s: %d < %d in %s at %s:%s"
logger.info(msg, login, count, expected, funcname, filename, linenum)
else:
# flush before and after during warmup, in order to reproduce the
# same operations, otherwise the caches might not be ready!
if flush:
self.env.user.flush()
self.env.cr.flush()
yield
if flush:
self.env.user.flush()
self.env.cr.flush()
def assertRecordValues(self, records, expected_values):
''' Compare a recordset with a list of dictionaries representing the expected results.
This method performs a comparison element by element based on their index.
        Hence, the order of the expected values is extremely important.
Note that:
- Comparison between falsy values is supported: False match with None.
        - Comparison between monetary fields also takes the currency's rounding into account.
        - Comparison between x2many fields is done by ids. Hence, empty expected ids must be [].
        - Comparison between many2one fields is done by id. An empty comparison can be done using any falsy value.
:param records: The records to compare.
:param expected_values: List of dicts expected to be exactly matched in records
'''
def _compare_candidate(record, candidate, field_names):
''' Compare all the values in `candidate` with a record.
:param record: record being compared
:param candidate: dict of values to compare
            :return: A dictionary with the encountered differences in values.
'''
diff = {}
for field_name in field_names:
record_value = record[field_name]
field = record._fields[field_name]
field_type = field.type
if field_type == 'monetary':
# Compare monetary field.
currency_field_name = record._fields[field_name].get_currency_field(record)
record_currency = record[currency_field_name]
if field_name not in candidate:
diff[field_name] = (record_value, None)
elif record_currency:
if record_currency.compare_amounts(candidate[field_name], record_value):
diff[field_name] = (record_value, record_currency.round(candidate[field_name]))
elif candidate[field_name] != record_value:
diff[field_name] = (record_value, candidate[field_name])
elif field_type == 'float' and field.get_digits(record.env):
prec = field.get_digits(record.env)[1]
if float_compare(candidate[field_name], record_value, precision_digits=prec) != 0:
diff[field_name] = (record_value, candidate[field_name])
elif field_type in ('one2many', 'many2many'):
# Compare x2many relational fields.
# Empty comparison must be an empty list to be True.
if field_name not in candidate:
diff[field_name] = (sorted(record_value.ids), None)
elif set(record_value.ids) != set(candidate[field_name]):
diff[field_name] = (sorted(record_value.ids), sorted(candidate[field_name]))
elif field_type == 'many2one':
# Compare many2one relational fields.
# Every falsy value is allowed to compare with an empty record.
if field_name not in candidate:
diff[field_name] = (record_value.id, None)
elif (record_value or candidate[field_name]) and record_value.id != candidate[field_name]:
diff[field_name] = (record_value.id, candidate[field_name])
else:
                # Compare other fields if not both interpreted as falsy values.
if field_name not in candidate:
diff[field_name] = (record_value, None)
elif (candidate[field_name] or record_value) and record_value != candidate[field_name]:
diff[field_name] = (record_value, candidate[field_name])
return diff
# Compare records with candidates.
different_values = []
field_names = list(expected_values[0].keys())
for index, record in enumerate(records):
is_additional_record = index >= len(expected_values)
candidate = {} if is_additional_record else expected_values[index]
diff = _compare_candidate(record, candidate, field_names)
if diff:
different_values.append((index, 'additional_record' if is_additional_record else 'regular_diff', diff))
for index in range(len(records), len(expected_values)):
diff = {}
for field_name in field_names:
diff[field_name] = (None, expected_values[index][field_name])
different_values.append((index, 'missing_record', diff))
# Build error message.
if not different_values:
return
errors = ['The records and expected_values do not match.']
if len(records) != len(expected_values):
errors.append('Wrong number of records to compare: %d records versus %d expected values.' % (len(records), len(expected_values)))
for index, diff_type, diff in different_values:
if diff_type == 'regular_diff':
errors.append('\n==== Differences at index %s ====' % index)
record_diff = ['%s:%s' % (k, v[0]) for k, v in diff.items()]
candidate_diff = ['%s:%s' % (k, v[1]) for k, v in diff.items()]
errors.append('\n'.join(difflib.unified_diff(record_diff, candidate_diff)))
elif diff_type == 'additional_record':
errors += [
'\n==== Additional record ====',
pprint.pformat(dict((k, v[0]) for k, v in diff.items())),
]
elif diff_type == 'missing_record':
errors += [
'\n==== Missing record ====',
pprint.pformat(dict((k, v[1]) for k, v in diff.items())),
]
self.fail('\n'.join(errors))
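    # Usage sketch (hypothetical records and fields): one dict per record,
    # compared in order; x2many values are lists of ids, many2one values ids.
    #
    #     self.assertRecordValues(moves, [
    #         {'name': 'INV/001', 'partner_id': partner.id, 'line_ids': []},
    #         {'name': 'INV/002', 'partner_id': False, 'line_ids': lines.ids},
    #     ])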
# turns out this thing may not be quite as useful as we thought...
def assertItemsEqual(self, a, b, msg=None):
        self.assertCountEqual(a, b, msg=msg)
def assertTreesEqual(self, n1, n2, msg=None):
self.assertIsNotNone(n1, msg)
self.assertIsNotNone(n2, msg)
self.assertEqual(n1.tag, n2.tag, msg)
# Because lxml.attrib is an ordereddict for which order is important
# to equality, even though *we* don't care
self.assertEqual(dict(n1.attrib), dict(n2.attrib), msg)
self.assertEqual((n1.text or u'').strip(), (n2.text or u'').strip(), msg)
self.assertEqual((n1.tail or u'').strip(), (n2.tail or u'').strip(), msg)
for c1, c2 in izip_longest(n1, n2):
self.assertTreesEqual(c1, c2, msg)
def _assertXMLEqual(self, original, expected, parser="xml"):
"""Asserts that two xmls archs are equal
:param original: the xml arch to test
:type original: str
:param expected: the xml arch of reference
:type expected: str
        :param parser: a string representing which lxml.Parser class to use
when normalizing both archs. Takes either "xml" or "html"
:type parser: str
"""
if original:
original = _normalize_arch_for_assert(original, parser)
if expected:
expected = _normalize_arch_for_assert(expected, parser)
self.assertEqual(original, expected)
def assertXMLEqual(self, original, expected):
return self._assertXMLEqual(original, expected)
def assertHTMLEqual(self, original, expected):
return self._assertXMLEqual(original, expected, 'html')
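    # Usage sketch: both sides are normalized (blank text stripped,
    # pretty-printed) before comparison, so formatting differences don't fail.
    #
    #     self.assertXMLEqual('<form><field name="a"/></form>',
    #                         '<form>\n  <field name="a"/>\n</form>')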
def profile(self, description='', **kwargs):
test_method = getattr(self, '_testMethodName', 'Unknown test method')
if not hasattr(self, 'profile_session'):
self.profile_session = profiler.make_session(test_method)
return profiler.Profiler(
description='%s uid:%s %s %s' % (test_method, self.env.user.id, 'warm' if self.warm else 'cold', description),
db=self.env.cr.dbname,
profile_session=self.profile_session,
**kwargs)
def _callSetUp(self):
# This override is aimed at providing better error logs inside tests.
# First, we want errors to be logged whenever they appear instead of
# after the test, as the latter makes debugging harder and can even be
# confusing in the case of subtests.
#
# When a subtest is used inside a test, (1) the recovered traceback is
# not complete, and (2) the error is delayed to the end of the test
# method. There is unfortunately no simple way to hook inside a subtest
# to fix this issue. The method TestCase.subTest uses the context
# manager _Outcome.testPartExecutor as follows:
#
# with self._outcome.testPartExecutor(self._subtest, isTest=True):
# yield
#
# This context manager is actually also used for the setup, test method,
# teardown, cleanups. If an error occurs during any one of those, it is
# simply appended in TestCase._outcome.errors, and the latter is
# consumed at the end calling _feedErrorsToResult.
#
# The TestCase._outcome is set just before calling _callSetUp. This
# method is actually executed inside a testPartExecutor. Replacing it
# here ensures that all errors will be caught.
# See https://github.com/odoo/odoo/pull/107572 for more info.
self._outcome.errors = _ErrorCatcher(self)
super()._callSetUp()
class _ErrorCatcher(list):
""" This extends a list where errors are appended whenever they occur. The
purpose of this class is to feed the errors directly to the output, instead
of letting them accumulate until the test is over. It also improves the
traceback to make it easier to debug.
"""
__slots__ = ['test']
def __init__(self, test):
super().__init__()
self.test = test
def append(self, error):
exc_info = error[1]
if exc_info is not None:
exception_type, exception, tb = exc_info
tb = self._complete_traceback(tb)
exc_info = (exception_type, exception, tb)
self.test._feedErrorsToResult(self.test._outcome.result, [(error[0], exc_info)])
def _complete_traceback(self, initial_tb):
Traceback = type(initial_tb)
# make the set of frames in the traceback
tb_frames = set()
tb = initial_tb
while tb:
tb_frames.add(tb.tb_frame)
tb = tb.tb_next
tb = initial_tb
# find the common frame by searching the last frame of the current_stack present in the traceback.
current_frame = inspect.currentframe()
common_frame = None
while current_frame:
if current_frame in tb_frames:
common_frame = current_frame # we want to find the last frame in common
current_frame = current_frame.f_back
if not common_frame: # not really useful but safer
_logger.warning('No common frame found with current stack, displaying full stack')
tb = initial_tb
else:
            # remove the tb_frames until the common_frame is reached (keep the current_frame tb since the line is more accurate)
while tb and tb.tb_frame != common_frame:
tb = tb.tb_next
# add all current frame elements under the common_frame to tb
current_frame = common_frame.f_back
while current_frame:
tb = Traceback(tb, current_frame, current_frame.f_lasti, current_frame.f_lineno)
current_frame = current_frame.f_back
# remove traceback root part (odoo_bin, main, loading, ...), as
# everything under the testCase is not useful. Using '_callTestMethod',
# '_callSetUp', '_callTearDown', '_callCleanup' instead of the test
        # method since the error does not necessarily come from the test method.
while tb:
code = tb.tb_frame.f_code
if pathlib.PurePath(code.co_filename).name == 'case.py' and code.co_name in ('_callTestMethod', '_callSetUp', '_callTearDown', '_callCleanup'):
return tb.tb_next
tb = tb.tb_next
_logger.warning('No root frame found, displaying full stacks')
return initial_tb # this shouldn't be reached
savepoint_seq = itertools.count()
class TransactionCase(BaseCase):
""" Test class in which all test methods are run in a single transaction,
but each test method is run in a sub-transaction managed by a savepoint.
The transaction's cursor is always closed without committing.
The data setup common to all methods should be done in the class method
`setUpClass`, so that it is done once for all test methods. This is useful
for test cases containing fast tests but with significant database setup
common to all cases (complex in-db test data).
After being run, each test method cleans up the record cache and the
registry cache. However, there is no cleanup of the registry models and
fields. If a test modifies the registry (custom models and/or fields), it
should prepare the necessary cleanup (`self.registry.reset_changes()`).
"""
registry: Registry = None
env: api.Environment = None
cr: Cursor = None
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.registry = odoo.registry(get_db_name())
cls.addClassCleanup(cls.registry.reset_changes)
cls.addClassCleanup(cls.registry.clear_caches)
cls.cr = cls.registry.cursor()
cls.addClassCleanup(cls.cr.close)
cls.env = api.Environment(cls.cr, odoo.SUPERUSER_ID, {})
def setUp(self):
super().setUp()
        # restore environments after the test to avoid invoking flush() with an
        # invalid environment (nonexistent user id) from another test
envs = self.env.all.envs
for env in list(envs):
self.addCleanup(env.clear)
# restore the set of known environments as it was at setUp
self.addCleanup(envs.update, list(envs))
self.addCleanup(envs.clear)
self.addCleanup(self.registry.clear_caches)
# This prevents precommit functions and data from piling up
# until cr.flush is called in 'assertRaises' clauses
# (these are not cleared in self.env.clear or envs.clear)
cr = self.env.cr
def _reset(cb, funcs, data):
cb._funcs = funcs
cb.data = data
for callback in [cr.precommit, cr.postcommit, cr.prerollback, cr.postrollback]:
self.addCleanup(_reset, callback, collections.deque(callback._funcs), dict(callback.data))
# flush everything in setUpClass before introducing a savepoint
self.env['base'].flush()
self._savepoint_id = next(savepoint_seq)
self.cr.execute('SAVEPOINT test_%d' % self._savepoint_id)
self.addCleanup(self.cr.execute, 'ROLLBACK TO SAVEPOINT test_%d' % self._savepoint_id)
self.patch(self.registry['res.partner'], '_get_gravatar_image', lambda *a: False)
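# Illustrative sketch of a typical TransactionCase subclass (hypothetical test
# data): class-level records are created once in setUpClass, and each test
# method's changes are undone by the savepoint rollback registered above.
#
#     class TestPartner(TransactionCase):
#         @classmethod
#         def setUpClass(cls):
#             super().setUpClass()
#             cls.partner = cls.env['res.partner'].create({'name': 'A'})
#         def test_rename(self):
#             self.partner.name = 'B'  # rolled back after the test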
class SavepointCase(TransactionCase):
@classmethod
def __init_subclass__(cls):
super().__init_subclass__()
warnings.warn(
"Deprecated class SavepointCase has been merged into TransactionCase",
DeprecationWarning, stacklevel=2,
)
class SingleTransactionCase(BaseCase):
""" TestCase in which all test methods are run in the same transaction,
the transaction is started with the first test method and rolled back at
the end of the last.
"""
@classmethod
def __init_subclass__(cls):
super().__init_subclass__()
if issubclass(cls, TransactionCase):
_logger.warning("%s inherits from both TransactionCase and SingleTransactionCase")
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.registry = odoo.registry(get_db_name())
cls.addClassCleanup(cls.registry.reset_changes)
cls.addClassCleanup(cls.registry.clear_caches)
cls.cr = cls.registry.cursor()
cls.addClassCleanup(cls.cr.close)
cls.env = api.Environment(cls.cr, odoo.SUPERUSER_ID, {})
def setUp(self):
super(SingleTransactionCase, self).setUp()
self.env.user.flush()
class ChromeBrowserException(Exception):
pass
class ChromeBrowser():
""" Helper object to control a Chrome headless process. """
def __init__(self, logger, window_size, test_class):
self._logger = logger
self.test_class = test_class
if websocket is None:
self._logger.warning("websocket-client module is not installed")
raise unittest.SkipTest("websocket-client module is not installed")
self.devtools_port = None
self.ws_url = '' # WebSocketUrl
self.ws = None # websocket
self.request_id = 0
self.user_data_dir = tempfile.mkdtemp(suffix='_chrome_odoo')
self.chrome_pid = None
otc = odoo.tools.config
self.screenshots_dir = os.path.join(otc['screenshots'], get_db_name(), 'screenshots')
self.screencasts_dir = None
if otc['screencasts']:
self.screencasts_dir = os.path.join(otc['screencasts'], get_db_name(), 'screencasts')
self.screencast_frames = []
os.makedirs(self.screenshots_dir, exist_ok=True)
self.window_size = window_size
self.sigxcpu_handler = None
self._chrome_start()
self._find_websocket()
self._logger.info('Websocket url found: %s', self.ws_url)
self._open_websocket()
self._logger.info('Enable chrome headless console log notification')
self._websocket_send('Runtime.enable')
self._logger.info('Chrome headless enable page notifications')
self._websocket_send('Page.enable')
if os.name == 'posix':
self.sigxcpu_handler = signal.getsignal(signal.SIGXCPU)
signal.signal(signal.SIGXCPU, self.signal_handler)
def signal_handler(self, sig, frame):
if sig == signal.SIGXCPU:
_logger.info('CPU time limit reached, stopping Chrome and shutting down')
self.stop()
os._exit(0)
def stop(self):
if self.chrome_pid is not None:
self._logger.info("Closing chrome headless with pid %s", self.chrome_pid)
self._websocket_send('Browser.close')
self._logger.info("Terminating chrome headless with pid %s", self.chrome_pid)
os.kill(self.chrome_pid, signal.SIGTERM)
if self.user_data_dir and os.path.isdir(self.user_data_dir) and self.user_data_dir != '/':
self._logger.info('Removing chrome user profile "%s"', self.user_data_dir)
shutil.rmtree(self.user_data_dir, ignore_errors=True)
# Restore previous signal handler
if self.sigxcpu_handler and os.name == 'posix':
signal.signal(signal.SIGXCPU, self.sigxcpu_handler)
@property
def executable(self):
system = platform.system()
if system == 'Linux':
for bin_ in ['google-chrome', 'chromium', 'chromium-browser']:
try:
return find_in_path(bin_)
except IOError:
continue
elif system == 'Darwin':
bins = [
'/Applications/Google Chrome.app/Contents/MacOS/Google Chrome',
'/Applications/Chromium.app/Contents/MacOS/Chromium',
]
for bin_ in bins:
if os.path.exists(bin_):
return bin_
elif system == 'Windows':
bins = [
'%ProgramFiles%\\Google\\Chrome\\Application\\chrome.exe',
'%ProgramFiles(x86)%\\Google\\Chrome\\Application\\chrome.exe',
'%LocalAppData%\\Google\\Chrome\\Application\\chrome.exe',
]
for bin_ in bins:
bin_ = os.path.expandvars(bin_)
if os.path.exists(bin_):
return bin_
self._logger.warning("Chrome executable not found")
raise unittest.SkipTest("Chrome executable not found")
def _spawn_chrome(self, cmd):
if os.name == 'nt':
proc = subprocess.Popen(cmd, stderr=subprocess.DEVNULL)
pid = proc.pid
else:
pid = os.fork()
if pid != 0:
port_file = pathlib.Path(self.user_data_dir, 'DevToolsActivePort')
for _ in range(100):
time.sleep(0.1)
if port_file.is_file() and port_file.stat().st_size > 5:
with port_file.open('r', encoding='utf-8') as f:
self.devtools_port = int(f.readline())
break
else:
raise unittest.SkipTest('Failed to detect chrome devtools port after 2.5s.')
return pid
else:
if platform.system() != 'Darwin':
# since the introduction of pointer compression in Chrome 80 (v8 v8.0),
                # the memory reservation algorithm requires more than 8GiB of virtual mem for alignment;
                # this exceeds our default memory limits.
                # OSX already reserves huge memory for processes
import resource
resource.setrlimit(resource.RLIMIT_AS, (resource.RLIM_INFINITY, resource.RLIM_INFINITY))
# redirect browser stderr to /dev/null
with open(os.devnull, 'wb', 0) as stderr_replacement:
os.dup2(stderr_replacement.fileno(), sys.stderr.fileno())
os.execv(cmd[0], cmd)
def _chrome_start(self):
if self.chrome_pid is not None:
return
switches = {
'--headless': '',
'--no-default-browser-check': '',
'--no-first-run': '',
'--disable-extensions': '',
'--disable-background-networking' : '',
'--disable-background-timer-throttling' : '',
'--disable-backgrounding-occluded-windows': '',
'--disable-renderer-backgrounding' : '',
'--disable-breakpad': '',
'--disable-client-side-phishing-detection': '',
'--disable-crash-reporter': '',
'--disable-default-apps': '',
'--disable-dev-shm-usage': '',
'--disable-device-discovery-notifications': '',
'--disable-namespace-sandbox': '',
'--user-data-dir': self.user_data_dir,
'--disable-translate': '',
# required for tours that use Youtube autoplay conditions (namely website_slides' "course_tour")
'--autoplay-policy': 'no-user-gesture-required',
'--window-size': self.window_size,
'--remote-debugging-address': HOST,
'--remote-debugging-port': '0',
'--no-sandbox': '',
'--disable-gpu': '',
}
cmd = [self.executable]
cmd += ['%s=%s' % (k, v) if v else k for k, v in switches.items()]
url = 'about:blank'
cmd.append(url)
try:
self.chrome_pid = self._spawn_chrome(cmd)
except OSError:
raise unittest.SkipTest("%s not found" % cmd[0])
self._logger.info('Chrome pid: %s', self.chrome_pid)
def _find_websocket(self):
version = self._json_command('version')
self._logger.info('Browser version: %s', version['Browser'])
infos = self._json_command('', get_key=0) # Infos about the first tab
self.ws_url = infos['webSocketDebuggerUrl']
self._logger.info('Chrome headless temporary user profile dir: %s', self.user_data_dir)
def _json_command(self, command, timeout=3, get_key=None):
"""
Inspect dev tools with get
Available commands:
'' : return list of tabs with their id
list (or json/): list tabs
new : open a new tab
activate/ + an id: activate a tab
        close/ + an id: close a tab
version : get chrome and dev tools version
protocol : get the full protocol
"""
command = '/'.join(['json', command]).strip('/')
url = werkzeug.urls.url_join('http://%s:%s/' % (HOST, self.devtools_port), command)
self._logger.info("Issuing json command %s", url)
delay = 0.1
tries = 0
failure_info = None
while timeout > 0:
try:
os.kill(self.chrome_pid, 0)
except ProcessLookupError:
message = 'Chrome crashed at startup'
break
try:
r = requests.get(url, timeout=3)
if r.ok:
res = r.json()
if get_key is None:
return res
else:
return res[get_key]
except requests.ConnectionError as e:
failure_info = str(e)
message = 'Connection Error while trying to connect to Chrome debugger'
except requests.exceptions.ReadTimeout as e:
failure_info = str(e)
message = 'Connection Timeout while trying to connect to Chrome debugger'
break
except (KeyError, IndexError):
message = 'Key "%s" not found in json result "%s" after connecting to Chrome debugger' % (get_key, res)
time.sleep(delay)
timeout -= delay
delay = delay * 1.5
tries += 1
self._logger.error("%s after %s tries" % (message, tries))
if failure_info:
self._logger.info(failure_info)
self.stop()
raise unittest.SkipTest("Error during Chrome headless connection")
def _open_websocket(self):
self.ws = websocket.create_connection(self.ws_url, suppress_origin=True)
if self.ws.getstatus() != 101:
raise unittest.SkipTest("Cannot connect to chrome dev tools")
self.ws.settimeout(0.01)
def _websocket_send(self, method, params=None):
"""
send chrome devtools protocol commands through websocket
"""
if self.ws is None:
return
sent_id = self.request_id
payload = {
'method': method,
'id': sent_id,
}
if params:
payload.update({'params': params})
self.ws.send(json.dumps(payload))
self.request_id += 1
return sent_id
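    # Illustrative CDP message as serialized by the method above (hypothetical
    # id value):
    #
    #     {"method": "Page.navigate", "id": 7, "params": {"url": "about:blank"}}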
def _get_message(self, raise_log_error=True):
"""
:param bool raise_log_error:
by default, error logging messages reported by the browser are
            converted to exceptions in order to fail the current test.
            This is undesirable for *some* message loops, mostly when waiting
for a response to a command we've sent (wait_id): we do want to
properly handle exceptions and to forward the browser logs in order
to avoid losing information, but e.g. if the client generates two
console.error() we don't want the first call to take_screenshot to
trip up on the second console.error message and throw a second
exception. At the same time we don't want to *lose* the second
console.error as it might provide useful information.
"""
try:
res = json.loads(self.ws.recv())
except websocket.WebSocketTimeoutException:
res = {}
if res.get('method') == 'Runtime.consoleAPICalled':
params = res['params']
            # console formatting differs somewhat from Python's: if args[0] has
            # format modifiers, that many of args[1:] get formatted into it,
            # missing args are replaced by empty strings and extra args are
            # concatenated (space-separated)
#
# current version modifies the args in place which could and should
# probably be improved
arg0, args = '', []
if params.get('args'):
arg0 = str(self._from_remoteobject(params['args'][0]))
args = params['args'][1:]
formatted = [re.sub(r'%[%sdfoOc]', self.console_formatter(args), arg0)]
# formatter consumes args it uses, leaves unformatted args untouched
formatted.extend(str(self._from_remoteobject(arg)) for arg in args)
message = ' '.join(formatted)
stack = ''.join(self._format_stack(params))
if stack:
message += '\n' + stack
log_type = params['type']
if raise_log_error and log_type == 'error':
self.take_screenshot()
self._save_screencast()
raise ChromeBrowserException(message)
self._logger.getChild('browser').log(
self._TO_LEVEL.get(log_type, logging.INFO),
"%s", message # might still have %<x> characters
)
res['success'] = 'test successful' in message
if res.get('method') == 'Runtime.exceptionThrown':
details = res['params']['exceptionDetails']
message = details['text']
exception = details.get('exception')
if exception:
message += str(self._from_remoteobject(exception))
details['type'] = 'trace' # fake this so _format_stack works
stack = ''.join(self._format_stack(details))
if stack:
message += '\n' + stack
if raise_log_error:
self.take_screenshot()
self._save_screencast()
raise ChromeBrowserException(message)
else:
self._logger.getChild('browser').error(message)
return res
_TO_LEVEL = {
'debug': logging.DEBUG,
'log': logging.INFO,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
        # TODO: what to do with
# dir, dirxml, table, trace, clear, startGroup, startGroupCollapsed,
# endGroup, assert, profile, profileEnd, count, timeEnd
}
def _websocket_wait_id(self, awaited_id, timeout=10):
"""
blocking wait for a certain id in a response
        warning: other messages are discarded
"""
start_time = time.time()
while time.time() - start_time < timeout:
res = self._get_message(raise_log_error=False)
if res.get('id') == awaited_id:
return res
self._logger.info('timeout exceeded while waiting for id : %d', awaited_id)
return {}
def _websocket_wait_event(self, method, params=None, timeout=10):
"""
        blocking wait for a particular event method and optionally a dict of params
"""
start_time = time.time()
while time.time() - start_time < timeout:
res = self._get_message()
if res.get('method', '') == method:
if params:
if set(params).issubset(set(res.get('params', {}))):
return res
else:
return res
elif res:
self._logger.debug('chrome devtools protocol event: %s', res)
self._logger.info('timeout exceeded while waiting for : %s', method)
def take_screenshot(self, prefix='sc_', suffix=None):
if suffix is None:
suffix = '_%s' % self.test_class
ss_id = self._websocket_send('Page.captureScreenshot')
self._logger.info('Asked for screenshot (id: %s)', ss_id)
res = self._websocket_wait_id(ss_id)
base_png = res.get('result', {}).get('data')
if not base_png:
self._logger.warning("Couldn't capture screenshot: expected image data, got %s", res)
return
decoded = base64.b64decode(base_png, validate=True)
fname = '{}{:%Y%m%d_%H%M%S_%f}{}.png'.format(prefix, datetime.now(), suffix)
full_path = os.path.join(self.screenshots_dir, fname)
with open(full_path, 'wb') as f:
f.write(decoded)
self._logger.runbot('Screenshot in: %s', full_path)
def _save_screencast(self, prefix='failed'):
        # could be encoded with something like this:
# ffmpeg -framerate 3 -i frame_%05d.png output.mp4
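        # The concat script written below looks like this (illustrative paths
        # and durations):
        #     file '/tmp/.../frames/frame_00000.png'
        #     duration 0.2
        #     file '/tmp/.../frames/frame_00001.png'
        #     duration 0.2
        #     file '/tmp/.../frames/frame_00001.png'
        # the last frame is repeated without a duration, as required by the
        # concat demuxer.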
if not self.screencast_frames:
self._logger.debug('No screencast frames to encode')
return None
for f in self.screencast_frames:
with open(f['file_path'], 'rb') as b64_file:
frame = base64.decodebytes(b64_file.read())
os.unlink(f['file_path'])
f['file_path'] = f['file_path'].replace('.b64', '.png')
with open(f['file_path'], 'wb') as png_file:
png_file.write(frame)
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S_%f')
fname = '%s_screencast_%s.mp4' % (prefix, timestamp)
outfile = os.path.join(self.screencasts_dir, fname)
try:
ffmpeg_path = find_in_path('ffmpeg')
except IOError:
ffmpeg_path = None
if ffmpeg_path:
nb_frames = len(self.screencast_frames)
concat_script_path = os.path.join(self.screencasts_dir, fname.replace('.mp4', '.txt'))
with open(concat_script_path, 'w') as concat_file:
for i in range(nb_frames):
frame_file_path = os.path.join(self.screencasts_frames_dir, self.screencast_frames[i]['file_path'])
end_time = time.time() if i == nb_frames - 1 else self.screencast_frames[i+1]['timestamp']
duration = end_time - self.screencast_frames[i]['timestamp']
concat_file.write("file '%s'\nduration %s\n" % (frame_file_path, duration))
concat_file.write("file '%s'" % frame_file_path) # needed by the concat plugin
r = subprocess.run([ffmpeg_path, '-intra', '-f', 'concat','-safe', '0', '-i', concat_script_path, '-pix_fmt', 'yuv420p', outfile])
self._logger.log(25, 'Screencast in: %s', outfile)
else:
            outfile = outfile[:-len('.mp4')]
shutil.move(self.screencasts_frames_dir, outfile)
self._logger.runbot('Screencast frames in: %s', outfile)
def start_screencast(self):
if self.screencasts_dir:
os.makedirs(self.screencasts_dir, exist_ok=True)
self.screencasts_frames_dir = os.path.join(self.screencasts_dir, 'frames')
os.makedirs(self.screencasts_frames_dir, exist_ok=True)
self._websocket_send('Page.startScreencast')
def set_cookie(self, name, value, path, domain):
params = {'name': name, 'value': value, 'path': path, 'domain': domain}
_id = self._websocket_send('Network.setCookie', params=params)
return self._websocket_wait_id(_id)
def delete_cookie(self, name, **kwargs):
params = {kw:kwargs[kw] for kw in kwargs if kw in ['url', 'domain', 'path']}
params.update({'name': name})
_id = self._websocket_send('Network.deleteCookies', params=params)
return self._websocket_wait_id(_id)
def _wait_ready(self, ready_code, timeout=60):
self._logger.info('Evaluate ready code "%s"', ready_code)
awaited_result = {'type': 'boolean', 'value': True}
# catch errors in ready code to prevent opening error dialogs
ready_code = "try { %s } catch {}" % ready_code
ready_id = self._websocket_send('Runtime.evaluate', params={'expression': ready_code})
promise_id = None
last_bad_res = ''
start_time = time.time()
tdiff = time.time() - start_time
has_exceeded = False
while tdiff < timeout:
res = self._get_message()
if res.get('id') == ready_id:
result = res.get('result').get('result')
if result.get('subtype') == 'promise':
remote_promise_id = result.get('objectId')
promise_id = self._websocket_send('Runtime.awaitPromise', params={'promiseObjectId': remote_promise_id})
elif result == awaited_result:
if has_exceeded:
                        self._logger.info('The ready code took too much time: %s', tdiff)
return True
else:
last_bad_res = res
ready_id = self._websocket_send('Runtime.evaluate', params={'expression': ready_code})
if promise_id and res.get('id') == promise_id:
if has_exceeded:
self._logger.info('The ready promise took too much time: %s', tdiff)
return True
tdiff = time.time() - start_time
if tdiff >= 2 and not has_exceeded:
has_exceeded = True
self.take_screenshot(prefix='sc_failed_ready_')
self._logger.info('Ready code last try result: %s', last_bad_res or res)
return False
def _wait_code_ok(self, code, timeout):
self._logger.info('Evaluate test code "%s"', code)
code_id = self._websocket_send('Runtime.evaluate', params={'expression': code})
start_time = time.time()
logged_error = False
nb_frame = 0
while time.time() - start_time < timeout:
res = self._get_message()
if res.get('id', -1) == code_id:
self._logger.info('Code start result: %s', res)
if res.get('result', {}).get('result').get('subtype', '') == 'error':
raise ChromeBrowserException("Running code returned an error: %s" % res)
elif res.get('success'):
return True
elif res.get('method') == 'Page.screencastFrame':
session_id = res.get('params').get('sessionId')
self._websocket_send('Page.screencastFrameAck', params={'sessionId': int(session_id)})
outfile = os.path.join(self.screencasts_frames_dir, 'frame_%05d.b64' % nb_frame)
frame = res.get('params')
with open(outfile, 'w') as f:
f.write(frame.get('data'))
nb_frame += 1
self.screencast_frames.append({
'file_path': outfile,
'timestamp': frame.get('metadata').get('timestamp')
})
elif res:
self._logger.debug('chrome devtools protocol event: %s', res)
self.take_screenshot()
self._save_screencast()
raise ChromeBrowserException('Script timeout exceeded : %s' % (time.time() - start_time))
def navigate_to(self, url, wait_stop=False):
self._logger.info('Navigating to: "%s"', url)
nav_id = self._websocket_send('Page.navigate', params={'url': url})
nav_result = self._websocket_wait_id(nav_id)
self._logger.info("Navigation result: %s", nav_result)
frame_id = nav_result.get('result', {}).get('frameId', '')
if wait_stop and frame_id:
self._logger.info('Waiting for frame "%s" to stop loading', frame_id)
self._websocket_wait_event('Page.frameStoppedLoading', params={'frameId': frame_id})
def clear(self):
self._websocket_send('Page.stopScreencast')
if self.screencasts_dir and os.path.isdir(self.screencasts_frames_dir):
shutil.rmtree(self.screencasts_frames_dir)
self.screencast_frames = []
sl_id = self._websocket_send('Page.stopLoading')
self._websocket_wait_id(sl_id)
clear_service_workers = """
if ('serviceWorker' in navigator) {
navigator.serviceWorker.getRegistrations().then(
registrations => registrations.forEach(r => r.unregister())
)
}
"""
cl_id = self._websocket_send('Runtime.evaluate', params={'expression': clear_service_workers, 'awaitPromise': True})
self._websocket_wait_id(cl_id)
self._logger.info('Deleting cookies and clearing local storage')
dc_id = self._websocket_send('Network.clearBrowserCache')
self._websocket_wait_id(dc_id)
dc_id = self._websocket_send('Network.clearBrowserCookies')
self._websocket_wait_id(dc_id)
cl_id = self._websocket_send('Runtime.evaluate', params={'expression': 'localStorage.clear()'})
self._websocket_wait_id(cl_id)
self.navigate_to('about:blank', wait_stop=True)
def _from_remoteobject(self, arg):
""" attempts to make a CDT RemoteObject comprehensible
"""
objtype = arg['type']
subtype = arg.get('subtype')
if objtype == 'undefined':
# the undefined remoteobject is literally just {type: undefined}...
return 'undefined'
elif objtype != 'object' or subtype not in (None, 'array'):
# value is the json representation for json object
# otherwise fallback on the description which is "a string
# representation of the object" e.g. the traceback for errors, the
# source for functions, ... finally fallback on the entire arg mess
return arg.get('value', arg.get('description', arg))
elif subtype == 'array':
# apparently value is *not* the JSON representation for arrays
# instead it's just Array(3) which is useless, however the preview
# properties are the same as object which is useful (just ignore the
# name which is the index)
return '[%s]' % ', '.join(
repr(p['value']) if p['type'] == 'string' else str(p['value'])
for p in arg.get('preview', {}).get('properties', [])
if re.match(r'\d+', p['name'])
)
# all that's left is type=object, subtype=None aka custom or
# non-standard objects, print as TypeName(param=val, ...), sadly because
# of the way Odoo widgets are created they all appear as Class(...)
# nb: preview properties are *not* recursive, the value is *all* we get
return '%s(%s)' % (
arg.get('className') or 'object',
', '.join(
'%s=%s' % (p['name'], repr(p['value']) if p['type'] == 'string' else p['value'])
for p in arg.get('preview', {}).get('properties', [])
if p.get('value') is not None
)
)
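    # Illustrative rendering (assumed payload, not from the source): a remote
    # object such as
    #   {'type': 'object', 'className': 'Point',
    #    'preview': {'properties': [{'name': 'x', 'type': 'number', 'value': '1'},
    #                               {'name': 'y', 'type': 'number', 'value': '2'}]}}
    # falls through to the last branch and is rendered as "Point(x=1, y=2)".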
LINE_PATTERN = '\tat %(functionName)s (%(url)s:%(lineNumber)d:%(columnNumber)d)\n'
def _format_stack(self, logrecord):
if logrecord['type'] not in ['trace']:
return
trace = logrecord.get('stackTrace')
while trace:
for f in trace['callFrames']:
yield self.LINE_PATTERN % f
trace = trace.get('parent')
def console_formatter(self, args):
""" Formats similarly to the console API:
* if there are no args, don't format (return string as-is)
* %% -> %
* %c -> replace by styling directives (ignore for us)
* other known formatters -> replace by corresponding argument
* leftover known formatters (args exhausted) -> replace by empty string
* unknown formatters -> return as-is
"""
if not args:
return lambda m: m[0]
def replacer(m):
fmt = m[0][1]
if fmt == '%':
return '%'
if fmt in 'sdfoOc':
if not args:
return ''
repl = args.pop(0)
if fmt == 'c':
return ''
return str(self._from_remoteobject(repl))
return m[0]
return replacer
class Opener(requests.Session):
"""
Flushes and clears the current transaction when starting a request.
This is likely necessary when we make a request to the server, as the
request is made with a test cursor, which uses a different cache than this
transaction.
"""
def __init__(self, cr: BaseCursor):
super().__init__()
self.cr = cr
def request(self, *args, **kwargs):
        self.cr.flush()
        self.cr.clear()
return super().request(*args, **kwargs)
class Transport(xmlrpclib.Transport):
""" see :class:`Opener` """
def __init__(self, cr: BaseCursor):
self.cr = cr
super().__init__()
def request(self, *args, **kwargs):
        self.cr.flush()
        self.cr.clear()
return super().request(*args, **kwargs)
class HttpCase(TransactionCase):
""" Transactional HTTP TestCase with url_open and Chrome headless helpers. """
registry_test_mode = True
browser = None
browser_size = '1366x768'
_logger: logging.Logger = None
@classmethod
def setUpClass(cls):
super().setUpClass()
ICP = cls.env['ir.config_parameter']
ICP.set_param('web.base.url', cls.base_url())
ICP.flush()
# v8 api with correct xmlrpc exception handling.
cls.xmlrpc_url = f'http://{HOST}:{odoo.tools.config["http_port"]:d}/xmlrpc/2/'
cls._logger = logging.getLogger('%s.%s' % (cls.__module__, cls.__name__))
def setUp(self):
super().setUp()
if self.registry_test_mode:
self.registry.enter_test_mode(self.cr)
self.addCleanup(self.registry.leave_test_mode)
self.xmlrpc_common = xmlrpclib.ServerProxy(self.xmlrpc_url + 'common', transport=Transport(self.cr))
self.xmlrpc_db = xmlrpclib.ServerProxy(self.xmlrpc_url + 'db', transport=Transport(self.cr))
self.xmlrpc_object = xmlrpclib.ServerProxy(self.xmlrpc_url + 'object', transport=Transport(self.cr))
# setup an url opener helper
self.opener = Opener(self.cr)
@classmethod
def start_browser(cls):
# start browser on demand
if cls.browser is None:
cls.browser = ChromeBrowser(cls._logger, cls.browser_size, cls.__name__)
cls.addClassCleanup(cls.terminate_browser)
@classmethod
def terminate_browser(cls):
if cls.browser:
cls.browser.stop()
cls.browser = None
def url_open(self, url, data=None, files=None, timeout=10, headers=None, allow_redirects=True, head=False):
if url.startswith('/'):
url = "http://%s:%s%s" % (HOST, odoo.tools.config['http_port'], url)
if head:
return self.opener.head(url, data=data, files=files, timeout=timeout, headers=headers, allow_redirects=False)
if data or files:
return self.opener.post(url, data=data, files=files, timeout=timeout, headers=headers, allow_redirects=allow_redirects)
return self.opener.get(url, timeout=timeout, headers=headers, allow_redirects=allow_redirects)
def _wait_remaining_requests(self, timeout=10):
def get_http_request_threads():
return [t for t in threading.enumerate() if t.name.startswith('odoo.service.http.request.')]
start_time = time.time()
request_threads = get_http_request_threads()
self._logger.info('waiting for threads: %s', request_threads)
for thread in request_threads:
thread.join(timeout - (time.time() - start_time))
request_threads = get_http_request_threads()
for thread in request_threads:
self._logger.info("Stop waiting for thread %s handling request for url %s",
thread.name, getattr(thread, 'url', '<UNKNOWN>'))
if request_threads:
self._logger.info('remaining requests')
odoo.tools.misc.dumpstacks()
def logout(self, keep_db=True):
self.session.logout(keep_db=True)
odoo.http.root.session_store.save(self.session)
def authenticate(self, user, password):
db = get_db_name()
if getattr(self, 'session', None):
odoo.http.root.session_store.delete(self.session)
self.session = session = odoo.http.root.session_store.new()
session.db = db
if user: # if authenticated
# Flush and clear the current transaction. This is useful, because
# the call below opens a test cursor, which uses a different cache
# than this transaction.
self.cr.flush()
self.cr.clear()
uid = self.registry['res.users'].authenticate(db, user, password, {'interactive': False})
env = api.Environment(self.cr, uid, {})
session.uid = uid
session.login = user
session.session_token = uid and security.compute_session_token(session, env)
session.context = dict(env['res.users'].context_get() or {})
session.context['uid'] = uid
session._fix_lang(session.context)
odoo.http.root.session_store.save(session)
# Reset the opener: turns out when we set cookies['foo'] we're really
# setting a cookie on domain='' path='/'.
#
# But then our friendly neighborhood server might set a cookie for
# domain='localhost' path='/' (with the same value) which is considered
# a *different* cookie following ours rather than the same.
#
# When we update our cookie, it's done in-place, so the server-set
# cookie is still present and (as it follows ours and is more precise)
# very likely to still be used, therefore our session change is ignored.
#
# An alternative would be to set the cookie to None (unsetting it
# completely) or clear-ing session.cookies.
self.opener = Opener(self.cr)
self.opener.cookies['session_id'] = session.sid
if self.browser:
self._logger.info('Setting session cookie in browser')
self.browser.set_cookie('session_id', session.sid, '/', HOST)
return session
def browser_js(self, url_path, code, ready='', login=None, timeout=60, cookies=None, **kw):
""" Test js code running in the browser
        - optionally log in as 'login'
- load page given by url_path
- wait for ready object to be available
- eval(code) inside the page
        To signal success, the test should call: console.log('test successful')
        To signal failure, raise an exception or call console.error
"""
if not self.env.registry.loaded:
self._logger.warning('HttpCase test should be in post_install only')
# increase timeout if coverage is running
if any(f.filename.endswith('/coverage/execfile.py') for f in inspect.stack() if f.filename):
timeout = timeout * 1.5
self.start_browser()
try:
self.authenticate(login, login)
# Flush and clear the current transaction. This is useful in case
# we make requests to the server, as these requests are made with
            # test cursors, which use different caches than this transaction.
self.cr.flush()
self.cr.clear()
url = werkzeug.urls.url_join(self.base_url(), url_path)
self._logger.info('Open "%s" in browser', url)
if self.browser.screencasts_dir:
self._logger.info('Starting screencast')
self.browser.start_screencast()
if cookies:
for name, value in cookies.items():
self.browser.set_cookie(name, value, '/', HOST)
self.browser.navigate_to(url, wait_stop=not bool(ready))
# Needed because tests like test01.js (qunit tests) are passing a ready
# code = ""
ready = ready or "document.readyState === 'complete'"
            self.assertTrue(self.browser._wait_ready(ready), 'The ready code "%s" was always falsy' % ready)
error = False
try:
self.browser._wait_code_ok(code, timeout)
except ChromeBrowserException as chrome_browser_exception:
error = chrome_browser_exception
            if error:  # don't keep the initial traceback, handle it outside of the except block
if code:
message = 'The test code "%s" failed' % code
else:
message = "Some js test failed"
self.fail('%s\n%s' % (message, error))
finally:
# clear browser to make it stop sending requests, in case we call
# the method several times in a test method
self.browser.delete_cookie('session_id', domain=HOST)
self.browser.clear()
self._wait_remaining_requests()
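    # Usage sketch (URL and js code are illustrative assumptions): open a page
    # and consider the test successful as soon as the code logs the marker.
    #
    #     self.browser_js(
    #         '/web/tests?module=web',
    #         "console.log('test successful')",
    #         login='admin',
    #     )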
@classmethod
def base_url(cls):
return "http://%s:%s" % (HOST, odoo.tools.config['http_port'])
def start_tour(self, url_path, tour_name, step_delay=None, **kwargs):
"""Wrapper for `browser_js` to start the given `tour_name` with the
optional delay between steps `step_delay`. Other arguments from
`browser_js` can be passed as keyword arguments."""
step_delay = ', %s' % step_delay if step_delay else ''
code = kwargs.pop('code', "odoo.startTour('%s'%s)" % (tour_name, step_delay))
ready = kwargs.pop('ready', "odoo.__DEBUG__.services['web_tour.tour'].tours['%s'].ready" % tour_name)
return self.browser_js(url_path=url_path, code=code, ready=ready, **kwargs)
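    # Usage sketch (tour name is an assumption): a tour registered client-side
    # with web_tour can be driven from a test method like so.
    #
    #     def test_my_tour(self):
    #         self.start_tour('/web', 'my_module_tour', login='admin', step_delay=100)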
def profile(self, **kwargs):
"""
for http_case, also patch _get_profiler_context_manager in order to profile all requests
"""
sup = super()
_profiler = sup.profile(**kwargs)
def route_profiler(request):
return sup.profile(description=request.httprequest.full_path)
return profiler.Nested(_profiler, patch('odoo.http.Request._get_profiler_context_manager', route_profiler))
# kept for backward compatibility
class HttpSavepointCase(HttpCase):
@classmethod
def __init_subclass__(cls):
super().__init_subclass__()
warnings.warn(
"Deprecated class HttpSavepointCase has been merged into HttpCase",
DeprecationWarning, stacklevel=2,
)
def users(*logins):
""" Decorate a method to execute it once for each given user. """
@decorator
def _users(func, *args, **kwargs):
self = args[0]
old_uid = self.uid
try:
# retrieve users
Users = self.env['res.users'].with_context(active_test=False)
user_id = {
user.login: user.id
for user in Users.search([('login', 'in', list(logins))])
}
for login in logins:
with self.subTest(login=login):
# switch user and execute func
self.uid = user_id[login]
func(*args, **kwargs)
# Invalidate the cache between subtests, in order to not reuse
# the former user's cache (`test_read_mail`, `test_write_mail`)
self.env.cache.invalidate()
finally:
self.uid = old_uid
return _users
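# Usage sketch (logins are assumptions): run the same test body once per user,
# each run wrapped in its own subTest.
#
#     class TestAccess(TransactionCase):
#         @users('demo', 'admin')
#         def test_read_partners(self):
#             # self.env.user follows the login of the current run
#             self.env['res.partner'].search([], limit=1)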
@decorator
def warmup(func, *args, **kwargs):
""" Decorate a test method to run it twice: once for a warming up phase, and
a second time for real. The test attribute ``warm`` is set to ``False``
during warm up, and ``True`` once the test is warmed up. Note that the
effects of the warmup phase are rolled back thanks to a savepoint.
"""
self = args[0]
self.env['base'].flush()
self.env.cache.invalidate()
# run once to warm up the caches
self.warm = False
self.cr.execute('SAVEPOINT test_warmup')
func(*args, **kwargs)
self.env['base'].flush()
# run once for real
self.cr.execute('ROLLBACK TO SAVEPOINT test_warmup')
self.env.cache.invalidate()
self.warm = True
func(*args, **kwargs)
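# Usage sketch: ``warmup`` pairs naturally with query-count assertions so the
# measured run starts from a warm cache. The record, decorator order and
# budget below are assumptions.
#
#     @users('admin')
#     @warmup
#     def test_write_perf(self):
#         with self.assertQueryCount(admin=3):  # hypothetical budget
#             self.partner.write({'name': 'x'})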
def can_import(module):
""" Checks if <module> can be imported, returns ``True`` if it can be,
``False`` otherwise.
To use with ``unittest.skipUnless`` for tests conditional on *optional*
    dependencies, which may or may not be present but must still be tested if
possible.
"""
try:
importlib.import_module(module)
except ImportError:
return False
else:
return True
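# Usage sketch (module name is an assumption): guard a test on an optional
# dependency.
#
#     @unittest.skipUnless(can_import('xlrd'), "xlrd not available")
#     def test_xls_import(self):
#         ...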
# TODO: sub-views (o2m, m2m) -> sub-form?
# TODO: domains
ref_re = re.compile(r"""
# first match 'form_view_ref' key, backrefs are used to handle single or
# double quoting of the value
(['"])(?P<view_type>\w+_view_ref)\1
# colon separator (with optional spaces around)
\s*:\s*
# open quote for value
(['"])
(?P<view_id>
# we'll just match stuff which is normally part of an xid:
# word and "." characters
[.\w]+
)
# close with same quote as opening
\3
""", re.VERBOSE)
class Form(object):
""" Server-side form view implementation (partial)
Implements much of the "form view" manipulation flow, such that
server-side tests can more properly reflect the behaviour which would be
observed when manipulating the interface:
* call default_get and the relevant onchanges on "creation"
* call the relevant onchanges on setting fields
* properly handle defaults & onchanges around x2many fields
Saving the form returns the created record if in creation mode.
Regular fields can just be assigned directly to the form, for
:class:`~odoo.fields.Many2one` fields assign a singleton recordset::
# empty recordset => creation mode
f = Form(self.env['sale.order'])
f.partner_id = a_partner
so = f.save()
    When editing a record, use the form as a context manager to
    automatically save it at the end of the scope::
with Form(so) as f2:
f2.payment_term_id = env.ref('account.account_payment_term_15days')
# f2 is saved here
For :class:`~odoo.fields.Many2many` fields, the field itself is a
:class:`~odoo.tests.common.M2MProxy` and can be altered by adding or
removing records::
with Form(user) as u:
u.groups_id.add(env.ref('account.group_account_manager'))
u.groups_id.remove(id=env.ref('base.group_portal').id)
Finally :class:`~odoo.fields.One2many` are reified as
:class:`~odoo.tests.common.O2MProxy`.
Because the :class:`~odoo.fields.One2many` only exists through its
parent, it is manipulated more directly by creating "sub-forms"
with the :meth:`~odoo.tests.common.O2MProxy.new` and
:meth:`~odoo.tests.common.O2MProxy.edit` methods. These would
normally be used as context managers since they get saved in the
parent record::
with Form(so) as f3:
# add support
with f3.order_line.new() as line:
line.product_id = env.ref('product.product_product_2')
# add a computer
with f3.order_line.new() as line:
line.product_id = env.ref('product.product_product_3')
# we actually want 5 computers
with f3.order_line.edit(1) as line:
line.product_uom_qty = 5
# remove support
f3.order_line.remove(index=0)
# SO is saved here
:param recordp: empty or singleton recordset. An empty recordset will
put the view in "creation" mode and trigger calls to
default_get and on-load onchanges, a singleton will
put it in "edit" mode and only load the view's data.
:type recordp: odoo.models.Model
:param view: the id, xmlid or actual view object to use for
onchanges and view constraints. If none is provided,
simply loads the default view for the model.
:type view: int | str | odoo.model.Model
.. versionadded:: 12.0
"""
def __init__(self, recordp, view=None):
# necessary as we're overriding setattr
assert isinstance(recordp, BaseModel)
env = recordp.env
object.__setattr__(self, '_env', env)
# store model bit only
object.__setattr__(self, '_model', recordp.browse(()))
if isinstance(view, BaseModel):
assert view._name == 'ir.ui.view', "the view parameter must be a view id, xid or record, got %s" % view
view_id = view.id
elif isinstance(view, str):
view_id = env.ref(view).id
else:
view_id = view or False
fvg = recordp.fields_view_get(view_id, 'form')
fvg['tree'] = etree.fromstring(fvg['arch'])
object.__setattr__(self, '_view', fvg)
self._process_fvg(recordp, fvg)
# ordered?
vals = dict.fromkeys(fvg['fields'], False)
object.__setattr__(self, '_values', vals)
object.__setattr__(self, '_changed', set())
if recordp:
assert recordp['id'], "editing unstored records is not supported"
# always load the id
vals['id'] = recordp['id']
self._init_from_values(recordp)
else:
self._init_from_defaults(self._model)
def _o2m_set_edition_view(self, descr, node, level):
default_view = next(
(m for m in node.get('mode', 'tree').split(',') if m != 'form'),
'tree'
)
refs = {
m.group('view_type'): m.group('view_id')
for m in ref_re.finditer(node.get('context', ''))
}
# always fetch for simplicity, ensure we always have a tree and
# a form view
submodel = self._env[descr['relation']]
views = submodel.with_context(**refs) \
.load_views([(False, 'tree'), (False, 'form')])['fields_views']
# embedded views should take the priority on externals
views.update(descr['views'])
# re-set all resolved views on the descriptor
descr['views'] = views
# if the default view is a kanban or a non-editable list, the
# "edition controller" is the form view
edition = views['form']
edition['tree'] = etree.fromstring(edition['arch'])
if default_view == 'tree':
subarch = etree.fromstring(views['tree']['arch'])
if subarch.get('editable'):
edition = views['tree']
edition['tree'] = subarch
# don't recursively process o2ms in o2ms
self._process_fvg(submodel, edition, level=level-1)
descr['views']['edition'] = edition
def __str__(self):
return "<%s %s(%s)>" % (
type(self).__name__,
self._model._name,
self._values.get('id', False),
)
def _process_fvg(self, model, fvg, level=2):
""" Post-processes to augment the fields_view_get with:
* an id field (may not be present if not in the view but needed)
* pre-processed modifiers (map of modifier name to json-loaded domain)
* pre-processed onchanges list
"""
fvg['fields'].setdefault('id', {'type': 'id'})
# pre-resolve modifiers & bind to arch toplevel
modifiers = fvg['modifiers'] = {'id': {'required': False, 'readonly': True}}
contexts = fvg['contexts'] = {}
order = fvg['fields_ordered'] = []
for f in fvg['tree'].xpath('//field[not(ancestor::field)]'):
fname = f.get('name')
order.append(fname)
modifiers[fname] = {
modifier: bool(domain) if isinstance(domain, int) else normalize_domain(domain)
for modifier, domain in json.loads(f.get('modifiers', '{}')).items()
}
ctx = f.get('context')
if ctx:
contexts[fname] = ctx
descr = fvg['fields'].get(fname) or {'type': None}
# FIXME: better widgets support
# NOTE: selection breaks because of m2o widget=selection
if f.get('widget') in ['many2many']:
descr['type'] = f.get('widget')
if level and descr['type'] == 'one2many':
self._o2m_set_edition_view(descr, f, level)
fvg['onchange'] = model._onchange_spec(fvg)
def _init_from_defaults(self, model):
vals = self._values
vals.clear()
vals['id'] = False
# call onchange with an empty list of fields; this retrieves default
# values, applies onchanges and return the result
self._perform_onchange([])
# fill in whatever fields are still missing with falsy values
vals.update(
(f, _cleanup_from_default(descr['type'], False))
for f, descr in self._view['fields'].items()
if f not in vals
)
# mark all fields as modified (though maybe this should be done on
# save when creating for better reliability?)
self._changed.update(self._view['fields'])
def _init_from_values(self, values):
self._values.update(
record_to_values(self._view['fields'], values))
def __getattr__(self, field):
descr = self._view['fields'].get(field)
assert descr is not None, "%s was not found in the view" % field
v = self._values[field]
if descr['type'] == 'many2one':
Model = self._env[descr['relation']]
if not v:
return Model
return Model.browse(v)
elif descr['type'] == 'many2many':
return M2MProxy(self, field)
elif descr['type'] == 'one2many':
return O2MProxy(self, field)
return v
def _get_modifier(self, field, modifier, *, default=False, view=None, modmap=None, vals=None):
if view is None:
view = self._view
d = (modmap or view['modifiers'])[field].get(modifier, default)
if isinstance(d, bool):
return d
if vals is None:
vals = self._values
stack = []
for it in reversed(d):
if it == '!':
stack.append(not stack.pop())
elif it == '&':
e1 = stack.pop()
e2 = stack.pop()
stack.append(e1 and e2)
elif it == '|':
e1 = stack.pop()
e2 = stack.pop()
stack.append(e1 or e2)
elif isinstance(it, tuple):
if it == TRUE_LEAF:
stack.append(True)
continue
elif it == FALSE_LEAF:
stack.append(False)
continue
f, op, val = it
# hack-ish handling of parent.<field> modifiers
f, n = re.subn(r'^parent\.', '', f, 1)
if n:
field_val = vals['•parent•'][f]
else:
field_val = vals[f]
# apparent artefact of JS data representation: m2m field
# values are assimilated to lists of ids?
# FIXME: SSF should do that internally, but the requirement
# of recursively post-processing to generate lists of
# commands on save (e.g. m2m inside an o2m) means the
# data model needs proper redesign
# we're looking up the "current view" so bits might be
# missing when processing o2ms in the parent (see
# values_to_save:1450 or so)
f_ = view['fields'].get(f, {'type': None})
if f_['type'] == 'many2many':
# field value should be [(6, _, ids)], we want just the ids
field_val = field_val[0][2] if field_val else []
stack.append(self._OPS[op](field_val, val))
else:
raise ValueError("Unknown domain element %s" % [it])
[result] = stack
return result
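    # Worked example (illustrative): the modifier domain
    #     ['|', ('state', '=', 'draft'), ('state', '=', 'sent')]
    # is evaluated in prefix order over the stack above; with
    # vals = {'state': 'sent'} the two leaves push True and False, then '|'
    # pops them and leaves True as the result.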
_OPS = {
'=': operator.eq,
'==': operator.eq,
'!=': operator.ne,
'<': operator.lt,
'<=': operator.le,
'>=': operator.ge,
'>': operator.gt,
'in': lambda a, b: (a in b) if isinstance(b, (tuple, list)) else (b in a),
'not in': lambda a, b: (a not in b) if isinstance(b, (tuple, list)) else (b not in a),
}
def _get_context(self, field):
c = self._view['contexts'].get(field)
if not c:
return {}
# see _getEvalContext
# the context for a field's evals (of domain/context) is the composition of:
# * the parent's values
# * ??? element.context ???
# * the environment's context (?)
# * a few magic values
record_id = self._values.get('id') or False
ctx = dict(self._values_to_save(all_fields=True))
ctx.update(self._env.context)
ctx.update(
id=record_id,
active_id=record_id,
active_ids=[record_id] if record_id else [],
active_model=self._model._name,
current_date=date.today().strftime("%Y-%m-%d"),
)
return safe_eval(c, ctx, {'context': ctx})
def __setattr__(self, field, value):
descr = self._view['fields'].get(field)
assert descr is not None, "%s was not found in the view" % field
assert descr['type'] not in ('many2many', 'one2many'), \
"Can't set an o2m or m2m field, manipulate the corresponding proxies"
# TODO: consider invisible to be the same as readonly?
assert not self._get_modifier(field, 'readonly'), \
"can't write on readonly field {}".format(field)
if descr['type'] == 'many2one':
assert isinstance(value, BaseModel) and value._name == descr['relation']
# store just the id: that's the output of default_get & (more
# or less) onchange.
value = value.id
self._values[field] = value
self._perform_onchange([field])
# enables with Form(...) as f: f.a = 1; f.b = 2; f.c = 3
# q: how to get recordset?
def __enter__(self):
return self
def __exit__(self, etype, _evalue, _etb):
if not etype:
self.save()
def save(self):
""" Saves the form, returns the created record if applicable
* does not save ``readonly`` fields
        * does not save unmodified fields (when editing); any assignment
          or onchange return marks the field as modified, even if set to its
          current value
:raises AssertionError: if the form has any unfilled required field
"""
id_ = self._values.get('id')
values = self._values_to_save()
if id_:
r = self._model.browse(id_)
if values:
r.write(values)
else:
r = self._model.create(values)
self._values.update(
record_to_values(self._view['fields'], r)
)
self._changed.clear()
self._model.flush()
self._model.env.clear() # discard cache and pending recomputations
return r
def _values_to_save(self, all_fields=False):
""" Validates values and returns only fields modified since
load/save
        :param bool all_fields: if False (the default), checks for required
            fields and only saves fields which have changed and are not
            readonly
"""
view = self._view
fields = self._view['fields']
record_values = self._values
changed = self._changed
return self._values_to_save_(
record_values, fields, view,
changed, all_fields
)
def _values_to_save_(
self, record_values, fields, view,
changed, all_fields=False, modifiers_values=None,
parent_link=None
):
""" Validates & extracts values to save, recursively in order to handle
o2ms properly
:param dict record_values: values of the record to extract
:param dict fields: fields_get result
:param view: view tree
:param set changed: set of fields which have been modified (since last save)
:param bool all_fields:
whether to ignore normal filtering and just return everything
:param dict modifiers_values:
defaults to ``record_values``, but o2ms need some additional
massaging
"""
values = {}
for f in fields:
if f == 'id':
continue
get_modifier = functools.partial(
self._get_modifier,
f, view=view,
vals=modifiers_values or record_values
)
descr = fields[f]
v = record_values[f]
# note: maybe `invisible` should not skip `required` if model attribute
if v is False and not (all_fields or f == parent_link or descr['type'] == 'boolean' or get_modifier('invisible') or get_modifier('column_invisible')):
if get_modifier('required'):
raise AssertionError("{} is a required field ({})".format(f, view['modifiers'][f]))
# skip unmodified fields unless all_fields
if not (all_fields or f in changed):
continue
if get_modifier('readonly'):
node = _get_node(view, f)
if not (all_fields or node.get('force_save')):
continue
if descr['type'] == 'one2many':
subview = descr['views']['edition']
fields_ = subview['fields']
oldvals = v
v = []
for (c, rid, vs) in oldvals:
if c == 1 and not vs:
c, vs = 4, False
elif c in (0, 1):
vs = vs or {}
missing = fields_.keys() - vs.keys()
# FIXME: maybe do this during initial loading instead?
if missing:
Model = self._env[descr['relation']]
if c == 0:
vs.update(dict.fromkeys(missing, False))
vs.update(
(k, _cleanup_from_default(fields_[k], v))
for k, v in Model.default_get(list(missing)).items()
)
else:
vs.update(record_to_values(
{k: v for k, v in fields_.items() if k not in vs},
Model.browse(rid)
))
vs = self._values_to_save_(
vs, fields_, subview,
vs._changed if isinstance(vs, UpdateDict) else vs.keys(),
all_fields,
modifiers_values={'id': False, **vs, '•parent•': record_values},
# related o2m don't have a relation_field
parent_link=descr.get('relation_field'),
)
v.append((c, rid, vs))
values[f] = v
return values
def _perform_onchange(self, fields):
assert isinstance(fields, list)
# marks any onchange source as changed
self._changed.update(fields)
# skip calling onchange() if there's no trigger on any of the changed
# fields
spec = self._view['onchange']
if fields and not any(spec[f] for f in fields):
return
record = self._model.browse(self._values.get('id'))
result = record.onchange(self._onchange_values(), fields, spec)
self._model.flush()
self._model.env.clear() # discard cache and pending recomputations
if result.get('warning'):
_logger.getChild('onchange').warning("%(title)s %(message)s" % result.get('warning'))
values = result.get('value', {})
# mark onchange output as changed
self._changed.update(values.keys() & self._view['fields'].keys())
self._values.update(
(k, self._cleanup_onchange(
self._view['fields'][k],
v, self._values.get(k),
))
for k, v in values.items()
if k in self._view['fields']
)
return result
def _onchange_values(self):
return self._onchange_values_(self._view['fields'], self._values)
def _onchange_values_(self, fields, record):
""" Recursively cleanup o2m values for onchanges:
        * if an o2m command is a 1 (UPDATE) and there is nothing to update,
          send a 4 (LINK_TO) instead as that's what the webclient sends for
          unmodified rows
* if an o2m command is a 1 (UPDATE) and only a subset of its fields have
been modified, only send the modified ones
This needs to be recursive as there are people who put invisible o2ms
inside their o2ms.
"""
values = {}
for k, v in record.items():
if fields[k]['type'] == 'one2many':
subfields = fields[k]['views']['edition']['fields']
it = values[k] = []
for (c, rid, vs) in v:
if c == 1 and isinstance(vs, UpdateDict):
vs = dict(vs.changed_items())
if c == 1 and not vs:
it.append((4, rid, False))
elif c in (0, 1):
it.append((c, rid, self._onchange_values_(subfields, vs)))
else:
it.append((c, rid, vs))
else:
values[k] = v
return values
def _cleanup_onchange(self, descr, value, current):
if descr['type'] == 'many2one':
if not value:
return False
# out of onchange, m2o are name-gotten
return value[0]
elif descr['type'] == 'one2many':
# ignore o2ms nested in o2ms
if not descr['views']:
return []
if current is None:
current = []
v = []
c = {t[1] for t in current if t[0] in (1, 2)}
current_values = {c[1]: c[2] for c in current if c[0] == 1}
# which view should this be???
subfields = descr['views']['edition']['fields']
# TODO: simplistic, unlikely to work if e.g. there's a 5 inbetween other commands
for command in value:
if command[0] == 0:
v.append((0, 0, {
k: self._cleanup_onchange(subfields[k], v, None)
for k, v in command[2].items()
if k in subfields
}))
elif command[0] == 1:
record_id = command[1]
c.discard(record_id)
stored = current_values.get(record_id)
if stored is None:
record = self._env[descr['relation']].browse(record_id)
stored = UpdateDict(record_to_values(subfields, record))
updates = (
(k, self._cleanup_onchange(subfields[k], v, stored.get(k)))
for k, v in command[2].items()
if k in subfields
)
for field, value in updates:
# if there are values from the onchange which differ
# from current values, update & mark field as changed
if stored.get(field, value) != value:
stored._changed.add(field)
stored[field] = value
v.append((1, record_id, stored))
elif command[0] == 2:
c.discard(command[1])
v.append((2, command[1], False))
elif command[0] == 4:
c.discard(command[1])
v.append((1, command[1], None))
elif command[0] == 5:
v = []
# explicitly mark all non-relinked (or modified) records as deleted
                    for id_ in c:
                        v.append((2, id_, False))
return v
elif descr['type'] == 'many2many':
# onchange result is a bunch of commands, normalize to single 6
if current is None:
ids = []
else:
ids = list(current[0][2])
for command in value:
if command[0] == 1:
ids.append(command[1])
elif command[0] == 3:
ids.remove(command[1])
elif command[0] == 4:
ids.append(command[1])
elif command[0] == 5:
del ids[:]
elif command[0] == 6:
ids[:] = command[2]
else:
raise ValueError(
"Unsupported M2M command %d" % command[0])
return [(6, False, ids)]
return value
class O2MForm(Form):
# noinspection PyMissingConstructor
def __init__(self, proxy, index=None):
m = proxy._model
object.__setattr__(self, '_proxy', proxy)
object.__setattr__(self, '_index', index)
object.__setattr__(self, '_env', m.env)
object.__setattr__(self, '_model', m)
# copy so we don't risk breaking it too much (?)
fvg = dict(proxy._descr['views']['edition'])
object.__setattr__(self, '_view', fvg)
self._process_fvg(m, fvg)
vals = dict.fromkeys(fvg['fields'], False)
object.__setattr__(self, '_values', vals)
object.__setattr__(self, '_changed', set())
if index is None:
self._init_from_defaults(m)
else:
vals = proxy._records[index]
self._values.update(vals)
if hasattr(vals, '_changed'):
self._changed.update(vals._changed)
def _get_modifier(self, field, modifier, *, default=False, view=None, modmap=None, vals=None):
if vals is None:
vals = {**self._values, '•parent•': self._proxy._parent._values}
return super()._get_modifier(field, modifier, default=default, view=view, modmap=modmap, vals=vals)
def _onchange_values(self):
values = super(O2MForm, self)._onchange_values()
# computed o2m may not have a relation_field(?)
descr = self._proxy._descr
if 'relation_field' in descr: # note: should be fine because not recursive
values[descr['relation_field']] = self._proxy._parent._onchange_values()
return values
def save(self):
proxy = self._proxy
commands = proxy._parent._values[proxy._field]
values = self._values_to_save()
if self._index is None:
commands.append((0, 0, values))
else:
index = proxy._command_index(self._index)
(c, id_, vs) = commands[index]
if c == 0:
vs.update(values)
elif c == 1:
if vs is None:
vs = UpdateDict()
assert isinstance(vs, UpdateDict), type(vs)
vs.update(values)
commands[index] = (1, id_, vs)
else:
raise AssertionError("Expected command type 0 or 1, found %s" % c)
# FIXME: should be called when performing on change => value needs to be serialised into parent every time?
proxy._parent._perform_onchange([proxy._field])
def _values_to_save(self, all_fields=False):
""" Validates values and returns only fields modified since
load/save
"""
values = UpdateDict(self._values)
values._changed.update(self._changed)
if all_fields:
return values
for f in self._view['fields']:
if self._get_modifier(f, 'required') and not (self._get_modifier(f, 'column_invisible') or self._get_modifier(f, 'invisible')):
assert self._values[f] is not False, "{} is a required field".format(f)
return values
class UpdateDict(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._changed = set()
if args and isinstance(args[0], UpdateDict):
self._changed.update(args[0]._changed)
def changed_items(self):
return (
(k, v) for k, v in self.items()
if k in self._changed
)
def update(self, *args, **kw):
super().update(*args, **kw)
if args and isinstance(args[0], UpdateDict):
self._changed.update(args[0]._changed)
class X2MProxy(object):
_parent = None
_field = None
def _assert_editable(self):
assert not self._parent._get_modifier(self._field, 'readonly'),\
'field %s is not editable' % self._field
class O2MProxy(X2MProxy):
""" O2MProxy()
"""
def __init__(self, parent, field):
self._parent = parent
self._field = field
# reify records to a list so they can be manipulated easily?
self._records = []
model = self._model
fields = self._descr['views']['edition']['fields']
for (command, rid, values) in self._parent._values[self._field]:
if command == 0:
self._records.append(values)
elif command == 1:
if values is None:
# read based on view info
r = model.browse(rid)
values = UpdateDict(record_to_values(fields, r))
self._records.append(values)
elif command == 2:
pass
else:
raise AssertionError("O2M proxy only supports commands 0, 1 and 2, found %s" % command)
def __len__(self):
return len(self._records)
@property
def _model(self):
model = self._parent._env[self._descr['relation']]
ctx = self._parent._get_context(self._field)
if ctx:
model = model.with_context(**ctx)
return model
@property
def _descr(self):
return self._parent._view['fields'][self._field]
def _command_index(self, for_record):
""" Takes a record index and finds the corresponding record index
(skips all 2s, basically)
:param int for_record:
"""
commands = self._parent._values[self._field]
return next(
cidx
for ridx, cidx in enumerate(
cidx for cidx, (c, _1, _2) in enumerate(commands)
if c in (0, 1)
)
if ridx == for_record
)
def new(self):
""" Returns a :class:`Form` for a new
:class:`~odoo.fields.One2many` record, properly initialised.
The form is created from the list view if editable, or the field's
form view otherwise.
:raises AssertionError: if the field is not editable
"""
self._assert_editable()
return O2MForm(self)
def edit(self, index):
""" Returns a :class:`Form` to edit the pre-existing
:class:`~odoo.fields.One2many` record.
The form is created from the list view if editable, or the field's
form view otherwise.
:raises AssertionError: if the field is not editable
"""
self._assert_editable()
return O2MForm(self, index)
def remove(self, index):
""" Removes the record at ``index`` from the parent form.
:raises AssertionError: if the field is not editable
"""
self._assert_editable()
# remove reified record from local list & either remove 0 from
# commands list or replace 1 (update) by 2 (remove)
cidx = self._command_index(index)
commands = self._parent._values[self._field]
(command, rid, _) = commands[cidx]
if command == 0:
# record not saved yet -> just remove the command
del commands[cidx]
elif command == 1:
# record already saved, replace by 2
commands[cidx] = (2, rid, 0)
else:
raise AssertionError("Expected command 0 or 1, got %s" % commands[cidx])
# remove reified record
del self._records[index]
self._parent._perform_onchange([self._field])
class M2MProxy(X2MProxy, collections.abc.Sequence):
""" M2MProxy()
    Behaves as a :class:`~collections.abc.Sequence` of recordsets; it can be
    indexed or sliced to get the actual underlying recordsets.
"""
def __init__(self, parent, field):
self._parent = parent
self._field = field
def __getitem__(self, it):
p = self._parent
model = p._view['fields'][self._field]['relation']
return p._env[model].browse(self._get_ids()[it])
def __len__(self):
return len(self._get_ids())
def __iter__(self):
return iter(self[:])
def __contains__(self, record):
relation_ = self._parent._view['fields'][self._field]['relation']
assert isinstance(record, BaseModel)\
and record._name == relation_
return record.id in self._get_ids()
def add(self, record):
""" Adds ``record`` to the field, the record must already exist.
The addition will only be finalized when the parent record is saved.
"""
self._assert_editable()
parent = self._parent
relation_ = parent._view['fields'][self._field]['relation']
assert isinstance(record, BaseModel) and record._name == relation_,\
"trying to assign a '{}' object to a '{}' field".format(
record._name,
relation_,
)
self._get_ids().append(record.id)
parent._perform_onchange([self._field])
def _get_ids(self):
return self._parent._values[self._field][0][2]
def remove(self, id=None, index=None):
""" Removes a record at a certain index or with a provided id from
the field.
"""
self._assert_editable()
assert (id is None) ^ (index is None), \
"can remove by either id or index"
if id is None:
# remove by index
del self._get_ids()[index]
else:
self._get_ids().remove(id)
self._parent._perform_onchange([self._field])
def clear(self):
""" Removes all existing records in the m2m
"""
self._assert_editable()
self._get_ids()[:] = []
self._parent._perform_onchange([self._field])
def record_to_values(fields, record):
r = {}
    # don't read the id explicitly, not sure why but if any of the "magic" hr
    # fields is read alongside `id` then it blows up e.g.
# james.read(['barcode']) works fine but james.read(['id', 'barcode'])
# triggers an ACL error on barcode, likewise km_home_work or
# emergency_contact or whatever. Since we always get the id anyway, just
# remove it from the fields to read
to_read = list(fields.keys() - {'id'})
for f, v in record.read(to_read)[0].items():
descr = fields[f]
if descr['type'] == 'many2one':
v = v and v[0]
elif descr['type'] == 'many2many':
v = [(6, 0, v or [])]
elif descr['type'] == 'one2many':
v = [(1, r, None) for r in v or []]
elif descr['type'] == 'datetime' and isinstance(v, datetime):
v = odoo.fields.Datetime.to_string(v)
elif descr['type'] == 'date' and isinstance(v, date):
v = odoo.fields.Date.to_string(v)
r[f] = v
return r
def _cleanup_from_default(type_, value):
if not value:
if type_ == 'many2many':
return [(6, False, [])]
elif type_ == 'one2many':
return []
elif type_ in ('integer', 'float'):
return 0
return value
if type_ == 'one2many':
return [c for c in value if c[0] != 6]
elif type_ == 'datetime' and isinstance(value, datetime):
return odoo.fields.Datetime.to_string(value)
elif type_ == 'date' and isinstance(value, date):
return odoo.fields.Date.to_string(value)
return value
def _get_node(view, f, *arg):
""" Find etree node for the field ``f`` in the view's arch
"""
return next((
n for n in view['tree'].iter('field')
if n.get('name') == f
), *arg)
def tagged(*tags):
"""
A decorator to tag BaseCase objects.
Tags are stored in a set that can be accessed from a 'test_tags' attribute.
    A tag prefixed with '-' removes that tag, e.g. '-standard' removes the default 'standard' tag.
By default, all Test classes from odoo.tests.common have a test_tags
attribute that defaults to 'standard' and 'at_install'.
When using class inheritance, the tags are NOT inherited.
"""
def tags_decorator(obj):
include = {t for t in tags if not t.startswith('-')}
exclude = {t[1:] for t in tags if t.startswith('-')}
        obj.test_tags = (getattr(obj, 'test_tags', set()) | include) - exclude  # TODO: remove getattr in master since we want to limit tagged to BaseCase and always have the +standard tag
at_install = 'at_install' in obj.test_tags
post_install = 'post_install' in obj.test_tags
if not (at_install ^ post_install):
            _logger.warning('A test should be either at_install or post_install, which is not the case of %r', obj)
return obj
return tags_decorator
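# Usage sketch (class name assumed): move a test class to the post_install
# phase and add a custom tag for selective runs.
#
#     @tagged('post_install', '-at_install', 'nightly')
#     class TestHeavyFlow(HttpCase):
#         ...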
class TagsSelector(object):
""" Test selector based on tags. """
filter_spec_re = re.compile(r'^([+-]?)(\*|\w*)(?:/(\w*))?(?::(\w*))?(?:\.(\w*))?$') # [-][tag][/module][:class][.method]
def __init__(self, spec):
""" Parse the spec to determine tags to include and exclude. """
filter_specs = {t.strip() for t in spec.split(',') if t.strip()}
self.exclude = set()
self.include = set()
for filter_spec in filter_specs:
match = self.filter_spec_re.match(filter_spec)
if not match:
_logger.error('Invalid tag %s', filter_spec)
continue
sign, tag, module, klass, method = match.groups()
is_include = sign != '-'
if not tag and is_include:
# including /module:class.method implicitly requires 'standard'
tag = 'standard'
elif not tag or tag == '*':
# '*' indicates all tests (instead of 'standard' tests only)
tag = None
test_filter = (tag, module, klass, method)
if is_include:
self.include.add(test_filter)
else:
self.exclude.add(test_filter)
if self.exclude and not self.include:
self.include.add(('standard', None, None, None))
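    # Illustrative spec strings (module/class/method names assumed):
    #   "+standard,-slow"      include 'standard'-tagged tests, drop 'slow' ones
    #   "/account"             the 'standard' tests of module 'account'
    #   "*"                    every test, whatever its tags
    #   ":TestMove.test_post"  one method of a 'standard'-tagged class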
def check(self, test):
""" Return whether ``arg`` matches the specification: it must have at
least one tag in ``self.include`` and none in ``self.exclude`` for each tag category.
"""
if not hasattr(test, 'test_tags'): # handle the case where the Test does not inherit from BaseCase and has no test_tags
_logger.debug("Skipping test '%s' because no test_tag found.", test)
return False
test_module = getattr(test, 'test_module', None)
test_class = getattr(test, 'test_class', None)
        test_tags = test.test_tags | {test_module}  # using the module name as a tag is deprecated, kept for backward compatibility
test_method = getattr(test, '_testMethodName', None)
def _is_matching(test_filter):
(tag, module, klass, method) = test_filter
if tag and tag not in test_tags:
return False
elif module and module != test_module:
return False
elif klass and klass != test_class:
return False
elif method and test_method and method != test_method:
return False
return True
if any(_is_matching(test_filter) for test_filter in self.exclude):
return False
if any(_is_matching(test_filter) for test_filter in self.include):
return True
return False
| 41.295328 | 118,435 |
4,149 | py | PYTHON | 15.0 |
import importlib
import inspect
import itertools
import logging
import sys
import threading
import unittest
from pathlib import Path
from .. import tools
from .common import TagsSelector, OdooSuite
from .runner import OdooTestResult
_logger = logging.getLogger(__name__)
def get_test_modules(module):
""" Return a list of module for the addons potentially containing tests to
feed unittest.TestLoader.loadTestsFromModule() """
# Try to import the module
results = _get_tests_modules('odoo.addons', module)
try:
importlib.import_module('odoo.upgrade.%s' % module)
except ImportError:
pass
else:
results += list(_get_upgrade_test_modules(module))
return results
def _get_tests_modules(path, module):
modpath = '%s.%s' % (path, module)
try:
mod = importlib.import_module('.tests', modpath)
    except ImportError as e:  # also catches ModuleNotFoundError, its subclass since Python 3.6
# Hide ImportErrors on `tests` sub-module, but display other exceptions
if e.name == modpath + '.tests' and e.msg.startswith('No module named'):
return []
        _logger.exception('Cannot `import %s`.', module)
return []
    except Exception:
        _logger.exception('Cannot `import %s`.', module)
return []
if hasattr(mod, 'fast_suite') or hasattr(mod, 'checks'):
_logger.warning(
"Found deprecated fast_suite or checks attribute in test module "
"%s. These have no effect in or after version 8.0.",
mod.__name__)
result = [mod_obj for name, mod_obj in inspect.getmembers(mod, inspect.ismodule)
if name.startswith('test_')]
return result
def _get_upgrade_test_modules(module):
upg = importlib.import_module("odoo.upgrade")
for path in map(Path, upg.__path__):
for test in (path / module / "tests").glob("test_*.py"):
spec = importlib.util.spec_from_file_location(f"odoo.upgrade.{module}.tests.{test.stem}", test)
if not spec:
continue
pymod = importlib.util.module_from_spec(spec)
sys.modules[spec.name] = pymod
spec.loader.exec_module(pymod)
yield pymod
def make_suite(module_names, position='at_install'):
""" Creates a test suite for all the tests in the specified modules,
filtered by the provided ``position`` and the current test tags
:param list[str] module_names: modules to load tests from
:param str position: "at_install" or "post_install"
"""
config_tags = TagsSelector(tools.config['test_tags'])
position_tag = TagsSelector(position)
tests = (
t
for module_name in module_names
for m in get_test_modules(module_name)
for t in unwrap_suite(unittest.TestLoader().loadTestsFromModule(m))
if position_tag.check(t) and config_tags.check(t)
)
return OdooSuite(sorted(tests, key=lambda t: t.test_sequence))
def run_suite(suite, module_name=None):
# avoid dependency hell
from ..modules import module
module.current_test = module_name
threading.current_thread().testing = True
results = OdooTestResult()
suite(results)
threading.current_thread().testing = False
module.current_test = None
return results
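# Usage sketch (module name assumed): build and run the at_install suite of a
# single module.
#
#     suite = make_suite(['account'], 'at_install')
#     results = run_suite(suite, module_name='account')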
def unwrap_suite(test):
"""
Attempts to unpack testsuites (holding suites or cases) in order to
generate a single stream of terminals (either test cases or customized
test suites). These can then be checked for run/skip attributes
individually.
An alternative would be to use a variant of @unittest.skipIf with a state
flag of some sort e.g. @unittest.skipIf(common.runstate != 'at_install'),
but then things become weird with post_install as tests should *not* run
by default there
"""
if isinstance(test, unittest.TestCase):
yield test
return
subtests = list(test)
# custom test suite (no test cases)
if not len(subtests):
yield test
return
for item in itertools.chain.from_iterable(unwrap_suite(t) for t in subtests):
yield item
| 34.008197 | 4,149 |
5,772 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
"""
OpenERP - Server
OpenERP is an ERP+CRM program for small and medium businesses.
The whole source code is distributed under the terms of the
GNU Public Licence.
(c) 2003-TODAY, Fabien Pinckaers - OpenERP SA
"""
import atexit
import csv # pylint: disable=deprecated-module
import logging
import os
import signal
import sys
import threading
import traceback
import time
from psycopg2 import ProgrammingError, errorcodes
import odoo
from . import Command
__author__ = odoo.release.author
__version__ = odoo.release.version
# Also use the `odoo` logger for the main script.
_logger = logging.getLogger('odoo')
def check_root_user():
"""Warn if the process's user is 'root' (on POSIX system)."""
if os.name == 'posix':
import getpass
if getpass.getuser() == 'root':
sys.stderr.write("Running as user 'root' is a security risk.\n")
def check_postgres_user():
""" Exit if the configured database user is 'postgres'.
This function assumes the configuration has been initialized.
"""
config = odoo.tools.config
if (config['db_user'] or os.environ.get('PGUSER')) == 'postgres':
sys.stderr.write("Using the database user 'postgres' is a security risk, aborting.")
sys.exit(1)
def report_configuration():
""" Log the server version and some configuration values.
This function assumes the configuration has been initialized.
"""
config = odoo.tools.config
_logger.info("Odoo version %s", __version__)
if os.path.isfile(config.rcfile):
_logger.info("Using configuration file at " + config.rcfile)
_logger.info('addons paths: %s', odoo.addons.__path__)
if config.get('upgrade_path'):
_logger.info('upgrade path: %s', config['upgrade_path'])
host = config['db_host'] or os.environ.get('PGHOST', 'default')
port = config['db_port'] or os.environ.get('PGPORT', 'default')
user = config['db_user'] or os.environ.get('PGUSER', 'default')
_logger.info('database: %s@%s:%s', user, host, port)
def rm_pid_file(main_pid):
config = odoo.tools.config
if config['pidfile'] and main_pid == os.getpid():
try:
os.unlink(config['pidfile'])
except OSError:
pass
def setup_pid_file():
""" Create a file with the process id written in it.
This function assumes the configuration has been initialized.
"""
config = odoo.tools.config
if not odoo.evented and config['pidfile']:
pid = os.getpid()
with open(config['pidfile'], 'w') as fd:
fd.write(str(pid))
atexit.register(rm_pid_file, pid)
def export_translation():
config = odoo.tools.config
dbname = config['db_name']
if config["language"]:
msg = "language %s" % (config["language"],)
else:
msg = "new language"
_logger.info('writing translation file for %s to %s', msg,
config["translate_out"])
fileformat = os.path.splitext(config["translate_out"])[-1][1:].lower()
# .pot is the same fileformat as .po
if fileformat == "pot":
fileformat = "po"
with open(config["translate_out"], "wb") as buf:
registry = odoo.modules.registry.Registry.new(dbname)
with registry.cursor() as cr:
odoo.tools.trans_export(config["language"],
config["translate_modules"] or ["all"], buf, fileformat, cr)
_logger.info('translation file written successfully')
def import_translation():
config = odoo.tools.config
overwrite = config["overwrite_existing_translations"]
dbname = config['db_name']
registry = odoo.modules.registry.Registry.new(dbname)
with registry.cursor() as cr:
odoo.tools.trans_load(
cr, config["translate_in"], config["language"], overwrite=overwrite,
)
def main(args):
check_root_user()
odoo.tools.config.parse_config(args)
check_postgres_user()
report_configuration()
config = odoo.tools.config
# the default limit for CSV fields in the module is 128KiB, which is not
# quite sufficient to import images to store in attachment. 500MiB is a
# bit overkill, but better safe than sorry I guess
csv.field_size_limit(500 * 1024 * 1024)
preload = []
if config['db_name']:
preload = config['db_name'].split(',')
for db_name in preload:
try:
odoo.service.db._create_empty_database(db_name)
config['init']['base'] = True
except ProgrammingError as err:
if err.pgcode == errorcodes.INSUFFICIENT_PRIVILEGE:
# We use an INFO loglevel on purpose in order to avoid
# reporting unnecessary warnings on build environment
# using restricted database access.
_logger.info("Could not determine if database %s exists, "
"skipping auto-creation: %s", db_name, err)
else:
raise err
except odoo.service.db.DatabaseExists:
pass
if config["translate_out"]:
export_translation()
sys.exit(0)
if config["translate_in"]:
import_translation()
sys.exit(0)
# This needs to be done now to ensure the use of the multiprocessing
# signaling mechanism for registries loaded with -d
if config['workers']:
odoo.multi_process = True
stop = config["stop_after_init"]
setup_pid_file()
rc = odoo.service.server.start(preload=preload, stop=stop)
sys.exit(rc)
class Server(Command):
"""Start the odoo server (default command)"""
def run(self, args):
main(args)
| 32.24581 | 5,772 |
3,663 | py | PYTHON | 15.0 |
import argparse
import glob
import json
import os
import re
import sys
from . import Command
from odoo.modules.module import MANIFEST_NAMES
class TSConfig(Command):
"""Generates tsconfig files for javascript code"""
def __init__(self):
self.command_name = "tsconfig"
def get_module_list(self, path):
return [
mod.split(os.path.sep)[-2]
for mname in MANIFEST_NAMES
for mod in glob.glob(os.path.join(path, f'*/{mname}'))
]
def clean_path(self, path):
return re.sub(r"/{2,}", "/", path)
def prefix_suffix_path(self, path, prefix, suffix):
return self.clean_path(f"{prefix}/{path}/{suffix}")
def remove_(self, modules, module):
for name, path in modules:
if module == name:
modules.remove((name, path))
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s %s" % (sys.argv[0].split(os.path.sep)[-1], self.command_name),
description=self.__doc__
)
parser.add_argument('--addons-path', type=str, nargs=1, dest="paths")
args = parser.parse_args(args=cmdargs)
paths = list(map(self.clean_path, args.paths[0].split(',')))
modules = {}
for path in paths:
for module in self.get_module_list(path):
modules[module] = self.prefix_suffix_path(module, path, "/static/src/*")
content = self.generate_file_content(modules, paths)
# pylint: disable=bad-builtin
print(json.dumps(content, indent=2))
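    # CLI sketch (paths are assumptions): the generated tsconfig is printed on
    # stdout and is typically redirected to a file:
    #
    #     odoo-bin tsconfig --addons-path=addons,../enterprise > tsconfig.json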
def generate_imports(self, modules):
return {
f'@{module}/*': [path]
for module, path in modules.items()
}
def generate_file_content(self, modules, paths):
return {
'compilerOptions': {
"baseUrl": ".",
"target": "es2019",
"checkJs": True,
"allowJs": True,
"noEmit": True,
"typeRoots": list(map(lambda p: p + "/web/tooling/types", paths)),
"paths": self.generate_imports(modules)
}, "exclude": self.generate_excludes()
}
def generate_excludes(self):
return [
"/**/*.po",
"/**/*.py",
"/**/*.pyc",
"/**/*.xml",
"/**/*.png",
"/**/*.md",
"/**/*.dat",
"/**/*.scss",
"/**/*.jpg",
"/**/*.svg",
"/**/*.pot",
"/**/*.csv",
"/**/*.mo",
"/**/*.txt",
"/**/*.less",
"/**/*.bcmap",
"/**/*.properties",
"/**/*.html",
"/**/*.ttf",
"/**/*.rst",
"/**/*.css",
"/**/*.pack",
"/**/*.idx",
"/**/*.h",
"/**/*.map",
"/**/*.gif",
"/**/*.sample",
"/**/*.doctree",
"/**/*.so",
"/**/*.pdf",
"/**/*.xslt",
"/**/*.conf",
"/**/*.woff",
"/**/*.xsd",
"/**/*.eot",
"/**/*.jst",
"/**/*.flow",
"/**/*.sh",
"/**/*.yml",
"/**/*.pfb",
"/**/*.jpeg",
"/**/*.crt",
"/**/*.template",
"/**/*.pxd",
"/**/*.dylib",
"/**/*.pem",
"/**/*.rng",
"/**/*.xsl",
"/**/*.xls",
"/**/*.cfg",
"/**/*.pyi",
"/**/*.pth",
"/**/*.markdown",
"/**/*.key",
"/**/*.ico",
]
| 28.176923 | 3,663 |
2,132 | py | PYTHON | 15.0 |
from __future__ import print_function
import logging
import sys
import os
from os.path import join as joinpath, isdir
import odoo
from odoo.modules import get_modules, get_module_path, initialize_sys_path
commands = {}
class CommandType(type):
def __init__(cls, name, bases, attrs):
super(CommandType, cls).__init__(name, bases, attrs)
name = getattr(cls, name, cls.__name__.lower())
cls.name = name
if name != 'command':
commands[name] = cls
Command = CommandType('Command', (object,), {'run': lambda self, args: None})
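# Any subclass of Command is auto-registered by the metaclass under its
# lowercased class name. Illustrative sketch (not an actual command):
#
#     class Hello(Command):
#         """Say hello"""
#         def run(self, args):
#             print("hello")
#
# which would then be invokable as `odoo-bin hello`.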
class Help(Command):
"""Display the list of available commands"""
def run(self, args):
print("Available commands:\n")
names = list(commands)
padding = max([len(k) for k in names]) + 2
for k in sorted(names):
name = k.ljust(padding, ' ')
doc = (commands[k].__doc__ or '').strip()
print(" %s%s" % (name, doc))
print("\nUse '%s <command> --help' for individual command help." % sys.argv[0].split(os.path.sep)[-1])
def main():
args = sys.argv[1:]
# The only shared option is '--addons-path=' needed to discover additional
# commands from modules
if len(args) > 1 and args[0].startswith('--addons-path=') and not args[1].startswith("-"):
# parse only the addons-path, do not setup the logger...
odoo.tools.config._parse_config([args[0]])
args = args[1:]
# Default legacy command
command = "server"
# TODO: find a way to properly discover addons subcommands without importing the world
# Subcommand discovery
if len(args) and not args[0].startswith("-"):
logging.disable(logging.CRITICAL)
initialize_sys_path()
for module in get_modules():
if isdir(joinpath(get_module_path(module), 'cli')):
__import__('odoo.addons.' + module)
logging.disable(logging.NOTSET)
command = args[0]
args = args[1:]
if command in commands:
o = commands[command]()
o.run(args)
else:
sys.exit('Unknown command %r' % (command,))
| 33.84127 | 2,132 |
3,730 |
py
|
PYTHON
|
15.0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import requests
import sys
import tempfile
import zipfile
from . import Command
class Deploy(Command):
"""Deploy a module on an Odoo instance"""
def __init__(self):
super(Deploy, self).__init__()
self.session = requests.session()
def deploy_module(self, module_path, url, login, password, db='', force=False):
url = url.rstrip('/')
module_file = self.zip_module(module_path)
try:
return self.login_upload_module(module_file, url, login, password, db, force=force)
finally:
os.remove(module_file)
def login_upload_module(self, module_file, url, login, password, db, force=False):
print("Uploading module file...")
        self.session.get(f'{url}/web/login?db={db}', allow_redirects=False)  # this sets the db in the session
endpoint = url + '/base_import_module/login_upload'
post_data = {
'login': login,
'password': password,
'db': db,
'force': '1' if force else '',
}
with open(module_file, 'rb') as f:
res = self.session.post(endpoint, files={'mod_file': f}, data=post_data)
        if res.status_code == 404:
            raise Exception(
                "The server '%s' does not have the 'base_import_module' module installed or is not up-to-date." % url)
res.raise_for_status()
return res.text
def zip_module(self, path):
path = os.path.abspath(path)
if not os.path.isdir(path):
raise Exception("Could not find module directory '%s'" % path)
container, module_name = os.path.split(path)
        # mkstemp creates the file atomically, avoiding mktemp's race condition
        fd, temp = tempfile.mkstemp(suffix='.zip')
        os.close(fd)
try:
print("Zipping module directory...")
with zipfile.ZipFile(temp, 'w') as zfile:
for root, dirs, files in os.walk(path):
for file in files:
file_path = os.path.join(root, file)
                        # archive entry path relative to the container directory
                        zfile.write(file_path, os.path.relpath(file_path, container))
return temp
except Exception:
os.remove(temp)
raise
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s deploy" % sys.argv[0].split(os.path.sep)[-1],
description=self.__doc__
)
parser.add_argument('path', help="Path of the module to deploy")
parser.add_argument('url', nargs='?', help='Url of the server (default=http://localhost:8069)', default="http://localhost:8069")
parser.add_argument('--db', dest='db', help='Database to use if server does not use db-filter.')
parser.add_argument('--login', dest='login', default="admin", help='Login (default=admin)')
parser.add_argument('--password', dest='password', default="admin", help='Password (default=admin)')
parser.add_argument('--verify-ssl', action='store_true', help='Verify SSL certificate')
parser.add_argument('--force', action='store_true', help='Force init even if module is already installed. (will update `noupdate="1"` records)')
if not cmdargs:
sys.exit(parser.print_help())
args = parser.parse_args(args=cmdargs)
if not args.verify_ssl:
self.session.verify = False
try:
if not args.url.startswith(('http://', 'https://')):
args.url = 'https://%s' % args.url
result = self.deploy_module(args.path, args.url, args.login, args.password, args.db, force=args.force)
print(result)
except Exception as e:
sys.exit("ERROR: %s" % e)
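# Example invocation (illustrative; the server URL and database are hypothetical):
#   odoo-bin deploy ./my_module https://test.example.com --db mydb --force
# zips ./my_module and uploads it through /base_import_module/login_upload.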
| 41.444444 | 3,730 |
3,924 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from __future__ import print_function
import code
import logging
import os
import signal
import sys
import odoo
from odoo.tools import config
from . import Command
_logger = logging.getLogger(__name__)
"""
Shell exit behaviors
====================
Legend:
stop = The REPL main loop stop.
raise = Exception raised.
loop = Stay in REPL.
Shell | ^D | exit() | quit() | sys.exit() | raise SystemExit()
----------------------------------------------------------------------
python | stop | raise | raise | raise | raise
ipython | stop | stop | stop | loop | loop
ptpython | stop | raise | raise | raise | raise
bpython | stop | stop | stop | stop | stop
"""
def raise_keyboard_interrupt(*a):
raise KeyboardInterrupt()
class Console(code.InteractiveConsole):
def __init__(self, locals=None, filename="<console>"):
code.InteractiveConsole.__init__(self, locals, filename)
try:
import readline
import rlcompleter
except ImportError:
print('readline or rlcompleter not available, autocomplete disabled.')
else:
readline.set_completer(rlcompleter.Completer(locals).complete)
readline.parse_and_bind("tab: complete")
class Shell(Command):
"""Start odoo in an interactive shell"""
supported_shells = ['ipython', 'ptpython', 'bpython', 'python']
def init(self, args):
config.parse_config(args)
odoo.cli.server.report_configuration()
odoo.service.server.start(preload=[], stop=True)
signal.signal(signal.SIGINT, raise_keyboard_interrupt)
def console(self, local_vars):
if not os.isatty(sys.stdin.fileno()):
local_vars['__name__'] = '__main__'
exec(sys.stdin.read(), local_vars)
else:
if 'env' not in local_vars:
print('No environment set, use `%s shell -d dbname` to get one.' % sys.argv[0])
for i in sorted(local_vars):
print('%s: %s' % (i, local_vars[i]))
preferred_interface = config.options.get('shell_interface')
if preferred_interface:
shells_to_try = [preferred_interface, 'python']
else:
shells_to_try = self.supported_shells
for shell in shells_to_try:
try:
return getattr(self, shell)(local_vars)
except ImportError:
pass
except Exception:
_logger.warning("Could not start '%s' shell." % shell)
_logger.debug("Shell error:", exc_info=True)
def ipython(self, local_vars):
from IPython import start_ipython
start_ipython(argv=[], user_ns=local_vars)
def ptpython(self, local_vars):
from ptpython.repl import embed
embed({}, local_vars)
def bpython(self, local_vars):
from bpython import embed
embed(local_vars)
def python(self, local_vars):
Console(locals=local_vars).interact()
def shell(self, dbname):
local_vars = {
'openerp': odoo,
'odoo': odoo,
}
if dbname:
registry = odoo.registry(dbname)
with registry.cursor() as cr:
uid = odoo.SUPERUSER_ID
ctx = odoo.api.Environment(cr, uid, {})['res.users'].context_get()
env = odoo.api.Environment(cr, uid, ctx)
local_vars['env'] = env
local_vars['self'] = env.user
self.console(local_vars)
cr.rollback()
else:
self.console(local_vars)
def run(self, args):
self.init(args)
self.shell(config['db_name'])
return 0
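# Example usage (illustrative; the database name is hypothetical):
#   odoo-bin shell -d mydb                                  # interactive REPL with `env` bound
#   echo "print(env.cr.dbname)" | odoo-bin shell -d mydb    # non-interactive: stdin is exec'd
# Note that changes are rolled back when the session ends (see cr.rollback() above).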
| 31.392 | 3,924 |
1,177 |
py
|
PYTHON
|
15.0
|
import argparse
import os
import secrets
import sys
import textwrap
from passlib.hash import pbkdf2_sha512
from . import Command
from odoo.tools import config
class GenProxyToken(Command):
def __init__(self):
self.command_name = "genproxytoken"
    def generate_token(self, length=16):
        token = secrets.token_hex(length // 2)  # yields `length` hex characters
        split_size = length // 4
        return '-'.join(textwrap.wrap(token, split_size))
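    # Illustrative example: with the default length of 16, generate_token()
    # returns 16 hex characters in groups of 4, e.g. '1f2a-9c03-77de-b41a'
    # (the value itself is random; this sample is made up).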
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s %s" % (sys.argv[0].split(os.path.sep)[-1], self.command_name),
description="Generate and (re)set proxy access token in config file"
)
parser.add_argument('-c', '--config', type=str, help="Specify an alternate config file")
parser.add_argument('--token-length', type=int, help="Token Length", default=16)
        # parse the arguments given to this subcommand rather than the full sys.argv
        args, _ = parser.parse_known_args(args=cmdargs)
if args.config:
config.rcfile = args.config
token = self.generate_token(length=args.token_length)
config['proxy_access_token'] = pbkdf2_sha512.hash(token)
config.save()
sys.stdout.write(f'{token}\n')
| 33.628571 | 1,177 |
3,159 |
py
|
PYTHON
|
15.0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import glob
import itertools
import os
import sys
import odoo
from . import Command
from .server import main
from odoo.modules.module import get_module_root, MANIFEST_NAMES
from odoo.service.db import _create_empty_database, DatabaseExists
class Start(Command):
"""Quick start the Odoo server for your project"""
def get_module_list(self, path):
mods = itertools.chain.from_iterable(
glob.glob(os.path.join(path, '*/%s' % mname))
for mname in MANIFEST_NAMES
)
return [mod.split(os.path.sep)[-2] for mod in mods]
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s start" % sys.argv[0].split(os.path.sep)[-1],
description=self.__doc__
)
parser.add_argument('--path', default=".",
help="Directory where your project's modules are stored (will autodetect from current dir)")
parser.add_argument("-d", "--database", dest="db_name", default=None,
help="Specify the database name (default to project's directory name")
args, unknown = parser.parse_known_args(args=cmdargs)
        # When in a virtualenv, by default use its path rather than the cwd
if args.path == '.' and os.environ.get('VIRTUAL_ENV'):
args.path = os.environ.get('VIRTUAL_ENV')
project_path = os.path.abspath(os.path.expanduser(os.path.expandvars(args.path)))
module_root = get_module_root(project_path)
db_name = None
if module_root:
# started in a module so we choose this module name for database
db_name = project_path.split(os.path.sep)[-1]
# go to the parent's directory of the module root
project_path = os.path.abspath(os.path.join(project_path, os.pardir))
# check if one of the subfolders has at least one module
mods = self.get_module_list(project_path)
if mods and '--addons-path' not in cmdargs:
cmdargs.append('--addons-path=%s' % project_path)
if not args.db_name:
args.db_name = db_name or project_path.split(os.path.sep)[-1]
cmdargs.extend(('-d', args.db_name))
# TODO: forbid some database names ? eg template1, ...
try:
_create_empty_database(args.db_name)
odoo.tools.config['init']['base'] = True
        except DatabaseExists:
            pass
except Exception as e:
die("Could not create database `%s`. (%s)" % (args.db_name, e))
if '--db-filter' not in cmdargs:
cmdargs.append('--db-filter=^%s$' % args.db_name)
        # Remove the --path and -p options from the command arguments
        def to_remove(i, args_list):
            return args_list[i] == '-p' or args_list[i].startswith('--path') or \
                   (i > 0 and args_list[i - 1] in ['-p', '--path'])
        cmdargs = [v for i, v in enumerate(cmdargs)
                   if not to_remove(i, cmdargs)]
main(cmdargs)
def die(message, code=1):
print(message, file=sys.stderr)
sys.exit(code)
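# Example (illustrative): running `odoo-bin start` from inside a module directory
# uses the parent folder as --addons-path, names the database after the module,
# creates it if missing, and restricts --db-filter to that single database.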
| 37.607143 | 3,159 |
1,639 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import argparse
import os
import sys
import textwrap
from odoo.tools import cloc, config
from . import Command
class Cloc(Command):
"""\
    Odoo cloc is a tool to count the number of relevant lines written in
    Python, Javascript or XML. This can be used as a rough metric for pricing
    the maintenance of customizations.
It has two modes of operation, either by providing a path:
odoo-bin cloc -p module_path
Or by providing the name of a database:
odoo-bin cloc --addons-path=dirs -d database
In the latter mode, only the custom code is accounted for.
"""
def run(self, args):
parser = argparse.ArgumentParser(
prog="%s cloc" % sys.argv[0].split(os.path.sep)[-1],
description=textwrap.dedent(self.__doc__),
formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument('--database', '-d', dest="database", help="Database name")
parser.add_argument('--path', '-p', action='append', help="File or directory path")
parser.add_argument('--verbose', '-v', action='count', default=0)
opt, unknown = parser.parse_known_args(args)
if not opt.database and not opt.path:
parser.print_help()
sys.exit()
c = cloc.Cloc()
if opt.database:
config.parse_config(['-d', opt.database] + unknown)
c.count_database(opt.database)
if opt.path:
for i in opt.path:
c.count_path(i)
c.report(opt.verbose)
| 34.145833 | 1,639 |
4,298 |
py
|
PYTHON
|
15.0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import re
import sys
import jinja2
from . import Command
class Scaffold(Command):
""" Generates an Odoo module skeleton. """
def run(self, cmdargs):
# TODO: bash completion file
parser = argparse.ArgumentParser(
prog="%s scaffold" % sys.argv[0].split(os.path.sep)[-1],
description=self.__doc__,
epilog=self.epilog(),
)
parser.add_argument(
'-t', '--template', type=template, default=template('default'),
help="Use a custom module template, can be a template name or the"
" path to a module template (default: %(default)s)")
parser.add_argument('name', help="Name of the module to create")
parser.add_argument(
'dest', default='.', nargs='?',
help="Directory to create the module in (default: %(default)s)")
if not cmdargs:
sys.exit(parser.print_help())
args = parser.parse_args(args=cmdargs)
args.template.render_to(
snake(args.name),
directory(args.dest, create=True),
{'name': args.name})
def epilog(self):
return "Built-in templates available are: %s" % ', '.join(
d for d in os.listdir(builtins())
if d != 'base'
)
builtins = lambda *args: os.path.join(
os.path.abspath(os.path.dirname(__file__)),
'templates',
*args)
def snake(s):
""" snake cases ``s``
:param str s:
:return: str
"""
# insert a space before each uppercase character preceded by a
# non-uppercase letter
s = re.sub(r'(?<=[^A-Z])\B([A-Z])', r' \1', s)
# lowercase everything, split on whitespace and join
return '_'.join(s.lower().split())
def pascal(s):
return ''.join(
ss.capitalize()
        for ss in re.sub(r'[_\s]+', ' ', s).split()
)
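# Illustrative examples (inputs are made up):
#   snake('MyModule')   -> 'my_module'
#   snake('my module')  -> 'my_module'
#   pascal('my_module') -> 'MyModule'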
def directory(p, create=False):
expanded = os.path.abspath(
os.path.expanduser(
os.path.expandvars(p)))
if create and not os.path.exists(expanded):
os.makedirs(expanded)
if not os.path.isdir(expanded):
die("%s is not a directory" % p)
return expanded
env = jinja2.Environment()
env.filters['snake'] = snake
env.filters['pascal'] = pascal
class template(object):
def __init__(self, identifier):
# TODO: archives (zipfile, tarfile)
self.id = identifier
# is identifier a builtin?
self.path = builtins(identifier)
if os.path.isdir(self.path):
return
# is identifier a directory?
self.path = identifier
if os.path.isdir(self.path):
return
die("{} is not a valid module template".format(identifier))
def __str__(self):
return self.id
def files(self):
""" Lists the (local) path and content of all files in the template
"""
        for root, _, files in os.walk(self.path):
            for f in files:
                path = os.path.join(root, f)
                with open(path, 'rb') as fp:  # close the handle once read
                    yield path, fp.read()
def render_to(self, modname, directory, params=None):
""" Render this module template to ``dest`` with the provided
rendering parameters
"""
# overwrite with local
for path, content in self.files():
local = os.path.relpath(path, self.path)
# strip .template extension
root, ext = os.path.splitext(local)
if ext == '.template':
local = root
dest = os.path.join(directory, modname, local)
destdir = os.path.dirname(dest)
if not os.path.exists(destdir):
os.makedirs(destdir)
with open(dest, 'wb') as f:
if ext not in ('.py', '.xml', '.csv', '.js', '.rst', '.html', '.template'):
f.write(content)
else:
env.from_string(content.decode('utf-8'))\
.stream(params or {})\
.dump(f, encoding='utf-8')
def die(message, code=1):
print(message, file=sys.stderr)
sys.exit(code)
def warn(message):
# ASK: shall we use logger ?
print("WARNING:", message)
| 31.372263 | 4,298 |
3,621 |
py
|
PYTHON
|
15.0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import fnmatch
import logging
import optparse
import odoo
from . import Command
_logger = logging.getLogger(__name__)
class Populate(Command):
def run(self, cmdargs):
parser = odoo.tools.config.parser
group = optparse.OptionGroup(parser, "Populate Configuration")
group.add_option("--size", dest="population_size",
help="Populate database with auto-generated data. Value should be the population size: small, medium or large",
default='small')
group.add_option("--models",
dest='populate_models',
help="Comma separated list of model or pattern (fnmatch)")
parser.add_option_group(group)
opt = odoo.tools.config.parse_config(cmdargs)
populate_models = opt.populate_models and set(opt.populate_models.split(','))
population_size = opt.population_size
dbname = odoo.tools.config['db_name']
registry = odoo.registry(dbname)
with registry.cursor() as cr:
env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
self.populate(env, population_size, populate_models)
@classmethod
def populate(cls, env, size, model_patterns=False):
registry = env.registry
populated_models = None
try:
registry.populated_models = {} # todo master, initialize with already populated models
ordered_models = cls._get_ordered_models(env, model_patterns)
_logger.log(25, 'Populating database')
for model in ordered_models:
_logger.info('Populating database for model %s', model._name)
t0 = time.time()
registry.populated_models[model._name] = model._populate(size).ids
# todo indicate somewhere that model is populated
env.cr.commit()
model_time = time.time() - t0
if model_time > 1:
_logger.info('Populated database for model %s (total: %fs) (average: %fms per record)',
model._name, model_time, model_time / len(registry.populated_models[model._name]) * 1000)
        except Exception:  # don't swallow KeyboardInterrupt/SystemExit
_logger.exception('Something went wrong populating database')
finally:
populated_models = registry.populated_models
del registry.populated_models
return populated_models
@classmethod
def _get_ordered_models(cls, env, model_patterns=False):
_logger.info('Computing model order')
processed = set()
ordered_models = []
visited = set()
def add_model(model):
if model not in processed:
if model in visited:
raise ValueError('Cyclic dependency detected for %s' % model)
visited.add(model)
for dep in model._populate_dependencies:
add_model(env[dep])
ordered_models.append(model)
processed.add(model)
for model in env.values():
ir_model = env['ir.model'].search([('model', '=', model._name)])
if model_patterns and not any(fnmatch.fnmatch(model._name, match) for match in model_patterns):
continue
if model._transient or model._abstract:
continue
if not model_patterns and all(module.startswith('test_') for module in ir_model.modules.split(',')):
continue
add_model(model)
return ordered_models
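# Example invocation (illustrative; the database name is hypothetical):
#   odoo-bin populate -d mydb --size medium --models 'res.partner,product.*'
# populates the matching models in dependency order, committing after each one.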
| 41.147727 | 3,621 |
18,022 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
import base64
import json
import logging
import os
import shutil
import tempfile
import threading
import traceback
from xml.etree import ElementTree as ET
import zipfile
from psycopg2 import sql
from pytz import country_timezones
from functools import wraps
from contextlib import closing
from decorator import decorator
import psycopg2
import odoo
from odoo import SUPERUSER_ID
from odoo.exceptions import AccessDenied
import odoo.release
import odoo.sql_db
import odoo.tools
from odoo.sql_db import db_connect
from odoo.release import version_info
_logger = logging.getLogger(__name__)
class DatabaseExists(Warning):
pass
def check_db_management_enabled(method):
def if_db_mgt_enabled(method, self, *args, **kwargs):
if not odoo.tools.config['list_db']:
_logger.error('Database management functions blocked, admin disabled database listing')
raise AccessDenied()
return method(self, *args, **kwargs)
return decorator(if_db_mgt_enabled, method)
#----------------------------------------------------------
# Master password required
#----------------------------------------------------------
def check_super(passwd):
if passwd and odoo.tools.config.verify_admin_password(passwd):
return True
raise odoo.exceptions.AccessDenied()
# This should be moved to odoo.modules.db, alongside initialize().
def _initialize_db(id, db_name, demo, lang, user_password, login='admin', country_code=None, phone=None):
try:
db = odoo.sql_db.db_connect(db_name)
with closing(db.cursor()) as cr:
# TODO this should be removed as it is done by Registry.new().
odoo.modules.db.initialize(cr)
odoo.tools.config['load_language'] = lang
cr.commit()
registry = odoo.modules.registry.Registry.new(db_name, demo, None, update_module=True)
with closing(registry.cursor()) as cr:
env = odoo.api.Environment(cr, SUPERUSER_ID, {})
if lang:
modules = env['ir.module.module'].search([('state', '=', 'installed')])
modules._update_translations(lang)
if country_code:
country = env['res.country'].search([('code', 'ilike', country_code)])[0]
env['res.company'].browse(1).write({'country_id': country_code and country.id, 'currency_id': country_code and country.currency_id.id})
if len(country_timezones.get(country_code, [])) == 1:
users = env['res.users'].search([])
users.write({'tz': country_timezones[country_code][0]})
if phone:
env['res.company'].browse(1).write({'phone': phone})
if '@' in login:
env['res.company'].browse(1).write({'email': login})
# update admin's password and lang and login
values = {'password': user_password, 'lang': lang}
if login:
values['login'] = login
emails = odoo.tools.email_split(login)
if emails:
values['email'] = emails[0]
env.ref('base.user_admin').write(values)
cr.execute('SELECT login, password FROM res_users ORDER BY login')
cr.commit()
    except Exception:
        _logger.exception('CREATE DATABASE failed:')
def _create_empty_database(name):
db = odoo.sql_db.db_connect('postgres')
with closing(db.cursor()) as cr:
chosen_template = odoo.tools.config['db_template']
cr.execute("SELECT datname FROM pg_database WHERE datname = %s",
(name,), log_exceptions=False)
if cr.fetchall():
raise DatabaseExists("database %r already exists!" % (name,))
else:
# database-altering operations cannot be executed inside a transaction
cr.rollback()
cr._cnx.autocommit = True
# 'C' collate is only safe with template0, but provides more useful indexes
collate = sql.SQL("LC_COLLATE 'C'" if chosen_template == 'template0' else "")
cr.execute(
sql.SQL("CREATE DATABASE {} ENCODING 'unicode' {} TEMPLATE {}").format(
sql.Identifier(name), collate, sql.Identifier(chosen_template)
))
if odoo.tools.config['unaccent']:
try:
db = odoo.sql_db.db_connect(name)
with closing(db.cursor()) as cr:
cr.execute("CREATE EXTENSION IF NOT EXISTS unaccent")
cr.commit()
except psycopg2.Error:
pass
@check_db_management_enabled
def exp_create_database(db_name, demo, lang, user_password='admin', login='admin', country_code=None, phone=None):
""" Similar to exp_create but blocking."""
_logger.info('Create database `%s`.', db_name)
_create_empty_database(db_name)
    _initialize_db(id, db_name, demo, lang, user_password, login, country_code, phone)  # the first argument is unused
return True
@check_db_management_enabled
def exp_duplicate_database(db_original_name, db_name):
_logger.info('Duplicate database `%s` to `%s`.', db_original_name, db_name)
odoo.sql_db.close_db(db_original_name)
db = odoo.sql_db.db_connect('postgres')
with closing(db.cursor()) as cr:
# database-altering operations cannot be executed inside a transaction
cr._cnx.autocommit = True
_drop_conn(cr, db_original_name)
cr.execute(sql.SQL("CREATE DATABASE {} ENCODING 'unicode' TEMPLATE {}").format(
sql.Identifier(db_name),
sql.Identifier(db_original_name)
))
registry = odoo.modules.registry.Registry.new(db_name)
with registry.cursor() as cr:
# if it's a copy of a database, force generation of a new dbuuid
env = odoo.api.Environment(cr, SUPERUSER_ID, {})
env['ir.config_parameter'].init(force=True)
from_fs = odoo.tools.config.filestore(db_original_name)
to_fs = odoo.tools.config.filestore(db_name)
if os.path.exists(from_fs) and not os.path.exists(to_fs):
shutil.copytree(from_fs, to_fs)
return True
def _drop_conn(cr, db_name):
# Try to terminate all other connections that might prevent
# dropping the database
try:
# PostgreSQL 9.2 renamed pg_stat_activity.procpid to pid:
# http://www.postgresql.org/docs/9.2/static/release-9-2.html#AEN110389
pid_col = 'pid' if cr._cnx.server_version >= 90200 else 'procpid'
cr.execute("""SELECT pg_terminate_backend(%(pid_col)s)
FROM pg_stat_activity
WHERE datname = %%s AND
%(pid_col)s != pg_backend_pid()""" % {'pid_col': pid_col},
(db_name,))
except Exception:
pass
@check_db_management_enabled
def exp_drop(db_name):
if db_name not in list_dbs(True):
return False
odoo.modules.registry.Registry.delete(db_name)
odoo.sql_db.close_db(db_name)
db = odoo.sql_db.db_connect('postgres')
with closing(db.cursor()) as cr:
# database-altering operations cannot be executed inside a transaction
cr._cnx.autocommit = True
_drop_conn(cr, db_name)
try:
cr.execute(sql.SQL('DROP DATABASE {}').format(sql.Identifier(db_name)))
except Exception as e:
_logger.info('DROP DB: %s failed:\n%s', db_name, e)
raise Exception("Couldn't drop database %s: %s" % (db_name, e))
else:
_logger.info('DROP DB: %s', db_name)
fs = odoo.tools.config.filestore(db_name)
if os.path.exists(fs):
shutil.rmtree(fs)
return True
@check_db_management_enabled
def exp_dump(db_name, format):
with tempfile.TemporaryFile(mode='w+b') as t:
dump_db(db_name, t, format)
t.seek(0)
return base64.b64encode(t.read()).decode()
@check_db_management_enabled
def dump_db_manifest(cr):
    # integer division: `/` is a Python 2 leftover that produced floats here
    pg_version = "%d.%d" % divmod(cr._obj.connection.server_version // 100, 100)
cr.execute("SELECT name, latest_version FROM ir_module_module WHERE state = 'installed'")
modules = dict(cr.fetchall())
manifest = {
'odoo_dump': '1',
'db_name': cr.dbname,
'version': odoo.release.version,
'version_info': odoo.release.version_info,
'major_version': odoo.release.major_version,
'pg_version': pg_version,
'modules': modules,
}
return manifest
@check_db_management_enabled
def dump_db(db_name, stream, backup_format='zip'):
"""Dump database `db` into file-like object `stream` if stream is None
return a file object with the dump """
_logger.info('DUMP DB: %s format %s', db_name, backup_format)
cmd = ['pg_dump', '--no-owner']
cmd.append(db_name)
if backup_format == 'zip':
with tempfile.TemporaryDirectory() as dump_dir:
filestore = odoo.tools.config.filestore(db_name)
if os.path.exists(filestore):
shutil.copytree(filestore, os.path.join(dump_dir, 'filestore'))
with open(os.path.join(dump_dir, 'manifest.json'), 'w') as fh:
db = odoo.sql_db.db_connect(db_name)
with db.cursor() as cr:
json.dump(dump_db_manifest(cr), fh, indent=4)
cmd.insert(-1, '--file=' + os.path.join(dump_dir, 'dump.sql'))
odoo.tools.exec_pg_command(*cmd)
if stream:
odoo.tools.osutil.zip_dir(dump_dir, stream, include_dir=False, fnct_sort=lambda file_name: file_name != 'dump.sql')
else:
                t = tempfile.TemporaryFile()
odoo.tools.osutil.zip_dir(dump_dir, t, include_dir=False, fnct_sort=lambda file_name: file_name != 'dump.sql')
t.seek(0)
return t
else:
cmd.insert(-1, '--format=c')
stdin, stdout = odoo.tools.exec_pg_command_pipe(*cmd)
if stream:
shutil.copyfileobj(stdout, stream)
else:
return stdout
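# Example (illustrative): dump_db('mydb', open('/tmp/mydb.zip', 'wb')) produces a
# zip archive containing dump.sql, manifest.json and the filestore/ tree; any
# other backup_format (e.g. 'dump') streams raw `pg_dump --format=c` output.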
@check_db_management_enabled
def exp_restore(db_name, data, copy=False):
def chunks(d, n=8192):
for i in range(0, len(d), n):
yield d[i:i+n]
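    # e.g. chunks(b'abcdef', 2) yields b'ab', b'cd', b'ef' (illustrative), so the
    # base64 payload is decoded piecewise instead of in one large buffer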
data_file = tempfile.NamedTemporaryFile(delete=False)
try:
for chunk in chunks(data):
data_file.write(base64.b64decode(chunk))
data_file.close()
restore_db(db_name, data_file.name, copy=copy)
finally:
os.unlink(data_file.name)
return True
@check_db_management_enabled
def restore_db(db, dump_file, copy=False):
assert isinstance(db, str)
if exp_db_exist(db):
_logger.info('RESTORE DB: %s already exists', db)
raise Exception("Database already exists")
_create_empty_database(db)
filestore_path = None
with tempfile.TemporaryDirectory() as dump_dir:
if zipfile.is_zipfile(dump_file):
# v8 format
with zipfile.ZipFile(dump_file, 'r') as z:
# only extract known members!
filestore = [m for m in z.namelist() if m.startswith('filestore/')]
z.extractall(dump_dir, ['dump.sql'] + filestore)
if filestore:
filestore_path = os.path.join(dump_dir, 'filestore')
pg_cmd = 'psql'
pg_args = ['-q', '-f', os.path.join(dump_dir, 'dump.sql')]
else:
# <= 7.0 format (raw pg_dump output)
pg_cmd = 'pg_restore'
pg_args = ['--no-owner', dump_file]
args = []
args.append('--dbname=' + db)
pg_args = args + pg_args
if odoo.tools.exec_pg_command(pg_cmd, *pg_args):
raise Exception("Couldn't restore database")
registry = odoo.modules.registry.Registry.new(db)
with registry.cursor() as cr:
env = odoo.api.Environment(cr, SUPERUSER_ID, {})
if copy:
# if it's a copy of a database, force generation of a new dbuuid
env['ir.config_parameter'].init(force=True)
if filestore_path:
filestore_dest = env['ir.attachment']._filestore()
shutil.move(filestore_path, filestore_dest)
_logger.info('RESTORE DB: %s', db)
@check_db_management_enabled
def exp_rename(old_name, new_name):
odoo.modules.registry.Registry.delete(old_name)
odoo.sql_db.close_db(old_name)
db = odoo.sql_db.db_connect('postgres')
with closing(db.cursor()) as cr:
# database-altering operations cannot be executed inside a transaction
cr._cnx.autocommit = True
_drop_conn(cr, old_name)
try:
cr.execute(sql.SQL('ALTER DATABASE {} RENAME TO {}').format(sql.Identifier(old_name), sql.Identifier(new_name)))
_logger.info('RENAME DB: %s -> %s', old_name, new_name)
except Exception as e:
_logger.info('RENAME DB: %s -> %s failed:\n%s', old_name, new_name, e)
raise Exception("Couldn't rename database %s to %s: %s" % (old_name, new_name, e))
old_fs = odoo.tools.config.filestore(old_name)
new_fs = odoo.tools.config.filestore(new_name)
if os.path.exists(old_fs) and not os.path.exists(new_fs):
shutil.move(old_fs, new_fs)
return True
@check_db_management_enabled
def exp_change_admin_password(new_password):
odoo.tools.config.set_admin_password(new_password)
odoo.tools.config.save()
return True
@check_db_management_enabled
def exp_migrate_databases(databases):
for db in databases:
_logger.info('migrate database %s', db)
odoo.tools.config['update']['base'] = True
odoo.modules.registry.Registry.new(db, force_demo=False, update_module=True)
return True
#----------------------------------------------------------
# No master password required
#----------------------------------------------------------
@odoo.tools.mute_logger('odoo.sql_db')
def exp_db_exist(db_name):
    ## Not true: in fact this checks whether a connection to the database is possible; the database may exist.
try:
db = odoo.sql_db.db_connect(db_name)
with db.cursor():
return True
except Exception:
return False
def list_dbs(force=False):
if not odoo.tools.config['list_db'] and not force:
raise odoo.exceptions.AccessDenied()
if not odoo.tools.config['dbfilter'] and odoo.tools.config['db_name']:
        # In case --db-filter is not provided and --database is passed, Odoo will not
        # fetch the list of databases available on the postgres server and instead will
        # use the value of --database as a comma-separated list of exposed databases.
res = sorted(db.strip() for db in odoo.tools.config['db_name'].split(','))
return res
chosen_template = odoo.tools.config['db_template']
templates_list = tuple(set(['postgres', chosen_template]))
db = odoo.sql_db.db_connect('postgres')
with closing(db.cursor()) as cr:
try:
cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=current_user) and not datistemplate and datallowconn and datname not in %s order by datname", (templates_list,))
res = [odoo.tools.ustr(name) for (name,) in cr.fetchall()]
except Exception:
_logger.exception('Listing databases failed:')
res = []
return res
def list_db_incompatible(databases):
""""Check a list of databases if they are compatible with this version of Odoo
:param databases: A list of existing Postgresql databases
:return: A list of databases that are incompatible
"""
incompatible_databases = []
server_version = '.'.join(str(v) for v in version_info[:2])
for database_name in databases:
with closing(db_connect(database_name).cursor()) as cr:
if odoo.tools.table_exists(cr, 'ir_module_module'):
cr.execute("SELECT latest_version FROM ir_module_module WHERE name=%s", ('base',))
base_version = cr.fetchone()
if not base_version or not base_version[0]:
incompatible_databases.append(database_name)
else:
# e.g. 10.saas~15
local_version = '.'.join(base_version[0].split('.')[:2])
if local_version != server_version:
incompatible_databases.append(database_name)
else:
incompatible_databases.append(database_name)
for database_name in incompatible_databases:
# release connection
odoo.sql_db.close_db(database_name)
return incompatible_databases
def exp_list(document=False):
if not odoo.tools.config['list_db']:
raise odoo.exceptions.AccessDenied()
return list_dbs()
def exp_list_lang():
return odoo.tools.scan_languages()
def exp_list_countries():
list_countries = []
root = ET.parse(os.path.join(odoo.tools.config['root_path'], 'addons/base/data/res_country_data.xml')).getroot()
for country in root.find('data').findall('record[@model="res.country"]'):
name = country.find('field[@name="name"]').text
code = country.find('field[@name="code"]').text
list_countries.append([code, name])
return sorted(list_countries, key=lambda c: c[1])
def exp_server_version():
""" Return the version of the server
Used by the client to verify the compatibility with its own version
"""
return odoo.release.version
#----------------------------------------------------------
# db service dispatch
#----------------------------------------------------------
def dispatch(method, params):
g = globals()
exp_method_name = 'exp_' + method
if method in ['db_exist', 'list', 'list_lang', 'server_version']:
return g[exp_method_name](*params)
elif exp_method_name in g:
passwd = params[0]
params = params[1:]
check_super(passwd)
return g[exp_method_name](*params)
else:
raise KeyError("Method not found: %s" % method)
| 38.840517 | 18,022 |
56,882 |
py
|
PYTHON
|
15.0
|
#-----------------------------------------------------------
# Threaded, Gevent and Prefork Servers
#-----------------------------------------------------------
import datetime
import errno
import logging
import os
import os.path
import platform
import random
import select
import signal
import socket
import subprocess
import sys
import threading
import time
import unittest
from itertools import chain
import psutil
import werkzeug.serving
from werkzeug.debug import DebuggedApplication
if os.name == 'posix':
# Unix only for workers
import fcntl
import resource
try:
import inotify
from inotify.adapters import InotifyTrees
from inotify.constants import IN_MODIFY, IN_CREATE, IN_MOVED_TO
INOTIFY_LISTEN_EVENTS = IN_MODIFY | IN_CREATE | IN_MOVED_TO
except ImportError:
inotify = None
else:
# Windows shim
signal.SIGHUP = -1
inotify = None
if not inotify:
try:
import watchdog
from watchdog.observers import Observer
from watchdog.events import FileCreatedEvent, FileModifiedEvent, FileMovedEvent
except ImportError:
watchdog = None
# Optional process names for workers
try:
from setproctitle import setproctitle
except ImportError:
setproctitle = lambda x: None
import odoo
from odoo.modules import get_modules
from odoo.modules.registry import Registry
from odoo.release import nt_service_name
from odoo.tools import config
from odoo.tools import stripped_sys_argv, dumpstacks, log_ormcache_stats
from ..tests import loader, runner
_logger = logging.getLogger(__name__)
SLEEP_INTERVAL = 60 # 1 min
def memory_info(process):
"""
:return: the relevant memory usage according to the OS in bytes.
"""
# psutil < 2.0 does not have memory_info, >= 3.0 does not have get_memory_info
pmem = (getattr(process, 'memory_info', None) or process.get_memory_info)()
# MacOSX allocates very large vms to all processes so we only monitor the rss usage.
if platform.system() == 'Darwin':
return pmem.rss
return pmem.vms
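# Illustrative usage: memory_info(psutil.Process(os.getpid())) returns the
# virtual size on Linux/Windows and the resident size on macOS, in bytes,
# which is what the limit_memory_* checks below compare against.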
def set_limit_memory_hard():
if os.name == 'posix' and config['limit_memory_hard']:
rlimit = resource.RLIMIT_RSS if platform.system() == 'Darwin' else resource.RLIMIT_AS
soft, hard = resource.getrlimit(rlimit)
resource.setrlimit(rlimit, (config['limit_memory_hard'], hard))
def empty_pipe(fd):
try:
while os.read(fd, 1):
pass
except OSError as e:
if e.errno not in [errno.EAGAIN]:
raise
#----------------------------------------------------------
# Werkzeug WSGI servers patched
#----------------------------------------------------------
class LoggingBaseWSGIServerMixIn(object):
def handle_error(self, request, client_address):
t, e, _ = sys.exc_info()
if t == socket.error and e.errno == errno.EPIPE:
# broken pipe, ignore error
return
_logger.exception('Exception happened during processing of request from %s', client_address)
class BaseWSGIServerNoBind(LoggingBaseWSGIServerMixIn, werkzeug.serving.BaseWSGIServer):
""" werkzeug Base WSGI Server patched to skip socket binding. PreforkServer
use this class, sets the socket and calls the process_request() manually
"""
def __init__(self, app):
werkzeug.serving.BaseWSGIServer.__init__(self, "127.0.0.1", 0, app)
# Directly close the socket. It will be replaced by WorkerHTTP when processing requests
if self.socket:
self.socket.close()
def server_activate(self):
        # don't listen as we use PreforkServer#socket
pass
class RequestHandler(werkzeug.serving.WSGIRequestHandler):
def setup(self):
# timeout to avoid chrome headless preconnect during tests
if config['test_enable'] or config['test_file']:
self.timeout = 5
# flag the current thread as handling a http request
super(RequestHandler, self).setup()
me = threading.current_thread()
me.name = 'odoo.service.http.request.%s' % (me.ident,)
class ThreadedWSGIServerReloadable(LoggingBaseWSGIServerMixIn, werkzeug.serving.ThreadedWSGIServer):
""" werkzeug Threaded WSGI Server patched to allow reusing a listen socket
given by the environment, this is used by autoreload to keep the listen
socket open when a reload happens.
"""
def __init__(self, host, port, app):
# The ODOO_MAX_HTTP_THREADS environment variable allows to limit the amount of concurrent
# socket connections accepted by a threaded server, implicitly limiting the amount of
# concurrent threads running for http requests handling.
self.max_http_threads = os.environ.get("ODOO_MAX_HTTP_THREADS")
if self.max_http_threads:
try:
self.max_http_threads = int(self.max_http_threads)
except ValueError:
                # If the value can't be parsed as an integer, fall back to half of
                # db_maxconn: most requests won't borrow cursors concurrently, but
                # some controllers might allocate two or more cursors.
                self.max_http_threads = config['db_maxconn'] // 2
self.http_threads_sem = threading.Semaphore(self.max_http_threads)
super(ThreadedWSGIServerReloadable, self).__init__(host, port, app,
handler=RequestHandler)
        # See https://github.com/pallets/werkzeug/pull/770
        # This allows the request threads to not be set as daemon
        # so the server waits for them when shutting down gracefully.
self.daemon_threads = False
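    # Illustrative example: ODOO_MAX_HTTP_THREADS=20 caps concurrent request
    # threads at 20; any non-integer value falls back to db_maxconn // 2
    # (see _handle_request_noblock below for how the semaphore is applied).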
def server_bind(self):
SD_LISTEN_FDS_START = 3
if os.environ.get('LISTEN_FDS') == '1' and os.environ.get('LISTEN_PID') == str(os.getpid()):
self.reload_socket = True
self.socket = socket.fromfd(SD_LISTEN_FDS_START, socket.AF_INET, socket.SOCK_STREAM)
_logger.info('HTTP service (werkzeug) running through socket activation')
else:
self.reload_socket = False
super(ThreadedWSGIServerReloadable, self).server_bind()
_logger.info('HTTP service (werkzeug) running on %s:%s', self.server_name, self.server_port)
def server_activate(self):
if not self.reload_socket:
super(ThreadedWSGIServerReloadable, self).server_activate()
def process_request(self, request, client_address):
"""
Start a new thread to process the request.
Override the default method of class socketserver.ThreadingMixIn
to be able to get the thread object which is instantiated
and set its start time as an attribute
"""
        t = threading.Thread(target=self.process_request_thread,
                             args=(request, client_address))
t.daemon = self.daemon_threads
t.type = 'http'
t.start_time = time.time()
t.start()
# TODO: Remove this method as soon as either of the revision
# - python/cpython@8b1f52b5a93403acd7d112cd1c1bc716b31a418a for Python 3.6,
# - python/cpython@908082451382b8b3ba09ebba638db660edbf5d8e for Python 3.7,
# is included in all Python 3 releases installed on all operating systems supported by Odoo.
# These revisions are included in Python from releases 3.6.8 and Python 3.7.2 respectively.
def _handle_request_noblock(self):
"""
In the python module `socketserver` `process_request` loop,
the __shutdown_request flag is not checked between select and accept.
Thus when we set it to `True` thanks to the call `httpd.shutdown`,
a last request is accepted before exiting the loop.
We override this function to add an additional check before the accept().
"""
if self._BaseServer__shutdown_request:
return
if self.max_http_threads and not self.http_threads_sem.acquire(timeout=0.1):
# If the semaphore is full we will return immediately to the upstream (most probably
# socketserver.BaseServer's serve_forever loop which will retry immediately as the
# selector will find a pending connection to accept on the socket. There is a 100 ms
# penalty in such case in order to avoid cpu bound loop while waiting for the semaphore.
return
# upstream _handle_request_noblock will handle errors and call shutdown_request in any cases
super(ThreadedWSGIServerReloadable, self)._handle_request_noblock()
def shutdown_request(self, request):
if self.max_http_threads:
# upstream is supposed to call this function no matter what happens during processing
self.http_threads_sem.release()
super().shutdown_request(request)
#----------------------------------------------------------
# FileSystem Watcher for autoreload and cache invalidation
#----------------------------------------------------------
class FSWatcherBase(object):
def handle_file(self, path):
if path.endswith('.py') and not os.path.basename(path).startswith('.~'):
            try:
                with open(path, 'rb') as f:  # close the handle once read
                    source = f.read() + b'\n'
                compile(source, path, 'exec')
except IOError:
_logger.error('autoreload: python code change detected, IOError for %s', path)
except SyntaxError:
_logger.error('autoreload: python code change detected, SyntaxError in %s', path)
else:
if not getattr(odoo, 'phoenix', False):
_logger.info('autoreload: python code updated, autoreload activated')
restart()
return True
class FSWatcherWatchdog(FSWatcherBase):
def __init__(self):
self.observer = Observer()
for path in odoo.addons.__path__:
_logger.info('Watching addons folder %s', path)
self.observer.schedule(self, path, recursive=True)
def dispatch(self, event):
if isinstance(event, (FileCreatedEvent, FileModifiedEvent, FileMovedEvent)):
if not event.is_directory:
path = getattr(event, 'dest_path', event.src_path)
self.handle_file(path)
def start(self):
self.observer.start()
_logger.info('AutoReload watcher running with watchdog')
def stop(self):
self.observer.stop()
self.observer.join()
class FSWatcherInotify(FSWatcherBase):
def __init__(self):
self.started = False
# ignore warnings from inotify in case we have duplicate addons paths.
inotify.adapters._LOGGER.setLevel(logging.ERROR)
# recreate a list as InotifyTrees' __init__ deletes the list's items
paths_to_watch = []
for path in odoo.addons.__path__:
paths_to_watch.append(path)
_logger.info('Watching addons folder %s', path)
self.watcher = InotifyTrees(paths_to_watch, mask=INOTIFY_LISTEN_EVENTS, block_duration_s=.5)
def run(self):
_logger.info('AutoReload watcher running with inotify')
        dir_creation_events = {'IN_MOVED_TO', 'IN_CREATE'}
while self.started:
for event in self.watcher.event_gen(timeout_s=0, yield_nones=False):
(_, type_names, path, filename) = event
if 'IN_ISDIR' not in type_names:
# despite not having IN_DELETE in the watcher's mask, the
# watcher sends these events when a directory is deleted.
if 'IN_DELETE' not in type_names:
full_path = os.path.join(path, filename)
if self.handle_file(full_path):
return
elif dir_creation_events.intersection(type_names):
full_path = os.path.join(path, filename)
for root, _, files in os.walk(full_path):
for file in files:
if self.handle_file(os.path.join(root, file)):
return
def start(self):
self.started = True
self.thread = threading.Thread(target=self.run, name="odoo.service.autoreload.watcher")
self.thread.daemon = True
self.thread.start()
def stop(self):
self.started = False
self.thread.join()
del self.watcher # ensures inotify watches are freed up before reexec
#----------------------------------------------------------
# Servers: Threaded, Gevented and Prefork
#----------------------------------------------------------
class CommonServer(object):
def __init__(self, app):
self.app = app
self._on_stop_funcs = []
# config
self.interface = config['http_interface'] or '0.0.0.0'
self.port = config['http_port']
# runtime
self.pid = os.getpid()
def close_socket(self, sock):
""" Closes a socket instance cleanly
:param sock: the network socket to close
:type sock: socket.socket
"""
try:
sock.shutdown(socket.SHUT_RDWR)
except socket.error as e:
if e.errno == errno.EBADF:
# Werkzeug > 0.9.6 closes the socket itself (see commit
# https://github.com/mitsuhiko/werkzeug/commit/4d8ca089)
return
            # On OSX, shutting down one side of a socket shuts down both sides,
            # causing an error 57 'Socket is not connected' on shutdown
            # of the other side (or something), see
            # http://bugs.python.org/issue4397
            # note: stdlib fixed test, not behavior
if e.errno != errno.ENOTCONN or platform.system() not in ['Darwin', 'Windows']:
raise
sock.close()
def on_stop(self, func):
""" Register a cleanup function to be executed when the server stops """
self._on_stop_funcs.append(func)
def stop(self):
for func in self._on_stop_funcs:
try:
_logger.debug("on_close call %s", func)
func()
except Exception:
_logger.warning("Exception in %s", func.__name__, exc_info=True)
class ThreadedServer(CommonServer):
def __init__(self, app):
super(ThreadedServer, self).__init__(app)
self.main_thread_id = threading.current_thread().ident
# Variable keeping track of the number of calls to the signal handler defined
# below. This variable is monitored by ``quit_on_signals()``.
self.quit_signals_received = 0
#self.socket = None
self.httpd = None
self.limits_reached_threads = set()
self.limit_reached_time = None
def signal_handler(self, sig, frame):
if sig in [signal.SIGINT, signal.SIGTERM]:
# shutdown on kill -INT or -TERM
self.quit_signals_received += 1
if self.quit_signals_received > 1:
# logging.shutdown was already called at this point.
sys.stderr.write("Forced shutdown.\n")
os._exit(0)
# interrupt run() to start shutdown
raise KeyboardInterrupt()
elif hasattr(signal, 'SIGXCPU') and sig == signal.SIGXCPU:
sys.stderr.write("CPU time limit exceeded! Shutting down immediately\n")
sys.stderr.flush()
os._exit(0)
elif sig == signal.SIGHUP:
# restart on kill -HUP
odoo.phoenix = True
self.quit_signals_received += 1
# interrupt run() to start shutdown
raise KeyboardInterrupt()
def process_limit(self):
memory = memory_info(psutil.Process(os.getpid()))
if config['limit_memory_soft'] and memory > config['limit_memory_soft']:
_logger.warning('Server memory limit (%s) reached.', memory)
self.limits_reached_threads.add(threading.current_thread())
for thread in threading.enumerate():
if not thread.daemon or getattr(thread, 'type', None) == 'cron':
# We apply the limits on cron threads and HTTP requests,
# longpolling requests excluded.
if getattr(thread, 'start_time', None):
thread_execution_time = time.time() - thread.start_time
thread_limit_time_real = config['limit_time_real']
if (getattr(thread, 'type', None) == 'cron' and
config['limit_time_real_cron'] and config['limit_time_real_cron'] > 0):
thread_limit_time_real = config['limit_time_real_cron']
if thread_limit_time_real and thread_execution_time > thread_limit_time_real:
_logger.warning(
'Thread %s virtual real time limit (%d/%ds) reached.',
thread, thread_execution_time, thread_limit_time_real)
self.limits_reached_threads.add(thread)
# Clean-up threads that are no longer alive
# e.g. threads that exceeded their real time,
# but which finished before the server could restart.
for thread in list(self.limits_reached_threads):
if not thread.is_alive():
self.limits_reached_threads.remove(thread)
if self.limits_reached_threads:
self.limit_reached_time = self.limit_reached_time or time.time()
else:
self.limit_reached_time = None
def cron_thread(self, number):
# Steve Reich timing style with thundering herd mitigation.
#
# On startup, all workers bind on a notification channel in
# postgres so they can be woken up at will. At worst they wake
# up every SLEEP_INTERVAL with a jitter. The jitter creates a
# chorus effect that helps distribute on the timeline the moment
# when individual worker wake up.
#
        # On NOTIFY, all workers are awakened at the same time; sleeping
        # just a bit prevents them from all polling the database at the
        # exact same time, which is known as the thundering herd effect.
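        # A minimal sketch of the same pattern (illustrative, outside Odoo):
        #
        #     cr.execute("LISTEN cron_trigger")
        #     while True:
        #         select.select([pg_conn], [], [], SLEEP_INTERVAL + jitter)
        #         time.sleep(jitter / 100)   # de-synchronise the herd
        #         pg_conn.poll()             # consume pending notifications
        #         ...process due jobs...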
from odoo.addons.base.models.ir_cron import ir_cron
conn = odoo.sql_db.db_connect('postgres')
with conn.cursor() as cr:
pg_conn = cr._cnx
# LISTEN / NOTIFY doesn't work in recovery mode
cr.execute("SELECT pg_is_in_recovery()")
in_recovery = cr.fetchone()[0]
if not in_recovery:
cr.execute("LISTEN cron_trigger")
else:
_logger.warning("PG cluster in recovery mode, cron trigger not activated")
cr.commit()
while True:
select.select([pg_conn], [], [], SLEEP_INTERVAL + number)
time.sleep(number / 100)
pg_conn.poll()
registries = odoo.modules.registry.Registry.registries
_logger.debug('cron%d polling for jobs', number)
for db_name, registry in registries.d.items():
if registry.ready:
thread = threading.current_thread()
thread.start_time = time.time()
try:
ir_cron._process_jobs(db_name)
except Exception:
_logger.warning('cron%d encountered an Exception:', number, exc_info=True)
thread.start_time = None
def cron_spawn(self):
""" Start the above runner function in a daemon thread.
The thread is a typical daemon thread: it will never quit and must be
terminated when the main process exits - with no consequence (the processing
threads it spawns are not marked daemon).
"""
# Force call to strptime just before starting the cron thread
# to prevent time.strptime AttributeError within the thread.
# See: http://bugs.python.org/issue7980
datetime.datetime.strptime('2012-01-01', '%Y-%m-%d')
for i in range(odoo.tools.config['max_cron_threads']):
def target():
self.cron_thread(i)
t = threading.Thread(target=target, name="odoo.service.cron.cron%d" % i)
t.daemon = True
t.type = 'cron'
t.start()
_logger.debug("cron%d started!" % i)
def http_thread(self):
def app(e, s):
return self.app(e, s)
self.httpd = ThreadedWSGIServerReloadable(self.interface, self.port, app)
self.httpd.serve_forever()
def http_spawn(self):
t = threading.Thread(target=self.http_thread, name="odoo.service.httpd")
t.daemon = True
t.start()
def start(self, stop=False):
_logger.debug("Setting signal handlers")
set_limit_memory_hard()
if os.name == 'posix':
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
signal.signal(signal.SIGCHLD, self.signal_handler)
signal.signal(signal.SIGHUP, self.signal_handler)
signal.signal(signal.SIGXCPU, self.signal_handler)
signal.signal(signal.SIGQUIT, dumpstacks)
signal.signal(signal.SIGUSR1, log_ormcache_stats)
elif os.name == 'nt':
import win32api
win32api.SetConsoleCtrlHandler(lambda sig: self.signal_handler(sig, None), 1)
test_mode = config['test_enable'] or config['test_file']
if test_mode or (config['http_enable'] and not stop):
# some tests need the http daemon to be available...
self.http_spawn()
def stop(self):
""" Shutdown the WSGI server. Wait for non daemon threads.
"""
if getattr(odoo, 'phoenix', None):
_logger.info("Initiating server reload")
else:
_logger.info("Initiating shutdown")
_logger.info("Hit CTRL-C again or send a second signal to force the shutdown.")
stop_time = time.time()
if self.httpd:
self.httpd.shutdown()
super().stop()
# Manually join() all threads before calling sys.exit() to allow a second signal
# to trigger _force_quit() in case some non-daemon threads won't exit cleanly.
# threading.Thread.join() should not mask signals (at least in python 2.5).
me = threading.current_thread()
_logger.debug('current thread: %r', me)
for thread in threading.enumerate():
_logger.debug('process %r (%r)', thread, thread.daemon)
if (thread != me and not thread.daemon and thread.ident != self.main_thread_id and
thread not in self.limits_reached_threads):
while thread.is_alive() and (time.time() - stop_time) < 1:
# We wait for requests to finish, up to 1 second.
_logger.debug('join and sleep')
# Need a busyloop here as thread.join() masks signals
# and would prevent the forced shutdown.
thread.join(0.05)
time.sleep(0.05)
odoo.sql_db.close_all()
_logger.debug('--')
logging.shutdown()
def run(self, preload=None, stop=False):
""" Start the http server and the cron thread then wait for a signal.
The first SIGINT or SIGTERM signal will initiate a graceful shutdown while
a second one if any will force an immediate exit.
"""
self.start(stop=stop)
rc = preload_registries(preload)
if stop:
if config['test_enable']:
logger = odoo.tests.runner._logger
with Registry.registries._lock:
for db, registry in Registry.registries.d.items():
report = registry._assertion_report
log = logger.error if not report.wasSuccessful() \
else logger.warning if not report.testsRun \
else logger.info
log("%s when loading database %r", report, db)
self.stop()
return rc
self.cron_spawn()
# Wait for a first signal to be handled. (time.sleep will be interrupted
# by the signal handler)
try:
while self.quit_signals_received == 0:
self.process_limit()
if self.limit_reached_time:
has_other_valid_requests = any(
not t.daemon and
t not in self.limits_reached_threads
for t in threading.enumerate()
if getattr(t, 'type', None) == 'http')
if (not has_other_valid_requests or
(time.time() - self.limit_reached_time) > SLEEP_INTERVAL):
                        # We wait until no requests other than the ones
                        # exceeding the limits are being processed, up to
                        # 1 min, before asking for a reload.
_logger.info('Dumping stacktrace of limit exceeding threads before reloading')
dumpstacks(thread_idents=[thread.ident for thread in self.limits_reached_threads])
self.reload()
# `reload` increments `self.quit_signals_received`
# and the loop will end after this iteration,
# therefore leading to the server stop.
# `reload` also sets the `phoenix` flag
# to tell the server to restart the server after shutting down.
else:
time.sleep(1)
else:
time.sleep(SLEEP_INTERVAL)
except KeyboardInterrupt:
pass
self.stop()
def reload(self):
os.kill(self.pid, signal.SIGHUP)
class GeventServer(CommonServer):
def __init__(self, app):
super(GeventServer, self).__init__(app)
self.port = config['longpolling_port']
self.httpd = None
def process_limits(self):
restart = False
if self.ppid != os.getppid():
_logger.warning("LongPolling Parent changed", self.pid)
restart = True
memory = memory_info(psutil.Process(self.pid))
if config['limit_memory_soft'] and memory > config['limit_memory_soft']:
_logger.warning('LongPolling virtual memory limit reached: %s', memory)
restart = True
if restart:
# suicide !!
os.kill(self.pid, signal.SIGTERM)
def watchdog(self, beat=4):
import gevent
self.ppid = os.getppid()
while True:
self.process_limits()
gevent.sleep(beat)
def start(self):
import gevent
try:
from gevent.pywsgi import WSGIServer, WSGIHandler
except ImportError:
from gevent.wsgi import WSGIServer, WSGIHandler
class ProxyHandler(WSGIHandler):
""" When logging requests, try to get the client address from
the environment so we get proxyfix's modifications (if any).
            Derived from werkzeug.serving.WSGIRequestHandler.log
            / werkzeug.serving.WSGIRequestHandler.address_string
"""
def format_request(self):
old_address = self.client_address
if getattr(self, 'environ', None):
self.client_address = self.environ['REMOTE_ADDR']
elif not self.client_address:
self.client_address = '<local>'
# other cases are handled inside WSGIHandler
try:
return super().format_request()
finally:
self.client_address = old_address
set_limit_memory_hard()
if os.name == 'posix':
            # register the debugging signal handlers (stack dump / ormcache stats)
signal.signal(signal.SIGQUIT, dumpstacks)
signal.signal(signal.SIGUSR1, log_ormcache_stats)
gevent.spawn(self.watchdog)
self.httpd = WSGIServer(
(self.interface, self.port), self.app,
log=logging.getLogger('longpolling'),
error_log=logging.getLogger('longpolling'),
handler_class=ProxyHandler,
)
_logger.info('Evented Service (longpolling) running on %s:%s', self.interface, self.port)
try:
self.httpd.serve_forever()
except:
_logger.exception("Evented Service (longpolling): uncaught error during main loop")
raise
def stop(self):
import gevent
self.httpd.stop()
super().stop()
gevent.shutdown()
def run(self, preload, stop):
self.start()
self.stop()
class PreforkServer(CommonServer):
""" Multiprocessing inspired by (g)unicorn.
    PreforkServer (aka Multicorn) currently uses accept(2) as the dispatching
    method between workers, but we plan to replace it with a more intelligent
    dispatcher that will parse the first HTTP request line.
"""
def __init__(self, app):
super().__init__(app)
# config
self.population = config['workers']
self.timeout = config['limit_time_real']
self.limit_request = config['limit_request']
self.cron_timeout = config['limit_time_real_cron'] or None
if self.cron_timeout == -1:
self.cron_timeout = self.timeout
# working vars
self.beat = 4
self.socket = None
self.workers_http = {}
self.workers_cron = {}
self.workers = {}
self.generation = 0
self.queue = []
self.long_polling_pid = None
def pipe_new(self):
pipe = os.pipe()
for fd in pipe:
# non_blocking
flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
# close_on_exec
flags = fcntl.fcntl(fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
return pipe
def pipe_ping(self, pipe):
try:
os.write(pipe[1], b'.')
except IOError as e:
if e.errno not in [errno.EAGAIN, errno.EINTR]:
raise
def signal_handler(self, sig, frame):
if len(self.queue) < 5 or sig == signal.SIGCHLD:
self.queue.append(sig)
self.pipe_ping(self.pipe)
else:
_logger.warning("Dropping signal: %s", sig)
def worker_spawn(self, klass, workers_registry):
self.generation += 1
worker = klass(self)
pid = os.fork()
if pid != 0:
worker.pid = pid
self.workers[pid] = worker
workers_registry[pid] = worker
return worker
else:
worker.run()
sys.exit(0)
def long_polling_spawn(self):
nargs = stripped_sys_argv()
cmd = [sys.executable, sys.argv[0], 'gevent'] + nargs[1:]
popen = subprocess.Popen(cmd)
self.long_polling_pid = popen.pid
def worker_pop(self, pid):
if pid == self.long_polling_pid:
self.long_polling_pid = None
if pid in self.workers:
_logger.debug("Worker (%s) unregistered", pid)
try:
self.workers_http.pop(pid, None)
self.workers_cron.pop(pid, None)
u = self.workers.pop(pid)
u.close()
except OSError:
return
def worker_kill(self, pid, sig):
try:
os.kill(pid, sig)
except OSError as e:
if e.errno == errno.ESRCH:
self.worker_pop(pid)
def process_signals(self):
while len(self.queue):
sig = self.queue.pop(0)
if sig in [signal.SIGINT, signal.SIGTERM]:
raise KeyboardInterrupt
elif sig == signal.SIGHUP:
# restart on kill -HUP
odoo.phoenix = True
raise KeyboardInterrupt
elif sig == signal.SIGQUIT:
# dump stacks on kill -3
dumpstacks()
elif sig == signal.SIGUSR1:
# log ormcache stats on kill -SIGUSR1
log_ormcache_stats()
elif sig == signal.SIGTTIN:
# increase number of workers
self.population += 1
elif sig == signal.SIGTTOU:
# decrease number of workers
self.population -= 1
def process_zombie(self):
# reap dead workers
while 1:
try:
wpid, status = os.waitpid(-1, os.WNOHANG)
if not wpid:
break
if (status >> 8) == 3:
msg = "Critial worker error (%s)"
_logger.critical(msg, wpid)
raise Exception(msg % wpid)
self.worker_pop(wpid)
except OSError as e:
if e.errno == errno.ECHILD:
break
raise
def process_timeout(self):
now = time.time()
for (pid, worker) in self.workers.items():
if worker.watchdog_timeout is not None and \
(now - worker.watchdog_time) >= worker.watchdog_timeout:
_logger.error("%s (%s) timeout after %ss",
worker.__class__.__name__,
pid,
worker.watchdog_timeout)
self.worker_kill(pid, signal.SIGKILL)
def process_spawn(self):
if config['http_enable']:
while len(self.workers_http) < self.population:
self.worker_spawn(WorkerHTTP, self.workers_http)
if not self.long_polling_pid:
self.long_polling_spawn()
while len(self.workers_cron) < config['max_cron_threads']:
self.worker_spawn(WorkerCron, self.workers_cron)
def sleep(self):
try:
# map of fd -> worker
fds = {w.watchdog_pipe[0]: w for w in self.workers.values()}
fd_in = list(fds) + [self.pipe[0]]
# check for ping or internal wakeups
ready = select.select(fd_in, [], [], self.beat)
# update worker watchdogs
for fd in ready[0]:
if fd in fds:
fds[fd].watchdog_time = time.time()
empty_pipe(fd)
except select.error as e:
if e.args[0] not in [errno.EINTR]:
raise
def start(self):
        # wakeup pipe, python doesn't throw EINTR when a syscall is interrupted
        # by a signal, simulating a pseudo SA_RESTART. We write to a pipe in the
        # signal handler to overcome this behaviour
self.pipe = self.pipe_new()
# set signal handlers
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
signal.signal(signal.SIGHUP, self.signal_handler)
signal.signal(signal.SIGCHLD, self.signal_handler)
signal.signal(signal.SIGTTIN, self.signal_handler)
signal.signal(signal.SIGTTOU, self.signal_handler)
signal.signal(signal.SIGQUIT, dumpstacks)
signal.signal(signal.SIGUSR1, log_ormcache_stats)
if config['http_enable']:
# listen to socket
_logger.info('HTTP service (werkzeug) running on %s:%s', self.interface, self.port)
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.setblocking(0)
self.socket.bind((self.interface, self.port))
self.socket.listen(8 * self.population)
def stop(self, graceful=True):
if self.long_polling_pid is not None:
# FIXME make longpolling process handle SIGTERM correctly
self.worker_kill(self.long_polling_pid, signal.SIGKILL)
self.long_polling_pid = None
if self.socket:
self.socket.close()
if graceful:
_logger.info("Stopping gracefully")
super().stop()
limit = time.time() + self.timeout
for pid in self.workers:
self.worker_kill(pid, signal.SIGINT)
while self.workers and time.time() < limit:
try:
self.process_signals()
except KeyboardInterrupt:
_logger.info("Forced shutdown.")
break
self.process_zombie()
time.sleep(0.1)
else:
_logger.info("Stopping forcefully")
for pid in self.workers:
self.worker_kill(pid, signal.SIGTERM)
def run(self, preload, stop):
self.start()
rc = preload_registries(preload)
if stop:
self.stop()
return rc
        # Empty the cursor pool, we don't want cursors to be shared among forked workers.
odoo.sql_db.close_all()
_logger.debug("Multiprocess starting")
while 1:
try:
#_logger.debug("Multiprocess beat (%s)",time.time())
self.process_signals()
self.process_zombie()
self.process_timeout()
self.process_spawn()
self.sleep()
except KeyboardInterrupt:
_logger.debug("Multiprocess clean stop")
self.stop()
break
except Exception as e:
_logger.exception(e)
self.stop(False)
return -1
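# Illustrative sketch (not upstream code, POSIX only): the self-pipe trick
# used by PreforkServer.signal_handler()/sleep() above. A byte written to a
# non-blocking pipe from within a signal handler reliably wakes up select(),
# so the master loop reacts to signals without busy-waiting.
def _self_pipe_sketch(timeout=5):
    import fcntl
    import os
    import select
    import signal
    r, w = os.pipe()
    for fd in (r, w):
        fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
    signal.signal(signal.SIGUSR1, lambda sig, frame: os.write(w, b'.'))
    # select() returns as soon as SIGUSR1 is delivered (or on timeout)
    ready, _, _ = select.select([r], [], [], timeout)
    return bool(ready)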
class Worker(object):
""" Workers """
def __init__(self, multi):
self.multi = multi
self.watchdog_time = time.time()
self.watchdog_pipe = multi.pipe_new()
self.eintr_pipe = multi.pipe_new()
self.wakeup_fd_r, self.wakeup_fd_w = self.eintr_pipe
# Can be set to None if no watchdog is desired.
self.watchdog_timeout = multi.timeout
self.ppid = os.getpid()
self.pid = None
self.alive = True
# should we rename into lifetime ?
self.request_max = multi.limit_request
self.request_count = 0
def setproctitle(self, title=""):
setproctitle('odoo: %s %s %s' % (self.__class__.__name__, self.pid, title))
def close(self):
os.close(self.watchdog_pipe[0])
os.close(self.watchdog_pipe[1])
os.close(self.eintr_pipe[0])
os.close(self.eintr_pipe[1])
def signal_handler(self, sig, frame):
self.alive = False
def signal_time_expired_handler(self, n, stack):
# TODO: print actual RUSAGE_SELF (since last check_limits) instead of
# just repeating the config setting
_logger.info('Worker (%d) CPU time limit (%s) reached.', self.pid, config['limit_time_cpu'])
        # We don't commit suicide in that case, we raise an exception instead
raise Exception('CPU time limit exceeded.')
def sleep(self):
try:
select.select([self.multi.socket, self.wakeup_fd_r], [], [], self.multi.beat)
# clear wakeup pipe if we were interrupted
empty_pipe(self.wakeup_fd_r)
except select.error as e:
if e.args[0] not in [errno.EINTR]:
raise
def check_limits(self):
        # If our parent changed, commit suicide
if self.ppid != os.getppid():
_logger.info("Worker (%s) Parent changed", self.pid)
self.alive = False
# check for lifetime
if self.request_count >= self.request_max:
_logger.info("Worker (%d) max request (%s) reached.", self.pid, self.request_count)
self.alive = False
# Reset the worker if it consumes too much memory (e.g. caused by a memory leak).
memory = memory_info(psutil.Process(os.getpid()))
if config['limit_memory_soft'] and memory > config['limit_memory_soft']:
_logger.info('Worker (%d) virtual memory limit (%s) reached.', self.pid, memory)
self.alive = False # Commit suicide after the request.
set_limit_memory_hard()
# update RLIMIT_CPU so limit_time_cpu applies per unit of work
r = resource.getrusage(resource.RUSAGE_SELF)
cpu_time = r.ru_utime + r.ru_stime
soft, hard = resource.getrlimit(resource.RLIMIT_CPU)
resource.setrlimit(resource.RLIMIT_CPU, (int(cpu_time + config['limit_time_cpu']), hard))
def process_work(self):
pass
def start(self):
self.pid = os.getpid()
self.setproctitle()
_logger.info("Worker %s (%s) alive", self.__class__.__name__, self.pid)
# Reseed the random number generator
random.seed()
if self.multi.socket:
# Prevent fd inheritance: close_on_exec
flags = fcntl.fcntl(self.multi.socket, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
fcntl.fcntl(self.multi.socket, fcntl.F_SETFD, flags)
# reset blocking status
self.multi.socket.setblocking(0)
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGXCPU, self.signal_time_expired_handler)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
signal.signal(signal.SIGHUP, signal.SIG_DFL)
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
signal.signal(signal.SIGTTIN, signal.SIG_DFL)
signal.signal(signal.SIGTTOU, signal.SIG_DFL)
signal.set_wakeup_fd(self.wakeup_fd_w)
def stop(self):
pass
def run(self):
try:
self.start()
t = threading.Thread(name="Worker %s (%s) workthread" % (self.__class__.__name__, self.pid), target=self._runloop)
t.daemon = True
t.start()
t.join()
_logger.info("Worker (%s) exiting. request_count: %s, registry count: %s.",
self.pid, self.request_count,
len(odoo.modules.registry.Registry.registries))
self.stop()
except Exception:
_logger.exception("Worker (%s) Exception occurred, exiting...", self.pid)
# should we use 3 to abort everything ?
sys.exit(1)
def _runloop(self):
signal.pthread_sigmask(signal.SIG_BLOCK, {
signal.SIGXCPU,
signal.SIGINT, signal.SIGQUIT, signal.SIGUSR1,
})
try:
while self.alive:
self.check_limits()
self.multi.pipe_ping(self.watchdog_pipe)
self.sleep()
if not self.alive:
break
self.process_work()
except:
_logger.exception("Worker %s (%s) Exception occurred, exiting...", self.__class__.__name__, self.pid)
sys.exit(1)
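# Illustrative sketch (POSIX only): the rolling CPU limit applied by
# check_limits() above. The RLIMIT_CPU soft limit is moved forward from the
# CPU time already consumed, so the configured budget applies per unit of
# work rather than over the whole worker lifetime; exceeding it raises SIGXCPU.
def _rolling_cpu_limit_sketch(cpu_budget):
    import resource
    usage = resource.getrusage(resource.RUSAGE_SELF)
    consumed = usage.ru_utime + usage.ru_stime
    _soft, hard = resource.getrlimit(resource.RLIMIT_CPU)
    resource.setrlimit(resource.RLIMIT_CPU, (int(consumed + cpu_budget), hard))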
class WorkerHTTP(Worker):
""" HTTP Request workers """
def __init__(self, multi):
super(WorkerHTTP, self).__init__(multi)
        # The ODOO_HTTP_SOCKET_TIMEOUT environment variable allows controlling the socket timeout for
# extreme latency situations. It's generally better to use a good buffering reverse proxy
# to quickly free workers rather than increasing this timeout to accommodate high network
# latencies & b/w saturation. This timeout is also essential to protect against accidental
# DoS due to idle HTTP connections.
sock_timeout = os.environ.get("ODOO_HTTP_SOCKET_TIMEOUT")
self.sock_timeout = float(sock_timeout) if sock_timeout else 2
def process_request(self, client, addr):
client.setblocking(1)
client.settimeout(self.sock_timeout)
client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        # Prevent fd inheritance: close_on_exec
flags = fcntl.fcntl(client, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
fcntl.fcntl(client, fcntl.F_SETFD, flags)
# do request using BaseWSGIServerNoBind monkey patched with socket
self.server.socket = client
# tolerate broken pipe when the http client closes the socket before
# receiving the full reply
try:
self.server.process_request(client, addr)
except IOError as e:
if e.errno != errno.EPIPE:
raise
self.request_count += 1
def process_work(self):
try:
client, addr = self.multi.socket.accept()
self.process_request(client, addr)
except socket.error as e:
if e.errno not in (errno.EAGAIN, errno.ECONNABORTED):
raise
def start(self):
Worker.start(self)
self.server = BaseWSGIServerNoBind(self.multi.app)
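# Usage note (shell example, hypothetical values): the per-connection socket
# timeout configured in WorkerHTTP.__init__ above can be raised for very
# high-latency links, e.g.
#   ODOO_HTTP_SOCKET_TIMEOUT=10 ./odoo-bin --workers=4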
class WorkerCron(Worker):
""" Cron workers """
def __init__(self, multi):
super(WorkerCron, self).__init__(multi)
        # process_work() below processes a single database per call. The
        # variable db_index keeps track of the next database to process.
self.db_index = 0
self.watchdog_timeout = multi.cron_timeout # Use a distinct value for CRON Worker
def sleep(self):
# Really sleep once all the databases have been processed.
if self.db_index == 0:
            interval = SLEEP_INTERVAL + self.pid % 10  # chorus effect: stagger cron workers' wake-ups
# simulate interruptible sleep with select(wakeup_fd, timeout)
try:
select.select([self.wakeup_fd_r, self.dbcursor._cnx], [], [], interval)
# clear pg_conn/wakeup pipe if we were interrupted
time.sleep(self.pid / 100 % .1)
self.dbcursor._cnx.poll()
empty_pipe(self.wakeup_fd_r)
except select.error as e:
if e.args[0] != errno.EINTR:
raise
def _db_list(self):
if config['db_name']:
db_names = config['db_name'].split(',')
else:
db_names = odoo.service.db.list_dbs(True)
return db_names
def process_work(self):
rpc_request = logging.getLogger('odoo.netsvc.rpc.request')
rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
_logger.debug("WorkerCron (%s) polling for jobs", self.pid)
db_names = self._db_list()
if len(db_names):
self.db_index = (self.db_index + 1) % len(db_names)
db_name = db_names[self.db_index]
self.setproctitle(db_name)
if rpc_request_flag:
start_time = time.time()
start_memory = memory_info(psutil.Process(os.getpid()))
from odoo.addons import base
base.models.ir_cron.ir_cron._process_jobs(db_name)
# dont keep cursors in multi database mode
if len(db_names) > 1:
odoo.sql_db.close_db(db_name)
if rpc_request_flag:
run_time = time.time() - start_time
end_memory = memory_info(psutil.Process(os.getpid()))
vms_diff = (end_memory - start_memory) / 1024
logline = '%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % \
(db_name, run_time, start_memory / 1024, end_memory / 1024, vms_diff)
_logger.debug("WorkerCron (%s) %s", self.pid, logline)
self.request_count += 1
if self.request_count >= self.request_max and self.request_max < len(db_names):
_logger.error("There are more dabatases to process than allowed "
"by the `limit_request` configuration variable: %s more.",
len(db_names) - self.request_max)
else:
self.db_index = 0
def start(self):
os.nice(10) # mommy always told me to be nice with others...
Worker.start(self)
if self.multi.socket:
self.multi.socket.close()
dbconn = odoo.sql_db.db_connect('postgres')
self.dbcursor = dbconn.cursor()
# LISTEN / NOTIFY doesn't work in recovery mode
self.dbcursor.execute("SELECT pg_is_in_recovery()")
in_recovery = self.dbcursor.fetchone()[0]
if not in_recovery:
self.dbcursor.execute("LISTEN cron_trigger")
else:
_logger.warning("PG cluster in recovery mode, cron trigger not activated")
self.dbcursor.commit()
def stop(self):
super().stop()
self.dbcursor.close()
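# Illustrative sketch (assumes psycopg2 and a reachable database): the
# LISTEN/NOTIFY wake-up used by WorkerCron above. select() on the raw
# connection returns as soon as another session runs `NOTIFY cron_trigger`,
# which is how a cron worker can be woken up before SLEEP_INTERVAL elapses.
def _cron_trigger_sketch(dsn, timeout=60):
    import select
    import psycopg2
    conn = psycopg2.connect(dsn)
    conn.autocommit = True
    with conn.cursor() as cur:
        cur.execute("LISTEN cron_trigger")
    select.select([conn], [], [], timeout)  # block until notified or timeout
    conn.poll()                             # make pending notifications visible
    return [n.channel for n in conn.notifies]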
#----------------------------------------------------------
# start/stop public api
#----------------------------------------------------------
server = None
def load_server_wide_modules():
server_wide_modules = {'base', 'web'} | set(odoo.conf.server_wide_modules)
for m in server_wide_modules:
try:
odoo.modules.module.load_openerp_module(m)
except Exception:
msg = ''
if m == 'web':
msg = """
The `web` module is provided by the addons found in the `openerp-web` project.
Maybe you forgot to add those addons in your addons_path configuration."""
_logger.exception('Failed to load server-wide module `%s`.%s', m, msg)
def _reexec(updated_modules=None):
"""reexecute openerp-server process with (nearly) the same arguments"""
if odoo.tools.osutil.is_running_as_nt_service():
subprocess.call('net stop {0} && net start {0}'.format(nt_service_name), shell=True)
exe = os.path.basename(sys.executable)
args = stripped_sys_argv()
if updated_modules:
args += ["-u", ','.join(updated_modules)]
if not args or args[0] != exe:
args.insert(0, exe)
    # We should keep the LISTEN_* environment variables in order to support socket activation on reexec
os.execve(sys.executable, args, os.environ)
def load_test_file_py(registry, test_file):
from odoo.tests.common import OdooSuite
threading.current_thread().testing = True
try:
test_path, _ = os.path.splitext(os.path.abspath(test_file))
for mod in [m for m in get_modules() if '/%s/' % m in test_file]:
for mod_mod in loader.get_test_modules(mod):
mod_path, _ = os.path.splitext(getattr(mod_mod, '__file__', ''))
if test_path == config._normalize(mod_path):
tests = loader.unwrap_suite(
unittest.TestLoader().loadTestsFromModule(mod_mod))
suite = OdooSuite(tests)
_logger.log(logging.INFO, 'running tests %s.', mod_mod.__name__)
suite(registry._assertion_report)
if not registry._assertion_report.wasSuccessful():
_logger.error('%s: at least one error occurred in a test', test_file)
return
finally:
threading.current_thread().testing = False
def preload_registries(dbnames):
""" Preload a registries, possibly run a test file."""
# TODO: move all config checks to args dont check tools.config here
dbnames = dbnames or []
rc = 0
for dbname in dbnames:
try:
update_module = config['init'] or config['update']
registry = Registry.new(dbname, update_module=update_module)
# run test_file if provided
if config['test_file']:
test_file = config['test_file']
if not os.path.isfile(test_file):
_logger.warning('test file %s cannot be found', test_file)
elif not test_file.endswith('py'):
_logger.warning('test file %s is not a python file', test_file)
else:
_logger.info('loading test file %s', test_file)
load_test_file_py(registry, test_file)
# run post-install tests
if config['test_enable']:
t0 = time.time()
t0_sql = odoo.sql_db.sql_counter
module_names = (registry.updated_modules if update_module else
sorted(registry._init_modules))
_logger.info("Starting post tests")
tests_before = registry._assertion_report.testsRun
result = loader.run_suite(loader.make_suite(module_names, 'post_install'))
registry._assertion_report.update(result)
_logger.info("%d post-tests in %.2fs, %s queries",
registry._assertion_report.testsRun - tests_before,
time.time() - t0,
odoo.sql_db.sql_counter - t0_sql)
if not registry._assertion_report.wasSuccessful():
rc += 1
except Exception:
_logger.critical('Failed to initialize database `%s`.', dbname, exc_info=True)
return -1
return rc
def start(preload=None, stop=False):
""" Start the odoo http server and cron processor.
"""
global server
load_server_wide_modules()
if odoo.evented:
server = GeventServer(odoo.service.wsgi_server.application)
elif config['workers']:
if config['test_enable'] or config['test_file']:
_logger.warning("Unit testing in workers mode could fail; use --workers 0.")
server = PreforkServer(odoo.service.wsgi_server.application)
# Workaround for Python issue24291, fixed in 3.6 (see Python issue26721)
if sys.version_info[:2] == (3,5):
# turn on buffering also for wfile, to avoid partial writes (Default buffer = 8k)
werkzeug.serving.WSGIRequestHandler.wbufsize = -1
else:
if platform.system() == "Linux" and sys.maxsize > 2**32 and "MALLOC_ARENA_MAX" not in os.environ:
# glibc's malloc() uses arenas [1] in order to efficiently handle memory allocation of multi-threaded
# applications. This allows better memory allocation handling in case of multiple threads that
# would be using malloc() concurrently [2].
            # Due to Python's GIL, this optimization has no effect on multithreaded
            # Python programs. Unfortunately, a downside of creating one arena per
            # cpu core is the increase in virtual memory, which Odoo relies upon in
            # order to limit the memory usage for threaded workers.
            # On 32bit systems the default size of an arena is 512K while on 64bit systems it's 64M [3],
            # hence a threaded worker will quickly reach its default memory soft limit upon concurrent requests.
            # We therefore set the maximum arenas allowed to 2 unless the MALLOC_ARENA_MAX env variable is set.
            # Note: Setting MALLOC_ARENA_MAX=0 explicitly restores the default glibc malloc() behaviour.
#
# [1] https://sourceware.org/glibc/wiki/MallocInternals#Arenas_and_Heaps
# [2] https://www.gnu.org/software/libc/manual/html_node/The-GNU-Allocator.html
# [3] https://sourceware.org/git/?p=glibc.git;a=blob;f=malloc/malloc.c;h=00ce48c;hb=0a8262a#l862
try:
import ctypes
libc = ctypes.CDLL("libc.so.6")
M_ARENA_MAX = -8
assert libc.mallopt(ctypes.c_int(M_ARENA_MAX), ctypes.c_int(2))
except Exception:
_logger.warning("Could not set ARENA_MAX through mallopt()")
server = ThreadedServer(odoo.service.wsgi_server.application)
watcher = None
if 'reload' in config['dev_mode'] and not odoo.evented:
if inotify:
watcher = FSWatcherInotify()
watcher.start()
elif watchdog:
watcher = FSWatcherWatchdog()
watcher.start()
else:
if os.name == 'posix' and platform.system() != 'Darwin':
module = 'inotify'
else:
module = 'watchdog'
_logger.warning("'%s' module not installed. Code autoreload feature is disabled", module)
if 'werkzeug' in config['dev_mode']:
server.app = DebuggedApplication(server.app, evalex=True)
rc = server.run(preload, stop)
if watcher:
watcher.stop()
# like the legend of the phoenix, all ends with beginnings
if getattr(odoo, 'phoenix', False):
_reexec()
return rc if rc else 0
def restart():
""" Restart the server
"""
if os.name == 'nt':
# run in a thread to let the current thread return response to the caller.
threading.Thread(target=_reexec).start()
else:
os.kill(server.pid, signal.SIGHUP)
| 41.428988 | 56,882 |
650 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import odoo
import odoo.exceptions
def check(db, uid, passwd):
res_users = odoo.registry(db)['res.users']
return res_users.check(db, uid, passwd)
def compute_session_token(session, env):
self = env['res.users'].browse(session.uid)
return self._compute_session_token(session.sid)
def check_session(session, env):
self = env['res.users'].browse(session.uid)
expected = self._compute_session_token(session.sid)
if expected and odoo.tools.misc.consteq(expected, session.session_token):
return True
return False
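# Illustrative sketch (standard library only): why a constant-time comparison
# is used above. hmac.compare_digest takes the same time whether the inputs
# differ early or late, so an attacker cannot recover a valid session token
# byte-by-byte from response timing. odoo.tools.misc.consteq plays that role.
def _consteq_sketch(expected, provided):
    import hmac
    return hmac.compare_digest(expected, provided)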
| 32.5 | 650 |
8,339 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
from contextlib import closing
from functools import wraps
import logging
from psycopg2 import IntegrityError, OperationalError, errorcodes
import random
import threading
import time
import odoo
from odoo.exceptions import UserError, ValidationError
from odoo.models import check_method_name
from odoo.tools.translate import _, translate, translate_sql_constraint
from . import security
from ..tools import traverse_containers, lazy
_logger = logging.getLogger(__name__)
PG_CONCURRENCY_ERRORS_TO_RETRY = (errorcodes.LOCK_NOT_AVAILABLE, errorcodes.SERIALIZATION_FAILURE, errorcodes.DEADLOCK_DETECTED)
MAX_TRIES_ON_CONCURRENCY_FAILURE = 5
def dispatch(method, params):
    (db, uid, passwd) = params[0], int(params[1]), params[2]
# set uid tracker - cleaned up at the WSGI
# dispatching phase in odoo.service.wsgi_server.application
threading.current_thread().uid = uid
params = params[3:]
if method == 'obj_list':
raise NameError("obj_list has been discontinued via RPC as of 6.0, please query ir.model directly!")
if method not in ['execute', 'execute_kw']:
raise NameError("Method not available %s" % method)
    security.check(db, uid, passwd)
registry = odoo.registry(db).check_signaling()
fn = globals()[method]
with registry.manage_changes():
res = fn(db, uid, *params)
return res
def check(f):
@wraps(f)
def wrapper(___dbname, *args, **kwargs):
""" Wraps around OSV functions and normalises a few exceptions
"""
        dbname = ___dbname  # NOTE: this forbids using "___dbname" as an argument in http routes
def tr(src, ttype):
# We try to do the same as the _(), but without the frame
# inspection, since we already are wrapping an osv function
# trans_obj = self.get('ir.translation') cannot work yet :(
ctx = {}
if not kwargs:
if args and isinstance(args[-1], dict):
ctx = args[-1]
elif isinstance(kwargs, dict):
if 'context' in kwargs:
ctx = kwargs['context']
elif 'kwargs' in kwargs and kwargs['kwargs'].get('context'):
# http entry points such as call_kw()
ctx = kwargs['kwargs'].get('context')
else:
try:
from odoo.http import request
ctx = request.env.context
except Exception:
pass
lang = ctx and ctx.get('lang')
if not (lang or hasattr(src, '__call__')):
return src
# We open a *new* cursor here, one reason is that failed SQL
# queries (as in IntegrityError) will invalidate the current one.
with closing(odoo.sql_db.db_connect(dbname).cursor()) as cr:
if ttype == 'sql_constraint':
res = translate_sql_constraint(cr, key=key, lang=lang)
else:
res = translate(cr, name=False, source_type=ttype,
lang=lang, source=src)
return res or src
def _(src):
return tr(src, 'code')
tries = 0
while True:
try:
if odoo.registry(dbname)._init and not odoo.tools.config['test_enable']:
raise odoo.exceptions.Warning('Currently, this database is not fully loaded and can not be used.')
return f(dbname, *args, **kwargs)
except OperationalError as e:
# Automatically retry the typical transaction serialization errors
if e.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY:
raise
if tries >= MAX_TRIES_ON_CONCURRENCY_FAILURE:
_logger.info("%s, maximum number of tries reached" % errorcodes.lookup(e.pgcode))
raise
wait_time = random.uniform(0.0, 2 ** tries)
tries += 1
_logger.info("%s, retry %d/%d in %.04f sec..." % (errorcodes.lookup(e.pgcode), tries, MAX_TRIES_ON_CONCURRENCY_FAILURE, wait_time))
time.sleep(wait_time)
except IntegrityError as inst:
registry = odoo.registry(dbname)
key = inst.diag.constraint_name
if key in registry._sql_constraints:
raise ValidationError(tr(key, 'sql_constraint') or inst.pgerror)
if inst.pgcode in (errorcodes.NOT_NULL_VIOLATION, errorcodes.FOREIGN_KEY_VIOLATION, errorcodes.RESTRICT_VIOLATION):
msg = _('The operation cannot be completed:')
_logger.debug("IntegrityError", exc_info=True)
try:
# Get corresponding model and field
model = field = None
for name, rclass in registry.items():
if inst.diag.table_name == rclass._table:
model = rclass
field = model._fields.get(inst.diag.column_name)
break
if inst.pgcode == errorcodes.NOT_NULL_VIOLATION:
# This is raised when a field is set with `required=True`. 2 cases:
# - Create/update: a mandatory field is not set.
                            # - Delete: another model has a non-nullable reference to the deleted record.
msg += '\n'
msg += _(
'- Create/update: a mandatory field is not set.\n'
'- Delete: another model requires the record being deleted. If possible, archive it instead.'
)
if model:
msg += '\n\n{} {} ({}), {} {} ({})'.format(
_('Model:'), model._description, model._name,
_('Field:'), field.string if field else _('Unknown'), field.name if field else _('Unknown'),
)
elif inst.pgcode == errorcodes.FOREIGN_KEY_VIOLATION:
# This is raised when a field is set with `ondelete='restrict'`, at
# unlink only.
msg += _(' another model requires the record being deleted. If possible, archive it instead.')
constraint = inst.diag.constraint_name
if model or constraint:
msg += '\n\n{} {} ({}), {} {}'.format(
_('Model:'), model._description if model else _('Unknown'), model._name if model else _('Unknown'),
_('Constraint:'), constraint if constraint else _('Unknown'),
)
except Exception:
pass
raise ValidationError(msg)
else:
raise ValidationError(inst.args[0])
return wrapper
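# Illustrative sketch (standalone, not upstream code): the retry policy
# implemented by the wrapper above -- full-jitter exponential backoff over
# transient concurrency errors, giving up after a bounded number of tries.
def _retry_sketch(fn, is_transient, max_tries=5):
    import random
    import time
    tries = 0
    while True:
        try:
            return fn()
        except Exception as exc:
            if not is_transient(exc) or tries >= max_tries:
                raise
            wait_time = random.uniform(0.0, 2 ** tries)
            tries += 1
            time.sleep(wait_time)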
def execute_cr(cr, uid, obj, method, *args, **kw):
# clean cache etc if we retry the same transaction
cr.reset()
recs = odoo.api.Environment(cr, uid, {}).get(obj)
if recs is None:
raise UserError(_("Object %s doesn't exist", obj))
result = odoo.api.call_kw(recs, method, args, kw)
# force evaluation of lazy values before the cursor is closed, as it would
# error afterwards if the lazy isn't already evaluated (and cached)
for l in traverse_containers(result, lazy):
_0 = l._value
return result
def execute_kw(db, uid, obj, method, args, kw=None):
return execute(db, uid, obj, method, *args, **kw or {})
@check
def execute(db, uid, obj, method, *args, **kw):
threading.current_thread().dbname = db
with odoo.registry(db).cursor() as cr:
check_method_name(method)
res = execute_cr(cr, uid, obj, method, *args, **kw)
if res is None:
            _logger.info('The method %s of the object %s cannot return `None`!', method, obj)
return res
| 46.586592 | 8,339 |
1,550 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
import logging
import odoo.release
import odoo.tools
from odoo.exceptions import AccessDenied
from odoo.tools.translate import _
_logger = logging.getLogger(__name__)
RPC_VERSION_1 = {
'server_version': odoo.release.version,
'server_version_info': odoo.release.version_info,
'server_serie': odoo.release.serie,
'protocol_version': 1,
}
def exp_login(db, login, password):
return exp_authenticate(db, login, password, None)
def exp_authenticate(db, login, password, user_agent_env):
if not user_agent_env:
user_agent_env = {}
res_users = odoo.registry(db)['res.users']
try:
return res_users.authenticate(db, login, password, {**user_agent_env, 'interactive': False})
except AccessDenied:
return False
def exp_version():
return RPC_VERSION_1
def exp_about(extended=False):
"""Return information about the OpenERP Server.
@param extended: if True then return version info
@return string if extended is False else tuple
"""
info = _('See http://openerp.com')
if extended:
return info, odoo.release.version
return info
def exp_set_loglevel(loglevel, logger=None):
# TODO Previously, the level was set on the now deprecated
# `odoo.netsvc.Logger` class.
return True
def dispatch(method, params):
g = globals()
exp_method_name = 'exp_' + method
if exp_method_name in g:
return g[exp_method_name](*params)
else:
raise Exception("Method not found: %s" % method)
| 26.724138 | 1,550 |
4,462 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
"""
WSGI stack, common code.
"""
import logging
import sys
import threading
import traceback
from xmlrpc import client as xmlrpclib
import werkzeug.exceptions
import werkzeug.wrappers
import werkzeug.serving
import odoo
from odoo.tools import config
_logger = logging.getLogger(__name__)
# XML-RPC fault codes. Some care must be taken when changing these: the
# constants are also defined client-side and must remain in sync.
# User code must use the exceptions defined in ``odoo.exceptions`` (not
# create directly ``xmlrpclib.Fault`` objects).
RPC_FAULT_CODE_CLIENT_ERROR = 1 # indistinguishable from app. error.
RPC_FAULT_CODE_APPLICATION_ERROR = 1
RPC_FAULT_CODE_WARNING = 2
RPC_FAULT_CODE_ACCESS_DENIED = 3
RPC_FAULT_CODE_ACCESS_ERROR = 4
def xmlrpc_handle_exception_int(e):
if isinstance(e, odoo.exceptions.RedirectWarning):
fault = xmlrpclib.Fault(RPC_FAULT_CODE_WARNING, str(e))
elif isinstance(e, odoo.exceptions.AccessError):
fault = xmlrpclib.Fault(RPC_FAULT_CODE_ACCESS_ERROR, str(e))
elif isinstance(e, odoo.exceptions.AccessDenied):
fault = xmlrpclib.Fault(RPC_FAULT_CODE_ACCESS_DENIED, str(e))
elif isinstance(e, odoo.exceptions.UserError):
fault = xmlrpclib.Fault(RPC_FAULT_CODE_WARNING, str(e))
else:
info = sys.exc_info()
# Which one is the best ?
formatted_info = "".join(traceback.format_exception(*info))
#formatted_info = odoo.tools.exception_to_unicode(e) + '\n' + info
fault = xmlrpclib.Fault(RPC_FAULT_CODE_APPLICATION_ERROR, formatted_info)
return xmlrpclib.dumps(fault, allow_none=None)
def xmlrpc_handle_exception_string(e):
if isinstance(e, odoo.exceptions.RedirectWarning):
fault = xmlrpclib.Fault('warning -- Warning\n\n' + str(e), '')
elif isinstance(e, odoo.exceptions.MissingError):
fault = xmlrpclib.Fault('warning -- MissingError\n\n' + str(e), '')
elif isinstance(e, odoo.exceptions.AccessError):
fault = xmlrpclib.Fault('warning -- AccessError\n\n' + str(e), '')
elif isinstance(e, odoo.exceptions.AccessDenied):
fault = xmlrpclib.Fault('AccessDenied', str(e))
elif isinstance(e, odoo.exceptions.UserError):
fault = xmlrpclib.Fault('warning -- UserError\n\n' + str(e), '')
#InternalError
else:
info = sys.exc_info()
formatted_info = "".join(traceback.format_exception(*info))
fault = xmlrpclib.Fault(odoo.tools.exception_to_unicode(e), formatted_info)
return xmlrpclib.dumps(fault, allow_none=None, encoding=None)
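# Illustrative sketch (client side, standard library only, hypothetical URL
# and credentials): how the fault codes above surface in an XML-RPC client.
# The numeric codes must stay in sync with the constants defined at the top
# of this module.
def _xmlrpc_fault_sketch(url, db, uid, password):
    from xmlrpc import client as xmlrpclib
    proxy = xmlrpclib.ServerProxy(url)
    try:
        return proxy.execute_kw(db, uid, password, 'res.partner', 'read', [[1]])
    except xmlrpclib.Fault as fault:
        if fault.faultCode == RPC_FAULT_CODE_ACCESS_DENIED:
            raise PermissionError(fault.faultString)
        raise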
def application_unproxied(environ, start_response):
""" WSGI entry point."""
# cleanup db/uid trackers - they're set at HTTP dispatch in
# web.session.OpenERPSession.send() and at RPC dispatch in
# odoo.service.web_services.objects_proxy.dispatch().
# /!\ The cleanup cannot be done at the end of this `application`
# method because werkzeug still produces relevant logging afterwards
if hasattr(threading.current_thread(), 'uid'):
del threading.current_thread().uid
if hasattr(threading.current_thread(), 'dbname'):
del threading.current_thread().dbname
if hasattr(threading.current_thread(), 'url'):
del threading.current_thread().url
result = odoo.http.root(environ, start_response)
if result is not None:
return result
    # odoo.http.root did not handle the request.
return werkzeug.exceptions.NotFound("No handler found.\n")(environ, start_response)
try:
# werkzeug >= 0.15
from werkzeug.middleware.proxy_fix import ProxyFix as ProxyFix_
# 0.15 also supports port and prefix, but 0.14 only forwarded for, proto
# and host so replicate that
ProxyFix = lambda app: ProxyFix_(app, x_for=1, x_proto=1, x_host=1)
except ImportError:
# werkzeug < 0.15
from werkzeug.contrib.fixers import ProxyFix
def application(environ, start_response):
# FIXME: is checking for the presence of HTTP_X_FORWARDED_HOST really useful?
# we're ignoring the user configuration, and that means we won't
# support the standardised Forwarded header once werkzeug supports
# it
if config['proxy_mode'] and 'HTTP_X_FORWARDED_HOST' in environ:
return ProxyFix(application_unproxied)(environ, start_response)
else:
return application_unproxied(environ, start_response)
| 39.839286 | 4,462 |
6,238 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import odoo.modules
import logging
_logger = logging.getLogger(__name__)
def is_initialized(cr):
""" Check if a database has been initialized for the ORM.
The database can be initialized with the 'initialize' function below.
"""
return odoo.tools.table_exists(cr, 'ir_module_module')
def initialize(cr):
""" Initialize a database with for the ORM.
This executes base/data/base_data.sql, creates the ir_module_categories
(taken from each module descriptor file), and creates the ir_module_module
and ir_model_data entries.
"""
f = odoo.modules.get_module_resource('base', 'data', 'base_data.sql')
if not f:
m = "File not found: 'base.sql' (provided by module 'base')."
_logger.critical(m)
raise IOError(m)
with odoo.tools.misc.file_open(f) as base_sql_file:
cr.execute(base_sql_file.read()) # pylint: disable=sql-injection
for i in odoo.modules.get_modules():
mod_path = odoo.modules.get_module_path(i)
if not mod_path:
continue
        # This will raise an exception if the descriptor file is missing or unreadable.
info = odoo.modules.load_information_from_description_file(i)
if not info:
continue
categories = info['category'].split('/')
category_id = create_categories(cr, categories)
if info['installable']:
state = 'uninstalled'
else:
state = 'uninstallable'
cr.execute('INSERT INTO ir_module_module \
(author, website, name, shortdesc, description, \
category_id, auto_install, state, web, license, application, icon, sequence, summary) \
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id', (
info['author'],
info['website'], i, info['name'],
info['description'], category_id,
info['auto_install'] is not False, state,
info['web'],
info['license'],
info['application'], info['icon'],
info['sequence'], info['summary']))
id = cr.fetchone()[0]
cr.execute('INSERT INTO ir_model_data \
(name,model,module, res_id, noupdate) VALUES (%s,%s,%s,%s,%s)', (
'module_'+i, 'ir.module.module', 'base', id, True))
dependencies = info['depends']
for d in dependencies:
cr.execute(
'INSERT INTO ir_module_module_dependency (module_id, name, auto_install_required)'
' VALUES (%s, %s, %s)',
(id, d, d in (info['auto_install'] or ()))
)
# Install recursively all auto-installing modules
while True:
# this selects all the auto_install modules whose auto_install_required
# deps are marked as to install
cr.execute("""
SELECT m.name FROM ir_module_module m
WHERE m.auto_install
AND state != 'to install'
AND NOT EXISTS (
SELECT 1 FROM ir_module_module_dependency d
JOIN ir_module_module mdep ON (d.name = mdep.name)
WHERE d.module_id = m.id
AND d.auto_install_required
AND mdep.state != 'to install'
)""")
to_auto_install = [x[0] for x in cr.fetchall()]
        # however if the module has non-required deps we need to install
        # those too, so merge in the dependencies whose dependent module is
        # either 'to install' or already in to_auto_install
cr.execute("""
SELECT d.name FROM ir_module_module_dependency d
JOIN ir_module_module m ON (d.module_id = m.id)
JOIN ir_module_module mdep ON (d.name = mdep.name)
WHERE (m.state = 'to install' OR m.name = any(%s))
-- don't re-mark marked modules
AND NOT (mdep.state = 'to install' OR mdep.name = any(%s))
""", [to_auto_install, to_auto_install])
to_auto_install.extend(x[0] for x in cr.fetchall())
if not to_auto_install: break
cr.execute("""UPDATE ir_module_module SET state='to install' WHERE name in %s""", (tuple(to_auto_install),))
def create_categories(cr, categories):
""" Create the ir_module_category entries for some categories.
categories is a list of strings forming a single category with its
parent categories, like ['Grand Parent', 'Parent', 'Child'].
Return the database id of the (last) category.
"""
p_id = None
category = []
while categories:
category.append(categories[0])
xml_id = 'module_category_' + ('_'.join(x.lower() for x in category)).replace('&', 'and').replace(' ', '_')
# search via xml_id (because some categories are renamed)
cr.execute("SELECT res_id FROM ir_model_data WHERE name=%s AND module=%s AND model=%s",
(xml_id, "base", "ir.module.category"))
c_id = cr.fetchone()
if not c_id:
cr.execute('INSERT INTO ir_module_category \
(name, parent_id) \
VALUES (%s, %s) RETURNING id', (categories[0], p_id))
c_id = cr.fetchone()[0]
cr.execute('INSERT INTO ir_model_data (module, name, res_id, model, noupdate) \
VALUES (%s, %s, %s, %s, %s)', ('base', xml_id, c_id, 'ir.module.category', True))
else:
c_id = c_id[0]
p_id = c_id
categories = categories[1:]
return p_id
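# Usage sketch (assumes an open cursor `cr` on an initialized database): the
# whole parent chain is created on demand and the id of the leaf is returned.
#   crm_cat_id = create_categories(cr, ['Sales', 'CRM'])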
def has_unaccent(cr):
""" Test if the database has an unaccent function.
The unaccent is supposed to be provided by the PostgreSQL unaccent contrib
module but any similar function will be picked by OpenERP.
"""
cr.execute("SELECT proname FROM pg_proc WHERE proname='unaccent'")
return len(cr.fetchall()) > 0
def has_trigram(cr):
""" Test if the database has the a word_similarity function.
The word_similarity is supposed to be provided by the PostgreSQL built-in
pg_trgm module but any similar function will be picked by Odoo.
"""
cr.execute("SELECT proname FROM pg_proc WHERE proname='word_similarity'")
return len(cr.fetchall()) > 0
| 39.481013 | 6,238 |
26,659 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
""" Modules (also called addons) management.
"""
import itertools
import logging
import sys
import threading
import time
import odoo
import odoo.modules.db
import odoo.modules.graph
import odoo.modules.migration
import odoo.modules.registry
from .. import SUPERUSER_ID, api, tools
from .module import adapt_version, initialize_sys_path, load_openerp_module
_logger = logging.getLogger(__name__)
_test_logger = logging.getLogger('odoo.tests')
def load_data(cr, idref, mode, kind, package):
"""
kind: data, demo, test, init_xml, update_xml, demo_xml.
noupdate is False, unless it is demo data or it is csv data in
init mode.
:returns: Whether a file was loaded
:rtype: bool
"""
def _get_files_of_kind(kind):
if kind == 'demo':
kind = ['demo_xml', 'demo']
elif kind == 'data':
kind = ['init_xml', 'update_xml', 'data']
if isinstance(kind, str):
kind = [kind]
files = []
for k in kind:
for f in package.data[k]:
files.append(f)
if k.endswith('_xml') and not (k == 'init_xml' and not f.endswith('.xml')):
# init_xml, update_xml and demo_xml are deprecated except
# for the case of init_xml with csv and sql files as
                    # we can't specify noupdate for those files.
correct_key = 'demo' if k.count('demo') else 'data'
_logger.warning(
"module %s: key '%s' is deprecated in favor of '%s' for file '%s'.",
package.name, k, correct_key, f
)
return files
filename = None
try:
if kind in ('demo', 'test'):
threading.current_thread().testing = True
for filename in _get_files_of_kind(kind):
_logger.info("loading %s/%s", package.name, filename)
noupdate = False
if kind in ('demo', 'demo_xml') or (filename.endswith('.csv') and kind in ('init', 'init_xml')):
noupdate = True
tools.convert_file(cr, package.name, filename, idref, mode, noupdate, kind)
finally:
if kind in ('demo', 'test'):
threading.current_thread().testing = False
return bool(filename)
def load_demo(cr, package, idref, mode):
"""
Loads demo data for the specified package.
"""
if not package.should_have_demo():
return False
try:
_logger.info("Module %s: loading demo", package.name)
with cr.savepoint(flush=False):
load_data(cr, idref, mode, kind='demo', package=package)
return True
except Exception as e:
# If we could not install demo data for this module
_logger.warning(
"Module %s demo data failed to install, installed without demo data",
package.name, exc_info=True)
env = api.Environment(cr, SUPERUSER_ID, {})
todo = env.ref('base.demo_failure_todo', raise_if_not_found=False)
Failure = env.get('ir.demo_failure')
if todo and Failure is not None:
todo.state = 'open'
Failure.create({'module_id': package.id, 'error': str(e)})
return False
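# Illustrative sketch: the savepoint pattern used by load_demo() above, in
# isolation. Optional work runs inside a savepoint so a failure rolls back
# only that work, not the surrounding transaction. Assumes an Odoo-style
# cursor whose savepoint() is a context manager.
def _optional_step_sketch(cr, step):
    try:
        with cr.savepoint():
            step()
        return True
    except Exception:
        return False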
def force_demo(cr):
"""
Forces the `demo` flag on all modules, and installs demo data for all installed modules.
"""
graph = odoo.modules.graph.Graph()
cr.execute('UPDATE ir_module_module SET demo=True')
cr.execute(
"SELECT name FROM ir_module_module WHERE state IN ('installed', 'to upgrade', 'to remove')"
)
module_list = [name for (name,) in cr.fetchall()]
graph.add_modules(cr, module_list, ['demo'])
for package in graph:
load_demo(cr, package, {}, 'init')
env = api.Environment(cr, SUPERUSER_ID, {})
env['ir.module.module'].invalidate_cache(['demo'])
env['res.groups']._update_user_groups_view()
def load_module_graph(cr, graph, status=None, perform_checks=True,
skip_modules=None, report=None, models_to_check=None):
"""Migrates+Updates or Installs all module nodes from ``graph``
:param graph: graph of module nodes to load
:param status: deprecated parameter, unused, left to avoid changing signature in 8.0
:param perform_checks: whether module descriptors should be checked for validity (prints warnings
                           for some cases)
:param skip_modules: optional list of module names (packages) which have previously been loaded and can be skipped
:return: list of modules that were installed or updated
"""
if models_to_check is None:
models_to_check = set()
processed_modules = []
loaded_modules = []
registry = odoo.registry(cr.dbname)
migrations = odoo.modules.migration.MigrationManager(cr, graph)
module_count = len(graph)
_logger.info('loading %d modules...', module_count)
# register, instantiate and initialize models for each modules
t0 = time.time()
loading_extra_query_count = odoo.sql_db.sql_counter
loading_cursor_query_count = cr.sql_log_count
models_updated = set()
for index, package in enumerate(graph, 1):
module_name = package.name
module_id = package.id
if skip_modules and module_name in skip_modules:
continue
module_t0 = time.time()
module_cursor_query_count = cr.sql_log_count
module_extra_query_count = odoo.sql_db.sql_counter
needs_update = (
hasattr(package, "init")
or hasattr(package, "update")
or package.state in ("to install", "to upgrade")
)
module_log_level = logging.DEBUG
if needs_update:
module_log_level = logging.INFO
_logger.log(module_log_level, 'Loading module %s (%d/%d)', module_name, index, module_count)
if needs_update:
if package.name != 'base':
registry.setup_models(cr)
migrations.migrate_module(package, 'pre')
if package.name != 'base':
env = api.Environment(cr, SUPERUSER_ID, {})
env['base'].flush()
load_openerp_module(package.name)
new_install = package.state == 'to install'
if new_install:
py_module = sys.modules['odoo.addons.%s' % (module_name,)]
pre_init = package.info.get('pre_init_hook')
if pre_init:
getattr(py_module, pre_init)(cr)
model_names = registry.load(cr, package)
mode = 'update'
if hasattr(package, 'init') or package.state == 'to install':
mode = 'init'
loaded_modules.append(package.name)
if needs_update:
models_updated |= set(model_names)
models_to_check -= set(model_names)
registry.setup_models(cr)
registry.init_models(cr, model_names, {'module': package.name}, new_install)
elif package.state != 'to remove':
# The current module has simply been loaded. The models extended by this module
# and for which we updated the schema, must have their schema checked again.
# This is because the extension may have changed the model,
# e.g. adding required=True to an existing field, but the schema has not been
# updated by this module because it's not marked as 'to upgrade/to install'.
models_to_check |= set(model_names) & models_updated
idref = {}
if needs_update:
env = api.Environment(cr, SUPERUSER_ID, {})
# Can't put this line out of the loop: ir.module.module will be
# registered by init_models() above.
module = env['ir.module.module'].browse(module_id)
if perform_checks:
module._check()
if package.state == 'to upgrade':
# upgrading the module information
module.write(module.get_values_from_terp(package.data))
load_data(cr, idref, mode, kind='data', package=package)
demo_loaded = package.dbdemo = load_demo(cr, package, idref, mode)
cr.execute('update ir_module_module set demo=%s where id=%s', (demo_loaded, module_id))
module.invalidate_cache(['demo'])
migrations.migrate_module(package, 'post')
# Update translations for all installed languages
overwrite = odoo.tools.config["overwrite_existing_translations"]
module._update_translations(overwrite=overwrite)
if package.name is not None:
registry._init_modules.add(package.name)
if needs_update:
if new_install:
post_init = package.info.get('post_init_hook')
if post_init:
getattr(py_module, post_init)(cr, registry)
if mode == 'update':
# validate the views that have not been checked yet
env['ir.ui.view']._validate_module_views(module_name)
# need to commit any modification the module's installation or
# update made to the schema or data so the tests can run
# (separately in their own transaction)
cr.commit()
concrete_models = [model for model in model_names if not registry[model]._abstract]
if concrete_models:
cr.execute("""
SELECT model FROM ir_model
WHERE id NOT IN (SELECT DISTINCT model_id FROM ir_model_access) AND model IN %s
""", [tuple(concrete_models)])
models = [model for [model] in cr.fetchall()]
if models:
lines = [
f"The models {models} have no access rules in module {module_name}, consider adding some, like:",
"id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink"
]
for model in models:
xmlid = model.replace('.', '_')
lines.append(f"{module_name}.access_{xmlid},access_{xmlid},{module_name}.model_{xmlid},base.group_user,1,0,0,0")
_logger.warning('\n'.join(lines))
updating = tools.config.options['init'] or tools.config.options['update']
test_time = test_queries = 0
test_results = None
if tools.config.options['test_enable'] and (needs_update or not updating):
env = api.Environment(cr, SUPERUSER_ID, {})
loader = odoo.tests.loader
suite = loader.make_suite([module_name], 'at_install')
if suite.countTestCases():
if not needs_update:
registry.setup_models(cr)
# Python tests
env['ir.http']._clear_routing_map() # force routing map to be rebuilt
tests_t0, tests_q0 = time.time(), odoo.sql_db.sql_counter
test_results = loader.run_suite(suite, module_name)
report.update(test_results)
test_time = time.time() - tests_t0
test_queries = odoo.sql_db.sql_counter - tests_q0
# tests may have reset the environment
env = api.Environment(cr, SUPERUSER_ID, {})
module = env['ir.module.module'].browse(module_id)
if needs_update:
processed_modules.append(package.name)
ver = adapt_version(package.data['version'])
# Set new modules and dependencies
module.write({'state': 'installed', 'latest_version': ver})
package.load_state = package.state
package.load_version = package.installed_version
package.state = 'installed'
for kind in ('init', 'demo', 'update'):
if hasattr(package, kind):
delattr(package, kind)
module.flush()
extra_queries = odoo.sql_db.sql_counter - module_extra_query_count - test_queries
extras = []
if test_queries:
extras.append(f'+{test_queries} test')
if extra_queries:
extras.append(f'+{extra_queries} other')
_logger.log(
module_log_level, "Module %s loaded in %.2fs%s, %s queries%s",
module_name, time.time() - module_t0,
f' (incl. {test_time:.2f}s test)' if test_time else '',
cr.sql_log_count - module_cursor_query_count,
f' ({", ".join(extras)})' if extras else ''
)
if test_results and not test_results.wasSuccessful():
_logger.error(
"Module %s: %d failures, %d errors of %d tests",
module_name, len(test_results.failures), len(test_results.errors),
test_results.testsRun
)
_logger.runbot("%s modules loaded in %.2fs, %s queries (+%s extra)",
len(graph),
time.time() - t0,
cr.sql_log_count - loading_cursor_query_count,
                   odoo.sql_db.sql_counter - loading_extra_query_count)  # extra queries: tests, notify, any other closed cursor
return loaded_modules, processed_modules
def _check_module_names(cr, module_names):
mod_names = set(module_names)
if 'base' in mod_names:
# ignore dummy 'all' module
if 'all' in mod_names:
mod_names.remove('all')
if mod_names:
cr.execute("SELECT count(id) AS count FROM ir_module_module WHERE name in %s", (tuple(mod_names),))
if cr.dictfetchone()['count'] != len(mod_names):
# find out what module name(s) are incorrect:
cr.execute("SELECT name FROM ir_module_module")
incorrect_names = mod_names.difference([x['name'] for x in cr.dictfetchall()])
_logger.warning('invalid module names, ignored: %s', ", ".join(incorrect_names))
def load_marked_modules(cr, graph, states, force, progressdict, report,
loaded_modules, perform_checks, models_to_check=None):
"""Loads modules marked with ``states``, adding them to ``graph`` and
``loaded_modules`` and returns a list of installed/upgraded modules."""
if models_to_check is None:
models_to_check = set()
processed_modules = []
while True:
cr.execute("SELECT name from ir_module_module WHERE state IN %s" ,(tuple(states),))
module_list = [name for (name,) in cr.fetchall() if name not in graph]
if not module_list:
break
graph.add_modules(cr, module_list, force)
_logger.debug('Updating graph with %d more modules', len(module_list))
loaded, processed = load_module_graph(
cr, graph, progressdict, report=report, skip_modules=loaded_modules,
perform_checks=perform_checks, models_to_check=models_to_check
)
processed_modules.extend(processed)
loaded_modules.extend(loaded)
if not processed:
break
return processed_modules
def load_modules(registry, force_demo=False, status=None, update_module=False):
""" Load the modules for a registry object that has just been created. This
function is part of Registry.new() and should not be used anywhere else.
"""
initialize_sys_path()
force = []
if force_demo:
force.append('demo')
models_to_check = set()
with registry.cursor() as cr:
# prevent endless wait for locks on schema changes (during online
# installs) if a concurrent transaction has accessed the table;
# connection settings are automatically reset when the connection is
# borrowed from the pool
cr.execute("SET SESSION lock_timeout = '15s'")
if not odoo.modules.db.is_initialized(cr):
if not update_module:
_logger.error("Database %s not initialized, you can force it with `-i base`", cr.dbname)
return
_logger.info("init db")
odoo.modules.db.initialize(cr)
update_module = True # process auto-installed modules
tools.config["init"]["all"] = 1
if not tools.config['without_demo']:
tools.config["demo"]['all'] = 1
if 'base' in tools.config['update'] or 'all' in tools.config['update']:
cr.execute("update ir_module_module set state=%s where name=%s and state=%s", ('to upgrade', 'base', 'installed'))
# STEP 1: LOAD BASE (must be done before module dependencies can be computed for later steps)
graph = odoo.modules.graph.Graph()
graph.add_module(cr, 'base', force)
if not graph:
_logger.critical('module base cannot be loaded! (hint: verify addons-path)')
raise ImportError('Module `base` cannot be loaded! (hint: verify addons-path)')
# processed_modules: for cleanup step after install
# loaded_modules: to avoid double loading
report = registry._assertion_report
loaded_modules, processed_modules = load_module_graph(
cr, graph, status, perform_checks=update_module,
report=report, models_to_check=models_to_check)
load_lang = tools.config.pop('load_language')
if load_lang or update_module:
# some base models are used below, so make sure they are set up
registry.setup_models(cr)
if load_lang:
for lang in load_lang.split(','):
tools.load_language(cr, lang)
# STEP 2: Mark other modules to be loaded/updated
if update_module:
env = api.Environment(cr, SUPERUSER_ID, {})
Module = env['ir.module.module']
_logger.info('updating modules list')
Module.update_list()
_check_module_names(cr, itertools.chain(tools.config['init'], tools.config['update']))
module_names = [k for k, v in tools.config['init'].items() if v]
if module_names:
modules = Module.search([('state', '=', 'uninstalled'), ('name', 'in', module_names)])
if modules:
modules.button_install()
module_names = [k for k, v in tools.config['update'].items() if v]
if module_names:
modules = Module.search([('state', 'in', ('installed', 'to upgrade')), ('name', 'in', module_names)])
if modules:
modules.button_upgrade()
cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base'))
Module.invalidate_cache(['state'])
Module.flush()
# STEP 3: Load marked modules (skipping base which was done in STEP 1)
# IMPORTANT: this is done in two parts, first loading all installed or
# partially installed modules (i.e. installed/to upgrade), to
# offer a consistent system to the second part: installing
# newly selected modules.
# We include the modules 'to remove' in the first step, because
# they are part of the "currently installed" modules. They will
        #              be dropped in STEP 5 later, before restarting the loading
# process.
# IMPORTANT 2: We have to loop here until all relevant modules have been
# processed, because in some rare cases the dependencies have
# changed, and modules that depend on an uninstalled module
# will not be processed on the first pass.
# It's especially useful for migrations.
previously_processed = -1
while previously_processed < len(processed_modules):
previously_processed = len(processed_modules)
processed_modules += load_marked_modules(cr, graph,
['installed', 'to upgrade', 'to remove'],
force, status, report, loaded_modules, update_module, models_to_check)
if update_module:
processed_modules += load_marked_modules(cr, graph,
['to install'], force, status, report,
loaded_modules, update_module, models_to_check)
registry.loaded = True
registry.setup_models(cr)
# check that all installed modules have been loaded by the registry
env = api.Environment(cr, SUPERUSER_ID, {})
Module = env['ir.module.module']
modules = Module.search(Module._get_modules_to_load_domain(), order='name')
missing = [name for name in modules.mapped('name') if name not in graph]
if missing:
_logger.error("Some modules are not loaded, some dependencies or manifest may be missing: %s", missing)
# STEP 3.5: execute migration end-scripts
migrations = odoo.modules.migration.MigrationManager(cr, graph)
for package in graph:
migrations.migrate_module(package, 'end')
# check that new module dependencies have been properly installed after a migration/upgrade
cr.execute("SELECT name from ir_module_module WHERE state IN ('to install', 'to upgrade')")
module_list = [name for (name,) in cr.fetchall()]
if module_list:
_logger.error("Some modules have inconsistent states, some dependencies may be missing: %s", sorted(module_list))
# STEP 3.6: apply remaining constraints in case of an upgrade
registry.finalize_constraints()
# STEP 4: Finish and cleanup installations
if processed_modules:
env = api.Environment(cr, SUPERUSER_ID, {})
cr.execute("SELECT model from ir_model")
for (model,) in cr.fetchall():
if model in registry:
env[model]._check_removed_columns(log=True)
                elif _logger.isEnabledFor(logging.INFO):  # more an info than a warning...
_logger.runbot("Model %s is declared but cannot be loaded! (Perhaps a module was partially removed or renamed)", model)
# Cleanup orphan records
env['ir.model.data']._process_end(processed_modules)
env['base'].flush()
for kind in ('init', 'demo', 'update'):
tools.config[kind] = {}
# STEP 5: Uninstall modules to remove
if update_module:
        # Remove records referenced from ir_model_data for modules to be
        # removed (and remove the references from ir_model_data).
cr.execute("SELECT name, id FROM ir_module_module WHERE state=%s", ('to remove',))
modules_to_remove = dict(cr.fetchall())
if modules_to_remove:
env = api.Environment(cr, SUPERUSER_ID, {})
pkgs = reversed([p for p in graph if p.name in modules_to_remove])
for pkg in pkgs:
uninstall_hook = pkg.info.get('uninstall_hook')
if uninstall_hook:
py_module = sys.modules['odoo.addons.%s' % (pkg.name,)]
getattr(py_module, uninstall_hook)(cr, registry)
env['base'].flush()
Module = env['ir.module.module']
Module.browse(modules_to_remove.values()).module_uninstall()
# Recursive reload, should only happen once, because there should be no
# modules to remove next time
cr.commit()
_logger.info('Reloading registry once more after uninstalling modules')
registry = odoo.modules.registry.Registry.new(
cr.dbname, force_demo, status, update_module
)
cr.reset()
registry.check_tables_exist(cr)
cr.commit()
return registry
# STEP 5.5: Verify extended fields on every model
# This will fix the schema of all models in a situation such as:
# - module A is loaded and defines model M;
# - module B is installed/upgraded and extends model M;
# - module C is loaded and extends model M;
# - module B and C depend on A but not on each other;
# The changes introduced by module C are not taken into account by the upgrade of B.
if models_to_check:
registry.init_models(cr, list(models_to_check), {'models_to_check': True})
# STEP 6: verify custom views on every model
if update_module:
env = api.Environment(cr, SUPERUSER_ID, {})
env['res.groups']._update_user_groups_view()
View = env['ir.ui.view']
for model in registry:
try:
View._validate_custom_views(model)
except Exception as e:
_logger.warning('invalid custom view(s) for model %s: %s', model, tools.ustr(e))
if report.wasSuccessful():
_logger.info('Modules loaded.')
else:
_logger.error('At least one test failed when loading the modules.')
# STEP 8: call _register_hook on every model
# This is done *exactly once* when the registry is being loaded. See the
# management of those hooks in `Registry.setup_models`: all the calls to
# setup_models() done here do not mess up with hooks, as registry.ready
# is False.
env = api.Environment(cr, SUPERUSER_ID, {})
for model in env.values():
model._register_hook()
env['base'].flush()
# STEP 9: save installed/updated modules for post-install tests
registry.updated_modules += processed_modules
def reset_modules_state(db_name):
"""
Resets modules flagged as "to x" to their original state
"""
# Warning, this function was introduced in response to commit 763d714
# which locks cron jobs for dbs which have modules marked as 'to %'.
    # This function is meant to be called ONLY when module
    # installation/upgrade/uninstallation fails, which is the only known case
    # for which modules can stay marked as 'to %' for an indefinite amount
    # of time.
db = odoo.sql_db.db_connect(db_name)
with db.cursor() as cr:
cr.execute(
"UPDATE ir_module_module SET state='installed' WHERE state IN ('to remove', 'to upgrade')"
)
cr.execute(
"UPDATE ir_module_module SET state='uninstalled' WHERE state='to install'"
)
_logger.warning("Transient module states were reset")
| 43.847039 | 26,659 |
30,053 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
""" Models registries.
"""
from collections import defaultdict, deque
from collections.abc import Mapping
from contextlib import closing, contextmanager
from functools import partial
from operator import attrgetter
import logging
import os
import threading
import time
import psycopg2
import odoo
from .. import SUPERUSER_ID
from odoo.sql_db import TestCursor
from odoo.tools import (config, existing_tables, ignore,
lazy_classproperty, lazy_property, sql,
Collector, OrderedSet)
from odoo.tools.lru import LRU
_logger = logging.getLogger(__name__)
_schema = logging.getLogger('odoo.schema')
class Registry(Mapping):
""" Model registry for a particular database.
The registry is essentially a mapping between model names and model classes.
There is one registry instance per database.
"""
_lock = threading.RLock()
_saved_lock = None
@lazy_classproperty
def registries(cls):
""" A mapping from database names to registries. """
size = config.get('registry_lru_size', None)
if not size:
            # Size the LRU depending on the memory limits
            if os.name != 'posix':
                # the soft memory limit cannot be specified on windows...
size = 42
else:
                # A registry takes 10MB of memory on average, so we reserve
                # 10MB (registry) + 5MB (working memory) per registry
avgsz = 15 * 1024 * 1024
size = int(config['limit_memory_soft'] / avgsz)
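                # For example, with a 2048MB soft limit this gives
                # int(2048 * 1024 * 1024 / (15 * 1024 * 1024)) == 136 registries.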
return LRU(size)
def __new__(cls, db_name):
""" Return the registry for the given database name."""
with cls._lock:
try:
return cls.registries[db_name]
except KeyError:
return cls.new(db_name)
finally:
# set db tracker - cleaned up at the WSGI dispatching phase in
# odoo.service.wsgi_server.application
threading.current_thread().dbname = db_name
@classmethod
def new(cls, db_name, force_demo=False, status=None, update_module=False):
""" Create and return a new registry for the given database name. """
t0 = time.time()
with cls._lock:
registry = object.__new__(cls)
registry.init(db_name)
# Initializing a registry will call general code which will in
# turn call Registry() to obtain the registry being initialized.
# Make it available in the registries dictionary then remove it
# if an exception is raised.
cls.delete(db_name)
cls.registries[db_name] = registry # pylint: disable=unsupported-assignment-operation
try:
registry.setup_signaling()
# This should be a method on Registry
try:
odoo.modules.load_modules(registry, force_demo, status, update_module)
except Exception:
odoo.modules.reset_modules_state(db_name)
raise
except Exception:
_logger.exception('Failed to load registry')
del cls.registries[db_name] # pylint: disable=unsupported-delete-operation
raise
# load_modules() above can replace the registry by calling
# indirectly new() again (when modules have to be uninstalled).
# Yeah, crazy.
registry = cls.registries[db_name] # pylint: disable=unsubscriptable-object
registry._init = False
registry.ready = True
registry.registry_invalidated = bool(update_module)
registry.new = registry.init = registry.registries = None
_logger.info("Registry loaded in %.3fs", time.time() - t0)
return registry
def init(self, db_name):
self.models = {} # model name/model instance mapping
self._sql_constraints = set()
self._init = True
self._assertion_report = odoo.tests.runner.OdooTestResult()
self._fields_by_model = None
self._ordinary_tables = None
self._constraint_queue = deque()
self.__cache = LRU(8192)
# modules fully loaded (maintained during init phase by `loading` module)
self._init_modules = set()
self.updated_modules = [] # installed/updated modules
self.loaded_xmlids = set()
self.db_name = db_name
self._db = odoo.sql_db.db_connect(db_name)
# cursor for test mode; None means "normal" mode
self.test_cr = None
self.test_lock = None
        # flags indicating the loading state of the registry
self.loaded = False # whether all modules are loaded
self.ready = False # whether everything is set up
# field dependencies
self.field_depends = Collector()
self.field_depends_context = Collector()
self.field_inverses = Collector()
# Inter-process signaling:
# The `base_registry_signaling` sequence indicates the whole registry
# must be reloaded.
        # The `base_cache_signaling` sequence indicates all caches must be
# invalidated (i.e. cleared).
self.registry_sequence = None
self.cache_sequence = None
# Flags indicating invalidation of the registry or the cache.
self._invalidation_flags = threading.local()
with closing(self.cursor()) as cr:
self.has_unaccent = odoo.modules.db.has_unaccent(cr)
self.has_trigram = odoo.modules.db.has_trigram(cr)
@classmethod
def delete(cls, db_name):
""" Delete the registry linked to a given database. """
with cls._lock:
if db_name in cls.registries:
del cls.registries[db_name]
@classmethod
def delete_all(cls):
""" Delete all the registries. """
with cls._lock:
cls.registries.clear()
#
# Mapping abstract methods implementation
# => mixin provides methods keys, items, values, get, __eq__, and __ne__
#
def __len__(self):
""" Return the size of the registry. """
return len(self.models)
def __iter__(self):
""" Return an iterator over all model names. """
return iter(self.models)
def __getitem__(self, model_name):
""" Return the model with the given name or raise KeyError if it doesn't exist."""
return self.models[model_name]
def __call__(self, model_name):
""" Same as ``self[model_name]``. """
return self.models[model_name]
def __setitem__(self, model_name, model):
""" Add or replace a model in the registry."""
self.models[model_name] = model
def __delitem__(self, model_name):
""" Remove a (custom) model from the registry. """
del self.models[model_name]
# the custom model can inherit from mixins ('mail.thread', ...)
for Model in self.models.values():
Model._inherit_children.discard(model_name)
def descendants(self, model_names, *kinds):
""" Return the models corresponding to ``model_names`` and all those
that inherit/inherits from them.
"""
assert all(kind in ('_inherit', '_inherits') for kind in kinds)
funcs = [attrgetter(kind + '_children') for kind in kinds]
models = OrderedSet()
queue = deque(model_names)
while queue:
model = self[queue.popleft()]
models.add(model._name)
for func in funcs:
queue.extend(func(model))
return models
def load(self, cr, module):
""" Load a given module in the registry, and return the names of the
modified models.
At the Python level, the modules are already loaded, but not yet on a
        per-registry level. This method populates the registry with the given
        module, i.e. it instantiates all the classes of the given module
        and registers them in the registry.
"""
from .. import models
# clear cache to ensure consistency, but do not signal it
self.__cache.clear()
lazy_property.reset_all(self)
# Instantiate registered classes (via the MetaModel automatic discovery
# or via explicit constructor call), and add them to the pool.
model_names = []
for cls in models.MetaModel.module_to_models.get(module.name, []):
# models register themselves in self.models
model = cls._build_model(self, cr)
model_names.append(model._name)
return self.descendants(model_names, '_inherit', '_inherits')
def setup_models(self, cr):
""" Complete the setup of models.
This must be called after loading modules and before using the ORM.
"""
env = odoo.api.Environment(cr, SUPERUSER_ID, {})
env['base'].flush()
# Uninstall registry hooks. Because of the condition, this only happens
# on a fully loaded registry, and not on a registry being loaded.
if self.ready:
for model in env.values():
model._unregister_hook()
# clear cache to ensure consistency, but do not signal it
self.__cache.clear()
lazy_property.reset_all(self)
self.registry_invalidated = True
if env.all.tocompute:
_logger.error(
"Remaining fields to compute before setting up registry: %s",
env.all.tocompute, stack_info=True,
)
        # we must set up ir.model before adding manual fields because _add_manual_models may
# depend on behavior that is implemented through overrides, such as is_mail_thread which
# is implemented through an override to env['ir.model']._instanciate
env['ir.model']._prepare_setup()
# add manual models
if self._init_modules:
env['ir.model']._add_manual_models()
# prepare the setup on all models
models = list(env.values())
for model in models:
model._prepare_setup()
self.field_depends.clear()
self.field_depends_context.clear()
self.field_inverses.clear()
# do the actual setup
for model in models:
model._setup_base()
self._m2m = defaultdict(list)
for model in models:
model._setup_fields()
del self._m2m
for model in models:
model._setup_complete()
# determine field_depends and field_depends_context
for model in models:
for field in model._fields.values():
depends, depends_context = field.get_depends(model)
self.field_depends[field] = tuple(depends)
self.field_depends_context[field] = tuple(depends_context)
# Reinstall registry hooks. Because of the condition, this only happens
# on a fully loaded registry, and not on a registry being loaded.
if self.ready:
for model in env.values():
model._register_hook()
env['base'].flush()
@lazy_property
def field_computed(self):
""" Return a dict mapping each field to the fields computed by the same method. """
computed = {}
for model_name, Model in self.models.items():
groups = defaultdict(list)
for field in Model._fields.values():
if field.compute:
computed[field] = group = groups[field.compute]
group.append(field)
for fields in groups.values():
if len({field.compute_sudo for field in fields}) > 1:
_logger.warning("%s: inconsistent 'compute_sudo' for computed fields: %s",
model_name, ", ".join(field.name for field in fields))
return computed
@lazy_property
def field_triggers(self):
# determine field dependencies
dependencies = {}
for Model in self.models.values():
if Model._abstract:
continue
for field in Model._fields.values():
# dependencies of custom fields may not exist; ignore that case
exceptions = (Exception,) if field.base_field.manual else ()
with ignore(*exceptions):
dependencies[field] = OrderedSet(field.resolve_depends(self))
# determine transitive dependencies
def transitive_dependencies(field, seen=[]):
if field in seen:
return
for seq1 in dependencies.get(field, ()):
yield seq1
for seq2 in transitive_dependencies(seq1[-1], seen + [field]):
yield concat(seq1[:-1], seq2)
def concat(seq1, seq2):
if seq1 and seq2:
f1, f2 = seq1[-1], seq2[0]
if f1.type == 'one2many' and f2.type == 'many2one' and \
f1.model_name == f2.comodel_name and f1.inverse_name == f2.name:
return concat(seq1[:-1], seq2[1:])
return seq1 + seq2
# determine triggers based on transitive dependencies
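        # The tree maps fields along reversed dependency paths, collecting the
        # fields to recompute under the `None` key at the end of each path. For
        # instance (hypothetical fields), a dependency total -> line_ids.price
        # yields roughly {price: {line_ids: {None: {total}}}}.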
triggers = {}
for field in dependencies:
for path in transitive_dependencies(field):
if path:
tree = triggers
for label in reversed(path):
tree = tree.setdefault(label, {})
tree.setdefault(None, OrderedSet()).add(field)
return triggers
def post_init(self, func, *args, **kwargs):
""" Register a function to call at the end of :meth:`~.init_models`. """
self._post_init_queue.append(partial(func, *args, **kwargs))
def post_constraint(self, func, *args, **kwargs):
""" Call the given function, and delay it if it fails during an upgrade. """
try:
if (func, args, kwargs) not in self._constraint_queue:
# Module A may try to apply a constraint and fail but another module B inheriting
# from Module A may try to reapply the same constraint and succeed, however the
# constraint would already be in the _constraint_queue and would be executed again
# at the end of the registry cycle, this would fail (already-existing constraint)
# and generate an error, therefore a constraint should only be applied if it's
# not already marked as "to be applied".
func(*args, **kwargs)
except Exception as e:
if self._is_install:
_schema.error(*e.args)
else:
_schema.info(*e.args)
self._constraint_queue.append((func, args, kwargs))
def finalize_constraints(self):
""" Call the delayed functions from above. """
while self._constraint_queue:
func, args, kwargs = self._constraint_queue.popleft()
try:
func(*args, **kwargs)
except Exception as e:
# warn only, this is not a deployment showstopper, and
# can sometimes be a transient error
_schema.warning(*e.args)
def init_models(self, cr, model_names, context, install=True):
""" Initialize a list of models (given by their name). Call methods
``_auto_init`` and ``init`` on each model to create or update the
database tables supporting the models.
The ``context`` may contain the following items:
- ``module``: the name of the module being installed/updated, if any;
- ``update_custom_fields``: whether custom fields should be updated.
"""
if not model_names:
return
if 'module' in context:
_logger.info('module %s: creating or updating database tables', context['module'])
elif context.get('models_to_check', False):
_logger.info("verifying fields for every extended model")
env = odoo.api.Environment(cr, SUPERUSER_ID, context)
models = [env[model_name] for model_name in model_names]
try:
self._post_init_queue = deque()
self._foreign_keys = {}
self._is_install = install
for model in models:
model._auto_init()
model.init()
env['ir.model']._reflect_models(model_names)
env['ir.model.fields']._reflect_fields(model_names)
env['ir.model.fields.selection']._reflect_selections(model_names)
env['ir.model.constraint']._reflect_constraints(model_names)
self._ordinary_tables = None
while self._post_init_queue:
func = self._post_init_queue.popleft()
func()
self.check_indexes(cr, model_names)
self.check_foreign_keys(cr)
env['base'].flush()
# make sure all tables are present
self.check_tables_exist(cr)
finally:
del self._post_init_queue
del self._foreign_keys
del self._is_install
def check_indexes(self, cr, model_names):
""" Create or drop column indexes for the given models. """
expected = [
("%s_%s_index" % (Model._table, field.name), Model._table, field.name, field.index)
for model_name in model_names
for Model in [self.models[model_name]]
if Model._auto and not Model._abstract
for field in Model._fields.values()
if field.column_type and field.store
]
if not expected:
return
# retrieve existing indexes with their corresponding table
cr.execute("SELECT indexname, tablename FROM pg_indexes WHERE indexname IN %s",
[tuple(row[0] for row in expected)])
existing = dict(cr.fetchall())
for indexname, tablename, columnname, index in expected:
if index and indexname not in existing:
try:
with cr.savepoint(flush=False):
sql.create_index(cr, indexname, tablename, ['"%s"' % columnname])
except psycopg2.OperationalError:
_schema.error("Unable to add index for %s", self)
elif not index and tablename == existing.get(indexname):
_schema.info("Keep unexpected index %s on table %s", indexname, tablename)
def add_foreign_key(self, table1, column1, table2, column2, ondelete,
model, module, force=True):
""" Specify an expected foreign key. """
key = (table1, column1)
val = (table2, column2, ondelete, model, module)
if force:
self._foreign_keys[key] = val
else:
self._foreign_keys.setdefault(key, val)
def check_foreign_keys(self, cr):
""" Create or update the expected foreign keys. """
if not self._foreign_keys:
return
# determine existing foreign keys on the tables
query = """
SELECT fk.conname, c1.relname, a1.attname, c2.relname, a2.attname, fk.confdeltype
FROM pg_constraint AS fk
JOIN pg_class AS c1 ON fk.conrelid = c1.oid
JOIN pg_class AS c2 ON fk.confrelid = c2.oid
JOIN pg_attribute AS a1 ON a1.attrelid = c1.oid AND fk.conkey[1] = a1.attnum
JOIN pg_attribute AS a2 ON a2.attrelid = c2.oid AND fk.confkey[1] = a2.attnum
WHERE fk.contype = 'f' AND c1.relname IN %s
"""
cr.execute(query, [tuple({table for table, column in self._foreign_keys})])
existing = {
(table1, column1): (name, table2, column2, deltype)
for name, table1, column1, table2, column2, deltype in cr.fetchall()
}
# create or update foreign keys
for key, val in self._foreign_keys.items():
table1, column1 = key
table2, column2, ondelete, model, module = val
deltype = sql._CONFDELTYPES[ondelete.upper()]
spec = existing.get(key)
if spec is None:
sql.add_foreign_key(cr, table1, column1, table2, column2, ondelete)
conname = sql.get_foreign_keys(cr, table1, column1, table2, column2, ondelete)[0]
model.env['ir.model.constraint']._reflect_constraint(model, conname, 'f', None, module)
elif (spec[1], spec[2], spec[3]) != (table2, column2, deltype):
sql.drop_constraint(cr, table1, spec[0])
sql.add_foreign_key(cr, table1, column1, table2, column2, ondelete)
conname = sql.get_foreign_keys(cr, table1, column1, table2, column2, ondelete)[0]
model.env['ir.model.constraint']._reflect_constraint(model, conname, 'f', None, module)
def check_tables_exist(self, cr):
"""
Verify that all tables are present and try to initialize those that are missing.
"""
env = odoo.api.Environment(cr, SUPERUSER_ID, {})
table2model = {
model._table: name
for name, model in env.items()
if not model._abstract and model.__class__._table_query is None
}
missing_tables = set(table2model).difference(existing_tables(cr, table2model))
if missing_tables:
missing = {table2model[table] for table in missing_tables}
_logger.info("Models have no table: %s.", ", ".join(missing))
# recreate missing tables
for name in missing:
_logger.info("Recreate table of model %s.", name)
env[name].init()
env['base'].flush()
# check again, and log errors if tables are still missing
missing_tables = set(table2model).difference(existing_tables(cr, table2model))
for table in missing_tables:
_logger.error("Model %s has no table.", table2model[table])
def _clear_cache(self):
""" Clear the cache and mark it as invalidated. """
self.__cache.clear()
self.cache_invalidated = True
def clear_caches(self):
""" Clear the caches associated to methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi`` for all the models.
"""
for model in self.models.values():
model.clear_caches()
def is_an_ordinary_table(self, model):
""" Return whether the given model has an ordinary table. """
if self._ordinary_tables is None:
cr = model.env.cr
query = """
SELECT c.relname
FROM pg_class c
JOIN pg_namespace n ON (n.oid = c.relnamespace)
WHERE c.relname IN %s
AND c.relkind = 'r'
AND n.nspname = 'public'
"""
tables = tuple(m._table for m in self.models.values())
cr.execute(query, [tables])
self._ordinary_tables = {row[0] for row in cr.fetchall()}
return model._table in self._ordinary_tables
@property
def registry_invalidated(self):
""" Determine whether the current thread has modified the registry. """
return getattr(self._invalidation_flags, 'registry', False)
@registry_invalidated.setter
def registry_invalidated(self, value):
self._invalidation_flags.registry = value
@property
def cache_invalidated(self):
""" Determine whether the current thread has modified the cache. """
return getattr(self._invalidation_flags, 'cache', False)
@cache_invalidated.setter
def cache_invalidated(self, value):
self._invalidation_flags.cache = value
def setup_signaling(self):
""" Setup the inter-process signaling on this registry. """
if self.in_test_mode():
return
with self.cursor() as cr:
# The `base_registry_signaling` sequence indicates when the registry
# must be reloaded.
# The `base_cache_signaling` sequence indicates when all caches must
# be invalidated (i.e. cleared).
cr.execute("SELECT sequence_name FROM information_schema.sequences WHERE sequence_name='base_registry_signaling'")
if not cr.fetchall():
cr.execute("CREATE SEQUENCE base_registry_signaling INCREMENT BY 1 START WITH 1")
cr.execute("SELECT nextval('base_registry_signaling')")
cr.execute("CREATE SEQUENCE base_cache_signaling INCREMENT BY 1 START WITH 1")
cr.execute("SELECT nextval('base_cache_signaling')")
cr.execute(""" SELECT base_registry_signaling.last_value,
base_cache_signaling.last_value
FROM base_registry_signaling, base_cache_signaling""")
self.registry_sequence, self.cache_sequence = cr.fetchone()
_logger.debug("Multiprocess load registry signaling: [Registry: %s] [Cache: %s]",
self.registry_sequence, self.cache_sequence)
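    # Typical per-request pattern (a sketch, not the only possible caller):
    # pick up changes made by other processes before handling a request, and
    # publish our own changes afterwards.
    #
    #     registry = Registry(db_name).check_signaling()
    #     with registry.manage_changes():
    #         ...  # handle the request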
def check_signaling(self):
""" Check whether the registry has changed, and performs all necessary
operations to update the registry. Return an up-to-date registry.
"""
if self.in_test_mode():
return self
with closing(self.cursor()) as cr:
cr.execute(""" SELECT base_registry_signaling.last_value,
base_cache_signaling.last_value
FROM base_registry_signaling, base_cache_signaling""")
r, c = cr.fetchone()
_logger.debug("Multiprocess signaling check: [Registry - %s -> %s] [Cache - %s -> %s]",
self.registry_sequence, r, self.cache_sequence, c)
# Check if the model registry must be reloaded
if self.registry_sequence != r:
_logger.info("Reloading the model registry after database signaling.")
self = Registry.new(self.db_name)
# Check if the model caches must be invalidated.
elif self.cache_sequence != c:
_logger.info("Invalidating all model caches after database signaling.")
self.clear_caches()
# prevent re-signaling the clear_caches() above, or any residual one that
# would be inherited from the master process (first request in pre-fork mode)
self.cache_invalidated = False
self.registry_sequence = r
self.cache_sequence = c
return self
def signal_changes(self):
""" Notifies other processes if registry or cache has been invalidated. """
if self.registry_invalidated and not self.in_test_mode():
_logger.info("Registry changed, signaling through the database")
with closing(self.cursor()) as cr:
cr.execute("select nextval('base_registry_signaling')")
self.registry_sequence = cr.fetchone()[0]
# no need to notify cache invalidation in case of registry invalidation,
# because reloading the registry implies starting with an empty cache
elif self.cache_invalidated and not self.in_test_mode():
_logger.info("At least one model cache has been invalidated, signaling through the database.")
with closing(self.cursor()) as cr:
cr.execute("select nextval('base_cache_signaling')")
self.cache_sequence = cr.fetchone()[0]
self.registry_invalidated = False
self.cache_invalidated = False
def reset_changes(self):
""" Reset the registry and cancel all invalidations. """
if self.registry_invalidated:
with closing(self.cursor()) as cr:
self.setup_models(cr)
self.registry_invalidated = False
if self.cache_invalidated:
self.__cache.clear()
self.cache_invalidated = False
@contextmanager
def manage_changes(self):
""" Context manager to signal/discard registry and cache invalidations. """
try:
yield self
self.signal_changes()
except Exception:
self.reset_changes()
raise
def in_test_mode(self):
""" Test whether the registry is in 'test' mode. """
return self.test_cr is not None
def enter_test_mode(self, cr):
""" Enter the 'test' mode, where one cursor serves several requests. """
assert self.test_cr is None
self.test_cr = cr
self.test_lock = threading.RLock()
assert Registry._saved_lock is None
Registry._saved_lock = Registry._lock
Registry._lock = DummyRLock()
def leave_test_mode(self):
""" Leave the test mode. """
assert self.test_cr is not None
self.test_cr = None
self.test_lock = None
assert Registry._saved_lock is not None
Registry._lock = Registry._saved_lock
Registry._saved_lock = None
def cursor(self):
""" Return a new cursor for the database. The cursor itself may be used
as a context manager to commit/rollback and close automatically.
"""
if self.test_cr is not None:
# in test mode we use a proxy object that uses 'self.test_cr' underneath
return TestCursor(self.test_cr, self.test_lock)
return self._db.cursor()
class DummyRLock(object):
""" Dummy reentrant lock, to be used while running rpc and js tests """
def acquire(self):
pass
def release(self):
pass
def __enter__(self):
self.acquire()
def __exit__(self, type, value, traceback):
self.release()
| 40.612162 | 30,053 |
6,679 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
""" Modules dependency graph. """
import itertools
import logging
import odoo
import odoo.tools as tools
_logger = logging.getLogger(__name__)
class Graph(dict):
""" Modules dependency graph.
The graph is a mapping from module name to Nodes.
"""
def add_node(self, name, info):
max_depth, father = 0, None
for d in info['depends']:
n = self.get(d) or Node(d, self, None) # lazy creation, do not use default value for get()
if n.depth >= max_depth:
father = n
max_depth = n.depth
if father:
return father.add_child(name, info)
else:
return Node(name, self, info)
def update_from_db(self, cr):
if not len(self):
return
        # update the graph with values from the database (if they exist)
## First, we set the default values for each package in graph
additional_data = {key: {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'installed_version': None} for key in self.keys()}
## Then we get the values from the database
cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version'
' FROM ir_module_module'
' WHERE name IN %s',(tuple(additional_data),)
)
## and we update the default values with values from the database
additional_data.update((x['name'], x) for x in cr.dictfetchall())
for package in self.values():
for k, v in additional_data[package.name].items():
setattr(package, k, v)
def add_module(self, cr, module, force=None):
self.add_modules(cr, [module], force)
def add_modules(self, cr, module_list, force=None):
if force is None:
force = []
packages = []
len_graph = len(self)
for module in module_list:
            # This will raise an exception if the descriptor file is missing or
            # unreadable.
            # NOTE The call to load_information_from_description_file is already
            # done by db.initialize, so it is possible to skip it here.
info = odoo.modules.module.load_information_from_description_file(module)
if info and info['installable']:
packages.append((module, info)) # TODO directly a dict, like in get_modules_with_version
elif module != 'studio_customization':
_logger.warning('module %s: not installable, skipped', module)
        dependencies = {p: info['depends'] for p, info in packages}
        current, later = {p for p, info in packages}, set()
while packages and current > later:
package, info = packages[0]
deps = info['depends']
# if all dependencies of 'package' are already in the graph, add 'package' in the graph
if all(dep in self for dep in deps):
                if package not in current:
packages.pop(0)
continue
later.clear()
current.remove(package)
node = self.add_node(package, info)
for kind in ('init', 'demo', 'update'):
if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force:
setattr(node, kind, True)
else:
later.add(package)
packages.append((package, info))
packages.pop(0)
self.update_from_db(cr)
for package in later:
unmet_deps = [p for p in dependencies[package] if p not in self]
_logger.info('module %s: Unmet dependencies: %s', package, ', '.join(unmet_deps))
return len(self) - len_graph
def __iter__(self):
level = 0
done = set(self.keys())
while done:
            level_modules = sorted((name, module) for name, module in self.items() if module.depth == level)
for name, module in level_modules:
done.remove(name)
yield module
level += 1
def __str__(self):
return '\n'.join(str(n) for n in self if n.depth == 0)
class Node(object):
""" One module in the modules dependency graph.
Node acts as a per-module singleton. A node is constructed via
Graph.add_module() or Graph.add_modules(). Some of its fields are from
ir_module_module (set by Graph.update_from_db()).
"""
def __new__(cls, name, graph, info):
if name in graph:
inst = graph[name]
else:
inst = object.__new__(cls)
graph[name] = inst
return inst
def __init__(self, name, graph, info):
self.name = name
self.graph = graph
self.info = info or getattr(self, 'info', {})
if not hasattr(self, 'children'):
self.children = []
if not hasattr(self, 'depth'):
self.depth = 0
@property
def data(self):
return self.info
def add_child(self, name, info):
node = Node(name, self.graph, info)
node.depth = self.depth + 1
if node not in self.children:
self.children.append(node)
for attr in ('init', 'update', 'demo'):
if hasattr(self, attr):
setattr(node, attr, True)
self.children.sort(key=lambda x: x.name)
return node
def __setattr__(self, name, value):
super(Node, self).__setattr__(name, value)
if name in ('init', 'update', 'demo'):
tools.config[name][self.name] = 1
for child in self.children:
setattr(child, name, value)
if name == 'depth':
for child in self.children:
setattr(child, name, value + 1)
def __iter__(self):
return itertools.chain(
self.children,
itertools.chain.from_iterable(self.children)
)
def __str__(self):
return self._pprint()
def _pprint(self, depth=0):
s = '%s\n' % self.name
for c in self.children:
s += '%s`-> %s' % (' ' * depth, c._pprint(depth+1))
return s
def should_have_demo(self):
return (hasattr(self, 'demo') or (self.dbdemo and self.state != 'installed')) and all(p.dbdemo for p in self.parents)
@property
def parents(self):
if self.depth == 0:
return []
return (
node for node in self.graph.values()
if node.depth < self.depth
if self in node.children
)
| 34.786458 | 6,679 |
15,966 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
import collections.abc
import importlib
import logging
import os
import pkg_resources
import re
import sys
import warnings
from os.path import join as opj
import odoo
import odoo.tools as tools
import odoo.release as release
from odoo.tools import pycompat
MANIFEST_NAMES = ('__manifest__.py', '__openerp__.py')
README = ['README.rst', 'README.md', 'README.txt']
_logger = logging.getLogger(__name__)
# addons path as a list
# ad_paths is a deprecated alias, please use odoo.addons.__path__
@tools.lazy
def ad_paths():
warnings.warn(
'"odoo.modules.module.ad_paths" is a deprecated proxy to '
'"odoo.addons.__path__".', DeprecationWarning, stacklevel=2)
return odoo.addons.__path__
# Modules already loaded
loaded = []
class AddonsHook(object):
""" Makes modules accessible through openerp.addons.* """
def find_module(self, name, path=None):
if name.startswith('openerp.addons.') and name.count('.') == 2:
warnings.warn(
'"openerp.addons" is a deprecated alias to "odoo.addons".',
DeprecationWarning, stacklevel=2)
return self
def load_module(self, name):
assert name not in sys.modules
odoo_name = re.sub(r'^openerp.addons.(\w+)$', r'odoo.addons.\g<1>', name)
odoo_module = sys.modules.get(odoo_name)
if not odoo_module:
odoo_module = importlib.import_module(odoo_name)
sys.modules[name] = odoo_module
return odoo_module
class OdooHook(object):
""" Makes odoo package also available as openerp """
def find_module(self, name, path=None):
# openerp.addons.<identifier> should already be matched by AddonsHook,
# only framework and subdirectories of modules should match
if re.match(r'^openerp\b', name):
warnings.warn(
'openerp is a deprecated alias to odoo.',
DeprecationWarning, stacklevel=2)
return self
def load_module(self, name):
assert name not in sys.modules
canonical = re.sub(r'^openerp(.*)', r'odoo\g<1>', name)
if canonical in sys.modules:
mod = sys.modules[canonical]
else:
# probable failure: canonical execution calling old naming -> corecursion
mod = importlib.import_module(canonical)
# just set the original module at the new location. Don't proxy,
# it breaks *-import (unless you can find how `from a import *` lists
# what's supposed to be imported by `*`, and manage to override it)
sys.modules[name] = mod
return sys.modules[name]
class UpgradeHook(object):
"""Makes the legacy `migrations` package being `odoo.upgrade`"""
def find_module(self, name, path=None):
if re.match(r"^odoo.addons.base.maintenance.migrations\b", name):
# We can't trigger a DeprecationWarning in this case.
# In order to be cross-versions, the multi-versions upgrade scripts (0.0.0 scripts),
# the tests, and the common files (utility functions) still needs to import from the
# legacy name.
return self
def load_module(self, name):
assert name not in sys.modules
canonical_upgrade = name.replace("odoo.addons.base.maintenance.migrations", "odoo.upgrade")
if canonical_upgrade in sys.modules:
mod = sys.modules[canonical_upgrade]
else:
mod = importlib.import_module(canonical_upgrade)
sys.modules[name] = mod
return sys.modules[name]
def initialize_sys_path():
"""
Setup the addons path ``odoo.addons.__path__`` with various defaults
and explicit directories.
"""
# hook odoo.addons on data dir
dd = os.path.normcase(tools.config.addons_data_dir)
if os.access(dd, os.R_OK) and dd not in odoo.addons.__path__:
odoo.addons.__path__.append(dd)
# hook odoo.addons on addons paths
for ad in tools.config['addons_path'].split(','):
ad = os.path.normcase(os.path.abspath(tools.ustr(ad.strip())))
if ad not in odoo.addons.__path__:
odoo.addons.__path__.append(ad)
# hook odoo.addons on base module path
base_path = os.path.normcase(os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'addons')))
if base_path not in odoo.addons.__path__ and os.path.isdir(base_path):
odoo.addons.__path__.append(base_path)
# hook odoo.upgrade on upgrade-path
from odoo import upgrade
legacy_upgrade_path = os.path.join(base_path, 'base', 'maintenance', 'migrations')
for up in (tools.config['upgrade_path'] or legacy_upgrade_path).split(','):
up = os.path.normcase(os.path.abspath(tools.ustr(up.strip())))
if os.path.isdir(up) and up not in upgrade.__path__:
upgrade.__path__.append(up)
    # create deprecated module alias from odoo.addons.base.maintenance.migrations to odoo.upgrade
spec = importlib.machinery.ModuleSpec("odoo.addons.base.maintenance", None, is_package=True)
maintenance_pkg = importlib.util.module_from_spec(spec)
maintenance_pkg.migrations = upgrade
sys.modules["odoo.addons.base.maintenance"] = maintenance_pkg
sys.modules["odoo.addons.base.maintenance.migrations"] = upgrade
# hook deprecated module alias from openerp to odoo and "crm"-like to odoo.addons
if not getattr(initialize_sys_path, 'called', False): # only initialize once
sys.meta_path.insert(0, UpgradeHook())
sys.meta_path.insert(0, OdooHook())
sys.meta_path.insert(0, AddonsHook())
initialize_sys_path.called = True
def get_module_path(module, downloaded=False, display_warning=True):
"""Return the path of the given module.
Search the addons paths and return the first path where the given
module is found. If downloaded is True, return the default addons
path if nothing else is found.
"""
for adp in odoo.addons.__path__:
files = [opj(adp, module, manifest) for manifest in MANIFEST_NAMES] +\
[opj(adp, module + '.zip')]
if any(os.path.exists(f) for f in files):
return opj(adp, module)
if downloaded:
return opj(tools.config.addons_data_dir, module)
if display_warning:
_logger.warning('module %s: module not found', module)
return False
def get_module_filetree(module, dir='.'):
path = get_module_path(module)
if not path:
return False
dir = os.path.normpath(dir)
if dir == '.':
dir = ''
if dir.startswith('..') or (dir and dir[0] == '/'):
raise Exception('Cannot access file outside the module')
files = odoo.tools.osutil.listdir(path, True)
tree = {}
for f in files:
if not f.startswith(dir):
continue
if dir:
f = f[len(dir)+int(not dir.endswith('/')):]
lst = f.split(os.sep)
current = tree
while len(lst) != 1:
current = current.setdefault(lst.pop(0), {})
current[lst.pop(0)] = None
return tree
def get_resource_path(module, *args):
"""Return the full path of a resource of the given module.
:param module: module name
:param list(str) args: resource path components within module
:rtype: str
:return: absolute path to the resource
TODO make it available inside on osv object (self.get_resource_path)
"""
mod_path = get_module_path(module)
if not mod_path:
return False
return check_resource_path(mod_path, *args)
def check_resource_path(mod_path, *args):
resource_path = opj(mod_path, *args)
if os.path.exists(resource_path):
return resource_path
return False
# backwards compatibility
get_module_resource = get_resource_path
def get_resource_from_path(path):
"""Tries to extract the module name and the resource's relative path
out of an absolute resource path.
    If the operation is successful, returns a tuple containing the module name, the relative path
    to the resource using '/' as filesystem separator[1] and the same relative path using
    os.path.sep separators.
[1] same convention as the resource path declaration in manifests
:param path: absolute resource path
:rtype: tuple
:return: tuple(module_name, relative_path, os_relative_path) if possible, else None
"""
resource = False
for adpath in odoo.addons.__path__:
# force trailing separator
adpath = os.path.join(adpath, "")
if os.path.commonprefix([adpath, path]) == adpath:
resource = path.replace(adpath, "", 1)
break
if resource:
relative = resource.split(os.path.sep)
if not relative[0]:
relative.pop(0)
module = relative.pop(0)
return (module, '/'.join(relative), os.path.sep.join(relative))
return None
def get_module_icon(module):
iconpath = ['static', 'description', 'icon.png']
if get_module_resource(module, *iconpath):
return ('/' + module + '/') + '/'.join(iconpath)
return '/base/' + '/'.join(iconpath)
def get_module_icon_path(module):
iconpath = ['static', 'description', 'icon.png']
path = get_module_resource(module.name, *iconpath)
if not path:
path = get_module_resource('base', *iconpath)
return path
def module_manifest(path):
"""Returns path to module manifest if one can be found under `path`, else `None`."""
if not path:
return None
for manifest_name in MANIFEST_NAMES:
if os.path.isfile(opj(path, manifest_name)):
return opj(path, manifest_name)
def read_manifest(addons_path, module):
mod_path = opj(addons_path, module)
manifest_path = module_manifest(mod_path)
if manifest_path:
with tools.file_open(manifest_path, 'r') as fd:
manifest_data = fd.read()
return ast.literal_eval(manifest_data)
def get_module_root(path):
"""
Get closest module's root beginning from path
# Given:
# /foo/bar/module_dir/static/src/...
get_module_root('/foo/bar/module_dir/static/')
# returns '/foo/bar/module_dir'
get_module_root('/foo/bar/module_dir/')
# returns '/foo/bar/module_dir'
get_module_root('/foo/bar')
# returns None
@param path: Path from which the lookup should start
@return: Module root path or None if not found
"""
while not module_manifest(path):
new_path = os.path.abspath(opj(path, os.pardir))
if path == new_path:
return None
path = new_path
return path
def load_information_from_description_file(module, mod_path=None):
"""
:param module: The name of the module (sale, purchase, ...)
    :param mod_path: Physical path of the module; looked up from the module name if not provided
"""
if not mod_path:
mod_path = get_module_path(module, downloaded=True)
manifest_file = module_manifest(mod_path)
if manifest_file:
# default values for descriptor
info = {
'application': False,
'author': 'Odoo S.A.',
'auto_install': False,
'category': 'Uncategorized',
'depends': [],
'description': '',
'icon': get_module_icon(module),
'installable': True,
'post_load': None,
'version': '1.0',
'web': False,
'sequence': 100,
'summary': '',
'website': '',
}
info.update(zip(
'depends data demo test init_xml update_xml demo_xml'.split(),
iter(list, None)))
f = tools.file_open(manifest_file, mode='rb')
try:
info.update(ast.literal_eval(pycompat.to_text(f.read())))
finally:
f.close()
if not info.get('description'):
readme_path = [opj(mod_path, x) for x in README
if os.path.isfile(opj(mod_path, x))]
if readme_path:
with tools.file_open(readme_path[0]) as fd:
info['description'] = fd.read()
if not info.get('license'):
info['license'] = 'LGPL-3'
_logger.warning("Missing `license` key in manifest for '%s', defaulting to LGPL-3", module)
        # auto_install is either `False` (by default), in which case the module
        # is opt-in, or a list of dependencies, in which case the module is
        # automatically installed if all dependencies are (special case: [] to
        # always install the module), or `True` to auto-install the module
        # in case all dependencies declared in `depends` are installed.
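        # For example (hypothetical manifests):
        #   'auto_install': True             -> install once all 'depends' are installed
        #   'auto_install': ['sale', 'crm']  -> install once sale and crm are installed
        #   'auto_install': []               -> always install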
if isinstance(info['auto_install'], collections.abc.Iterable):
info['auto_install'] = set(info['auto_install'])
non_dependencies = info['auto_install'].difference(info['depends'])
assert not non_dependencies,\
"auto_install triggers must be dependencies, found " \
"non-dependencies [%s] for module %s" % (
', '.join(non_dependencies), module
)
elif info['auto_install']:
info['auto_install'] = set(info['depends'])
info['version'] = adapt_version(info['version'])
return info
_logger.debug('module %s: no manifest file found %s', module, MANIFEST_NAMES)
return {}
def load_openerp_module(module_name):
""" Load an OpenERP module, if not already loaded.
This loads the module and register all of its models, thanks to either
the MetaModel metaclass, or the explicit instantiation of the model.
This is also used to load server-wide module (i.e. it is also used
when there is no model to register).
"""
global loaded
if module_name in loaded:
return
try:
__import__('odoo.addons.' + module_name)
        # Call the module's post-load hook. This can be done before any model or
        # data has been initialized. This is ok as the post-load hook is for
        # server-wide (instead of registry-specific) functionalities.
info = load_information_from_description_file(module_name)
if info['post_load']:
getattr(sys.modules['odoo.addons.' + module_name], info['post_load'])()
except Exception as e:
msg = "Couldn't load module %s" % (module_name)
_logger.critical(msg)
_logger.critical(e)
raise
else:
loaded.append(module_name)
def get_modules():
"""Returns the list of module names
"""
def listdir(dir):
def clean(name):
name = os.path.basename(name)
if name[-4:] == '.zip':
name = name[:-4]
return name
def is_really_module(name):
for mname in MANIFEST_NAMES:
if os.path.isfile(opj(dir, name, mname)):
return True
return [
clean(it)
for it in os.listdir(dir)
if is_really_module(it)
]
plist = []
for ad in odoo.addons.__path__:
if not os.path.exists(ad):
_logger.warning("addons path does not exist: %s", ad)
continue
plist.extend(listdir(ad))
return list(set(plist))
def get_modules_with_version():
modules = get_modules()
res = dict.fromkeys(modules, adapt_version('1.0'))
for module in modules:
try:
info = load_information_from_description_file(module)
res[module] = info['version']
except Exception:
continue
return res
def adapt_version(version):
serie = release.major_version
if version == serie or not version.startswith(serie + '.'):
version = '%s.%s' % (serie, version)
return version
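# For example, with release.major_version == '15.0':
#   adapt_version('1.0')      -> '15.0.1.0'
#   adapt_version('15.0.1.0') -> '15.0.1.0' (already prefixed, unchanged)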
current_test = None
| 34.335484 | 15,966 |
7,816 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
""" Modules migration handling. """
from collections import defaultdict
import glob
import importlib.util
import logging
import os
from os.path import join as opj
from odoo.modules.module import get_resource_path
import odoo.release as release
import odoo.upgrade
from odoo.tools.parse_version import parse_version
_logger = logging.getLogger(__name__)
def load_script(path, module_name):
full_path = get_resource_path(*path.split(os.path.sep)) if not os.path.isabs(path) else path
spec = importlib.util.spec_from_file_location(module_name, full_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
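# Usage sketch for load_script (the script path is hypothetical):
#   mod = load_script('base/migrations/15.0.1.3/pre-migrate.py', 'pre_migrate')
#   mod.migrate(cr, '15.0.1.2')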
class MigrationManager(object):
"""
    This class manages the migration of modules.
    Migration files must be Python files containing a `migrate(cr, installed_version)`
    function. These files must respect a directory tree structure: a 'migrations' folder
    which contains a folder per version. A version can be a 'module' version or a
    'server.module' version (in which case the files are only processed by that version
    of the server).
    Python file names must start with `pre-` or `post-` and will be executed, respectively,
    before and after the module initialisation. `end-` scripts are run after all modules have
    been updated.
A special folder named `0.0.0` can contain scripts that will be run on any version change.
In `pre` stage, `0.0.0` scripts are run first, while in `post` and `end`, they are run last.
Example:
<moduledir>
`-- migrations
|-- 1.0
| |-- pre-update_table_x.py
| |-- pre-update_table_y.py
| |-- post-create_plop_records.py
| |-- end-cleanup.py
| `-- README.txt # not processed
|-- 9.0.1.1 # processed only on a 9.0 server
| |-- pre-delete_table_z.py
| `-- post-clean-data.py
|-- 0.0.0
    |   `-- end-invariants.py  # processed on every version update
`-- foo.py # not processed
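    A migration file itself only needs to expose a ``migrate`` function; a
    minimal sketch (table and statement are hypothetical):
        def migrate(cr, installed_version):
            cr.execute("ALTER TABLE res_partner ADD COLUMN x_legacy int")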
"""
def __init__(self, cr, graph):
self.cr = cr
self.graph = graph
self.migrations = defaultdict(dict)
self._get_files()
def _get_files(self):
def _get_upgrade_path(pkg):
for path in odoo.upgrade.__path__:
upgrade_path = opj(path, pkg)
if os.path.exists(upgrade_path):
yield upgrade_path
def get_scripts(path):
if not path:
return {}
return {
version: glob.glob(opj(path, version, '*.py'))
for version in os.listdir(path)
if os.path.isdir(opj(path, version))
}
for pkg in self.graph:
if not (hasattr(pkg, 'update') or pkg.state == 'to upgrade' or
getattr(pkg, 'load_state', None) == 'to upgrade'):
continue
self.migrations[pkg.name] = {
'module': get_scripts(get_resource_path(pkg.name, 'migrations')),
'module_upgrades': get_scripts(get_resource_path(pkg.name, 'upgrades')),
}
scripts = defaultdict(list)
for p in _get_upgrade_path(pkg.name):
for v, s in get_scripts(p).items():
scripts[v].extend(s)
self.migrations[pkg.name]["upgrade"] = scripts
def migrate_module(self, pkg, stage):
assert stage in ('pre', 'post', 'end')
stageformat = {
'pre': '[>%s]',
'post': '[%s>]',
'end': '[$%s]',
}
state = pkg.state if stage in ('pre', 'post') else getattr(pkg, 'load_state', None)
if not (hasattr(pkg, 'update') or state == 'to upgrade') or state == 'to install':
return
def convert_version(version):
if version.count('.') >= 2:
return version # the version number already contains the server version
return "%s.%s" % (release.major_version, version)
def _get_migration_versions(pkg, stage):
versions = sorted({
ver
for lv in self.migrations[pkg.name].values()
for ver, lf in lv.items()
if lf
}, key=lambda k: parse_version(convert_version(k)))
if "0.0.0" in versions:
# reorder versions
versions.remove("0.0.0")
if stage == "pre":
versions.insert(0, "0.0.0")
else:
versions.append("0.0.0")
return versions
def _get_migration_files(pkg, version, stage):
""" return a list of migration script files
"""
m = self.migrations[pkg.name]
return sorted(
(
f
for k in m
for f in m[k].get(version, [])
if os.path.basename(f).startswith(f"{stage}-")
),
key=os.path.basename,
)
installed_version = getattr(pkg, 'load_version', pkg.installed_version) or ''
parsed_installed_version = parse_version(installed_version)
current_version = parse_version(convert_version(pkg.data['version']))
def compare(version):
if version == "0.0.0" and parsed_installed_version < current_version:
return True
full_version = convert_version(version)
majorless_version = (version != full_version)
if majorless_version:
                # We should not re-execute major-less scripts when upgrading to a new
                # Odoo version: a module at `9.0.2.0` should not re-execute a `2.0`
                # script when upgrading to `10.0.2.0`. In that case we compare just
                # the module part of the version.
return parsed_installed_version[2:] < parse_version(full_version)[2:] <= current_version[2:]
return parsed_installed_version < parse_version(full_version) <= current_version
versions = _get_migration_versions(pkg, stage)
for version in versions:
if compare(version):
strfmt = {'addon': pkg.name,
'stage': stage,
'version': stageformat[stage] % version,
}
for pyfile in _get_migration_files(pkg, version, stage):
name, ext = os.path.splitext(os.path.basename(pyfile))
if ext.lower() != '.py':
continue
mod = None
try:
mod = load_script(pyfile, name)
_logger.info('module %(addon)s: Running migration %(version)s %(name)s' % dict(strfmt, name=mod.__name__))
migrate = mod.migrate
except ImportError:
_logger.exception('module %(addon)s: Unable to load %(stage)s-migration file %(file)s' % dict(strfmt, file=pyfile))
raise
except AttributeError:
_logger.error('module %(addon)s: Each %(stage)s-migration file must have a "migrate(cr, installed_version)" function' % strfmt)
else:
migrate(self.cr, installed_version)
finally:
if mod:
del mod
| 41.354497 | 7,816 |
1,031 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Bulgaria - Accounting',
'icon': '/l10n_bg/static/description/icon.png',
'version': '1.0',
'category': 'Accounting/Localizations/Account Charts',
'author': 'Odoo S.A.',
'description': """
    Chart of accounts and taxes for Bulgaria
""",
'depends': [
'account', 'base_vat', 'l10n_multilang',
],
'data': [
'data/account_chart_template_data.xml',
'data/account.account.template.csv',
'data/account.group.template.csv',
'data/l10n_bg_chart_data.xml',
'data/tax_report.xml',
'data/account_tax_group_data.xml',
'data/account_tax_template_data.xml',
"data/account_fiscal_position_template.xml",
'data/account_chart_template_configure_data.xml',
'data/menuitem.xml',
],
'demo': [
'demo/demo_company.xml',
],
'post_init_hook': 'load_translations',
'license': 'LGPL-3',
}
| 32.21875 | 1,031 |
725 | py | PYTHON | 15.0 |
{
'name': '2FA Invite mail',
'description': """
2FA Invite mail
===============
Allow users to invite another user to use two-factor authentication
by sending an email to the target user. This email redirects them to:
- the user security settings if the user is internal.
- the portal security settings page if the user is not internal.
""",
'depends': ['auth_totp', 'mail'],
'category': 'Extra Tools',
'auto_install': True,
'data': [
'data/ir_action_data.xml',
'data/mail_template_data.xml',
'views/res_users_views.xml',
],
'assets': {
'web.assets_tests': [
'auth_totp_mail/static/tests/**/*',
],
},
'license': 'LGPL-3',
}
| 29 | 725 |
444 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import tagged
from odoo.addons.auth_totp.tests.test_totp import TestTOTP
@tagged('post_install', '-at_install')
class TestTOTPInvite(TestTOTP):
def test_totp_administration(self):
self.start_tour('/web', 'totp_admin_invite', login='admin')
self.start_tour('/web', 'totp_admin_self_invite', login='admin')
| 34.153846 | 444 |
1,662 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import _, models
class Users(models.Model):
_inherit = 'res.users'
def action_open_my_account_settings(self):
action = {
"name": _("Account Security"),
"type": "ir.actions.act_window",
"res_model": "res.users",
"views": [[self.env.ref('auth_totp_mail.res_users_view_form').id, "form"]],
"res_id": self.id,
}
return action
def get_totp_invite_url(self):
return '/web#action=auth_totp_mail.action_activate_two_factor_authentication'
def action_totp_invite(self):
invite_template = self.env.ref('auth_totp_mail.mail_template_totp_invite')
users_to_invite = self.sudo().filtered(lambda user: not user.totp_secret)
for user in users_to_invite:
email_values = {
'email_from': self.env.user.email_formatted,
'author_id': self.env.user.partner_id.id,
}
invite_template.send_mail(user.id, force_send=True, email_values=email_values,
notif_layout='mail.mail_notification_light')
# Display a confirmation toaster
return {
'type': 'ir.actions.client',
'tag': 'display_notification',
'params': {
'type': 'info',
'sticky': False,
'message': _("Invitation to use two-factor authentication sent for the following user(s): %s",
', '.join(users_to_invite.mapped('name'))),
}
}
| 37.772727 | 1,662 |
840 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Signup',
'description': """
Allow users to sign up and reset their password
===============================================
""",
'version': '1.0',
'category': 'Hidden/Tools',
'auto_install': True,
'depends': [
'base_setup',
'mail',
'web',
],
'data': [
'data/ir_config_parameter_data.xml',
'data/ir_cron_data.xml',
'data/mail_template_data.xml',
'views/res_config_settings_views.xml',
'views/res_users_views.xml',
'views/auth_signup_login_templates.xml',
],
'bootstrap': True,
'assets': {
'web.assets_frontend': [
'auth_signup/static/**/*',
],
},
'license': 'LGPL-3',
}
| 25.454545 | 840 |
2,338 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from unittest.mock import patch
import odoo
from odoo.tests import HttpCase
from odoo import http
from odoo.exceptions import AccessError
class TestAuthSignupFlow(HttpCase):
def setUp(self):
super(TestAuthSignupFlow, self).setUp()
res_config = self.env['res.config.settings']
self.default_values = res_config.default_get(list(res_config.fields_get()))
def _activate_free_signup(self):
self.default_values.update({'auth_signup_uninvited': 'b2c'})
def _get_free_signup_url(self):
return '/web/signup'
def test_confirmation_mail_free_signup2(self):
"""
        Check that a new user is informed by email upon registration
"""
# Activate free signup
self._activate_free_signup()
# Get csrf_token
self.authenticate(None, None)
csrf_token = http.WebRequest.csrf_token(self)
# Values from login form
name = 'toto'
payload = {
'login': '[email protected]',
'name': name,
'password': 'mypassword',
'confirm_password': 'mypassword',
'csrf_token': csrf_token,
}
# Override unlink to not delete the email if the send works.
with patch.object(odoo.addons.mail.models.mail_mail.MailMail, 'unlink', lambda self: None):
# Call the controller
url_free_signup = self._get_free_signup_url()
self.url_open(url_free_signup, data=payload)
        # Check that an email is sent to the new user
new_user = self.env['res.users'].search([('name', '=', name)])
self.assertTrue(new_user)
mail = self.env['mail.message'].search([('message_type', '=', 'email'), ('model', '=', 'res.users'), ('res_id', '=', new_user.id)], limit=1)
self.assertTrue(mail, "The new user must be informed of his registration")
def test_compute_signup_url(self):
user = self.env.ref('base.user_demo')
user.groups_id -= self.env.ref('base.group_partner_manager')
partner = self.env.ref('base.partner_demo_portal')
partner.signup_prepare()
with self.assertRaises(AccessError):
partner.with_user(user.id).signup_url
| 35.969231 | 2,338 |
615 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
from odoo.http import request
class Http(models.AbstractModel):
_inherit = 'ir.http'
@classmethod
def _dispatch(cls):
# add signup token or login to the session if given
if 'auth_signup_token' in request.params:
request.session['auth_signup_token'] = request.params['auth_signup_token']
if 'auth_login' in request.params:
request.session['auth_login'] = request.params['auth_login']
return super(Http, cls)._dispatch()
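    # Illustrative effect (hypothetical URL): visiting
    #   /web/signup?auth_signup_token=ABC123
    # stores the token in the session, so a later request in the same session
    # can still resolve the invitation even without the query parameter.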
| 32.368421 | 615 |
11,256 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from ast import literal_eval
from collections import defaultdict
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.osv import expression
from odoo.tools.misc import ustr
from odoo.addons.base.models.ir_mail_server import MailDeliveryException
from odoo.addons.auth_signup.models.res_partner import SignupError, now
_logger = logging.getLogger(__name__)
class ResUsers(models.Model):
_inherit = 'res.users'
state = fields.Selection(compute='_compute_state', search='_search_state', string='Status',
selection=[('new', 'Never Connected'), ('active', 'Confirmed')])
def _search_state(self, operator, value):
negative = operator in expression.NEGATIVE_TERM_OPERATORS
# In case we have no value
if not value:
return expression.TRUE_DOMAIN if negative else expression.FALSE_DOMAIN
if operator in ['in', 'not in']:
if len(value) > 1:
return expression.FALSE_DOMAIN if negative else expression.TRUE_DOMAIN
            if value[0] == 'new':
                comp = '!=' if negative else '='
            elif value[0] == 'active':
                comp = '=' if negative else '!='
            else:
                # unexpected selection value: 'in' matches nothing, 'not in' matches everything
                return expression.TRUE_DOMAIN if negative else expression.FALSE_DOMAIN
            return [('log_ids', comp, False)]
if operator in ['=', '!=']:
# In case we search against anything else than new, we have to invert the operator
if value != 'new':
operator = expression.TERM_OPERATORS_NEGATION[operator]
return [('log_ids', operator, False)]
return expression.TRUE_DOMAIN
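    # Illustrative sketch of the mapping above (assumed usage): the search
    #   env['res.users'].search([('state', '=', 'new')])
    # resolves to a domain on the authentication logs,
    #   [('log_ids', '=', False)]
    # i.e. "never connected" means "has no log entries".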
def _compute_state(self):
for user in self:
user.state = 'active' if user.login_date else 'new'
@api.model
def signup(self, values, token=None):
""" signup a user, to either:
- create a new user (no token), or
- create a user for a partner (with token, but no user for partner), or
- change the password of a user (with token, and existing user).
:param values: a dictionary with field values that are written on user
:param token: signup token (optional)
:return: (dbname, login, password) for the signed up user
"""
if token:
# signup with a token: find the corresponding partner id
partner = self.env['res.partner']._signup_retrieve_partner(token, check_validity=True, raise_exception=True)
# invalidate signup token
partner.write({'signup_token': False, 'signup_type': False, 'signup_expiration': False})
partner_user = partner.user_ids and partner.user_ids[0] or False
# avoid overwriting existing (presumably correct) values with geolocation data
if partner.country_id or partner.zip or partner.city:
values.pop('city', None)
values.pop('country_id', None)
if partner.lang:
values.pop('lang', None)
if partner_user:
# user exists, modify it according to values
values.pop('login', None)
values.pop('name', None)
partner_user.write(values)
if not partner_user.login_date:
partner_user._notify_inviter()
return (self.env.cr.dbname, partner_user.login, values.get('password'))
else:
# user does not exist: sign up invited user
values.update({
'name': partner.name,
'partner_id': partner.id,
'email': values.get('email') or values.get('login'),
})
if partner.company_id:
values['company_id'] = partner.company_id.id
values['company_ids'] = [(6, 0, [partner.company_id.id])]
partner_user = self._signup_create_user(values)
partner_user._notify_inviter()
else:
# no token, sign up an external user
values['email'] = values.get('email') or values.get('login')
self._signup_create_user(values)
return (self.env.cr.dbname, values.get('login'), values.get('password'))
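    # Minimal usage sketch (hypothetical values): signing up an invited partner
    # consumes its token and returns the credentials triple, e.g.
    #   db, login, password = env['res.users'].sudo().signup(
    #       {'login': '[email protected]', 'password': 'secret'}, token='PBs5Adky...')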
@api.model
def _get_signup_invitation_scope(self):
return self.env['ir.config_parameter'].sudo().get_param('auth_signup.invitation_scope', 'b2b')
@api.model
def _signup_create_user(self, values):
""" signup a new user using the template user """
# check that uninvited users may sign up
if 'partner_id' not in values:
if self._get_signup_invitation_scope() != 'b2c':
raise SignupError(_('Signup is not allowed for uninvited users'))
return self._create_user_from_template(values)
def _notify_inviter(self):
for user in self:
invite_partner = user.create_uid.partner_id
if invite_partner:
                # notify the inviting user that the new user has connected
self.env['bus.bus']._sendone(invite_partner, 'res.users/connection', {
'username': user.name,
'partnerId': user.partner_id.id,
})
def _create_user_from_template(self, values):
template_user_id = literal_eval(self.env['ir.config_parameter'].sudo().get_param('base.template_portal_user_id', 'False'))
template_user = self.browse(template_user_id)
if not template_user.exists():
raise ValueError(_('Signup: invalid template user'))
if not values.get('login'):
raise ValueError(_('Signup: no login given for new user'))
if not values.get('partner_id') and not values.get('name'):
raise ValueError(_('Signup: no name or partner given for new user'))
# create a copy of the template user (attached to a specific partner_id if given)
values['active'] = True
try:
with self.env.cr.savepoint():
return template_user.with_context(no_reset_password=True).copy(values)
except Exception as e:
            # copy may fail if the requested login is not available.
raise SignupError(ustr(e))
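    # Design note: the savepoint above confines a failed copy (e.g. duplicate
    # login) so the surrounding transaction stays usable and the caller can
    # surface the SignupError to the end user.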
def reset_password(self, login):
""" retrieve the user corresponding to login (login or email),
and reset their password
"""
users = self.search([('login', '=', login)])
if not users:
users = self.search([('email', '=', login)])
if len(users) != 1:
raise Exception(_('Reset password: invalid username or email'))
return users.action_reset_password()
def action_reset_password(self):
""" create signup token for each user, and send their signup url by email """
if self.env.context.get('install_mode', False):
return
if self.filtered(lambda user: not user.active):
raise UserError(_("You cannot perform this action on an archived user."))
# prepare reset password signup
create_mode = bool(self.env.context.get('create_user'))
# no time limit for initial invitation, only for reset password
expiration = False if create_mode else now(days=+1)
self.mapped('partner_id').signup_prepare(signup_type="reset", expiration=expiration)
# send email to users with their signup url
template = False
if create_mode:
try:
template = self.env.ref('auth_signup.set_password_email', raise_if_not_found=False)
except ValueError:
pass
if not template:
template = self.env.ref('auth_signup.reset_password_email')
assert template._name == 'mail.template'
email_values = {
'email_cc': False,
'auto_delete': True,
'recipient_ids': [],
'partner_ids': [],
'scheduled_date': False,
}
for user in self:
if not user.email:
raise UserError(_("Cannot send email: user %s has no email address.", user.name))
email_values['email_to'] = user.email
# TDE FIXME: make this template technical (qweb)
with self.env.cr.savepoint():
force_send = not(self.env.context.get('import_file', False))
template.send_mail(user.id, force_send=force_send, raise_exception=True, email_values=email_values)
_logger.info("Password reset email sent for user <%s> to <%s>", user.login, user.email)
def send_unregistered_user_reminder(self, after_days=5):
datetime_min = fields.Datetime.today() - relativedelta(days=after_days)
datetime_max = datetime_min + relativedelta(hours=23, minutes=59, seconds=59)
res_users_with_details = self.env['res.users'].search_read([
('share', '=', False),
('create_uid.email', '!=', False),
('create_date', '>=', datetime_min),
('create_date', '<=', datetime_max),
('log_ids', '=', False)], ['create_uid', 'name', 'login'])
# group by invited by
invited_users = defaultdict(list)
for user in res_users_with_details:
invited_users[user.get('create_uid')[0]].append("%s (%s)" % (user.get('name'), user.get('login')))
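        # Resulting shape (illustrative values):
        #   {inviter_user_id: ['Jane (jane@example.com)', 'Joe (joe@example.com)']}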
        # Send a mail to each inviter about their invited users
for user in invited_users:
template = self.env.ref('auth_signup.mail_template_data_unregistered_users').with_context(dbname=self._cr.dbname, invited_users=invited_users[user])
template.send_mail(user, notif_layout='mail.mail_notification_light', force_send=False)
@api.model
def web_create_users(self, emails):
inactive_users = self.search([('state', '=', 'new'), '|', ('login', 'in', emails), ('email', 'in', emails)])
new_emails = set(emails) - set(inactive_users.mapped('email'))
res = super(ResUsers, self).web_create_users(list(new_emails))
if inactive_users:
inactive_users.with_context(create_user=True).action_reset_password()
return res
@api.model_create_multi
def create(self, vals_list):
# overridden to automatically invite user to sign up
users = super(ResUsers, self).create(vals_list)
if not self.env.context.get('no_reset_password'):
users_with_email = users.filtered('email')
if users_with_email:
try:
users_with_email.with_context(create_user=True).action_reset_password()
except MailDeliveryException:
users_with_email.partner_id.with_context(create_user=True).signup_cancel()
return users
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
self.ensure_one()
sup = super(ResUsers, self)
if not default or not default.get('email'):
# avoid sending email to the user we are duplicating
sup = super(ResUsers, self.with_context(no_reset_password=True))
return sup.copy(default=default)
| 44.141176 | 11,256 |
1,186 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
from odoo import api, fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
auth_signup_reset_password = fields.Boolean(string='Enable password reset from Login page', config_parameter='auth_signup.reset_password')
auth_signup_uninvited = fields.Selection([
('b2b', 'On invitation'),
('b2c', 'Free sign up'),
], string='Customer Account', default='b2b', config_parameter='auth_signup.invitation_scope')
auth_signup_template_user_id = fields.Many2one('res.users', string='Template user for new users created through signup',
config_parameter='base.template_portal_user_id')
def open_template_user(self):
action = self.env["ir.actions.actions"]._for_xml_id("base.action_res_users")
action['res_id'] = literal_eval(self.env['ir.config_parameter'].sudo().get_param('base.template_portal_user_id', 'False'))
action['views'] = [[self.env.ref('base.view_users_form').id, 'form']]
return action
| 49.416667 | 1,186 |
7,822 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import random
import werkzeug.urls
from collections import defaultdict
from datetime import datetime, timedelta
from odoo import api, exceptions, fields, models, _
class SignupError(Exception):
pass
def random_token():
# the token has an entropy of about 120 bits (6 bits/char * 20 chars)
chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
return ''.join(random.SystemRandom().choice(chars) for _ in range(20))
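# Sketch of the entropy estimate in the comment above: the alphabet holds
# 62 symbols, so each character carries log2(62) ~= 5.95 bits; 20 characters
# therefore give ~119 bits, rounded to "about 120 bits".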
def now(**kwargs):
return datetime.now() + timedelta(**kwargs)
class ResPartner(models.Model):
_inherit = 'res.partner'
signup_token = fields.Char(copy=False, groups="base.group_erp_manager")
signup_type = fields.Char(string='Signup Token Type', copy=False, groups="base.group_erp_manager")
signup_expiration = fields.Datetime(copy=False, groups="base.group_erp_manager")
signup_valid = fields.Boolean(compute='_compute_signup_valid', string='Signup Token is Valid')
signup_url = fields.Char(compute='_compute_signup_url', string='Signup URL')
@api.depends('signup_token', 'signup_expiration')
def _compute_signup_valid(self):
dt = now()
for partner, partner_sudo in zip(self, self.sudo()):
partner.signup_valid = bool(partner_sudo.signup_token) and \
(not partner_sudo.signup_expiration or dt <= partner_sudo.signup_expiration)
def _compute_signup_url(self):
""" proxy for function field towards actual implementation """
result = self.sudo()._get_signup_url_for_action()
for partner in self:
if any(u.has_group('base.group_user') for u in partner.user_ids if u != self.env.user):
self.env['res.users'].check_access_rights('write')
if any(u.has_group('base.group_portal') for u in partner.user_ids if u != self.env.user):
self.env['res.partner'].check_access_rights('write')
partner.signup_url = result.get(partner.id, False)
def _get_signup_url_for_action(self, url=None, action=None, view_type=None, menu_id=None, res_id=None, model=None):
""" generate a signup url for the given partner ids and action, possibly overriding
the url state components (menu_id, id, view_type) """
res = dict.fromkeys(self.ids, False)
for partner in self:
base_url = partner.get_base_url()
# when required, make sure the partner has a valid signup token
if self.env.context.get('signup_valid') and not partner.user_ids:
partner.sudo().signup_prepare()
route = 'login'
# the parameters to encode for the query
query = dict(db=self.env.cr.dbname)
signup_type = self.env.context.get('signup_force_type_in_url', partner.sudo().signup_type or '')
if signup_type:
route = 'reset_password' if signup_type == 'reset' else signup_type
if partner.sudo().signup_token and signup_type:
query['token'] = partner.sudo().signup_token
elif partner.user_ids:
query['login'] = partner.user_ids[0].login
else:
continue # no signup token, no user, thus no signup url!
if url:
query['redirect'] = url
else:
fragment = dict()
base = '/web#'
if action == '/mail/view':
base = '/mail/view?'
elif action:
fragment['action'] = action
if view_type:
fragment['view_type'] = view_type
if menu_id:
fragment['menu_id'] = menu_id
if model:
fragment['model'] = model
if res_id:
fragment['res_id'] = res_id
if fragment:
query['redirect'] = base + werkzeug.urls.url_encode(fragment)
signup_url = "/web/%s?%s" % (route, werkzeug.urls.url_encode(query))
if not self.env.context.get('relative_url'):
signup_url = werkzeug.urls.url_join(base_url, signup_url)
res[partner.id] = signup_url
return res
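    # Illustrative results (hypothetical database and token): a prepared
    # partner without a user yields something like
    #   /web/signup?db=mydb&token=PBs5AdkyZcfIWWJA2FsV
    # while a partner that already has a user yields
    #   /web/login?db=mydb&login=jane%40example.com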
def action_signup_prepare(self):
return self.signup_prepare()
def signup_get_auth_param(self):
""" Get a signup token related to the partner if signup is enabled.
If the partner already has a user, get the login parameter.
"""
if not self.env.user.has_group('base.group_user') and not self.env.is_admin():
raise exceptions.AccessDenied()
res = defaultdict(dict)
allow_signup = self.env['res.users']._get_signup_invitation_scope() == 'b2c'
for partner in self:
partner = partner.sudo()
if allow_signup and not partner.user_ids:
partner.signup_prepare()
res[partner.id]['auth_signup_token'] = partner.signup_token
elif partner.user_ids:
res[partner.id]['auth_login'] = partner.user_ids[0].login
return res
def signup_cancel(self):
return self.write({'signup_token': False, 'signup_type': False, 'signup_expiration': False})
def signup_prepare(self, signup_type="signup", expiration=False):
""" generate a new token for the partners with the given validity, if necessary
:param expiration: the expiration datetime of the token (string, optional)
"""
for partner in self:
if expiration or not partner.signup_valid:
token = random_token()
while self._signup_retrieve_partner(token):
token = random_token()
partner.write({'signup_token': token, 'signup_type': signup_type, 'signup_expiration': expiration})
return True
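    # Note on the retry loop above: with ~120 bits of entropy per token a
    # collision is practically impossible, so regenerating on collision is a
    # cheap safety net rather than a hot path.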
@api.model
def _signup_retrieve_partner(self, token, check_validity=False, raise_exception=False):
""" find the partner corresponding to a token, and possibly check its validity
:param token: the token to resolve
:param check_validity: if True, also check validity
:param raise_exception: if True, raise exception instead of returning False
:return: partner (browse record) or False (if raise_exception is False)
"""
partner = self.search([('signup_token', '=', token)], limit=1)
if not partner:
if raise_exception:
raise exceptions.UserError(_("Signup token '%s' is not valid", token))
return False
if check_validity and not partner.signup_valid:
if raise_exception:
raise exceptions.UserError(_("Signup token '%s' is no longer valid", token))
return False
return partner
@api.model
def signup_retrieve_info(self, token):
""" retrieve the user info about the token
:return: a dictionary with the user information:
- 'db': the name of the database
- 'token': the token, if token is valid
- 'name': the name of the partner, if token is valid
- 'login': the user login, if the user already exists
- 'email': the partner email, if the user does not exist
"""
partner = self._signup_retrieve_partner(token, raise_exception=True)
res = {'db': self.env.cr.dbname}
if partner.signup_valid:
res['token'] = token
res['name'] = partner.name
if partner.user_ids:
res['login'] = partner.user_ids[0].login
else:
res['email'] = res['login'] = partner.email or ''
return res
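    # Illustrative return value (hypothetical data) for a valid token of a
    # partner that has no user yet:
    #   {'db': 'mydb', 'token': 'PBs5AdkyZcfIWWJA2FsV',
    #    'name': 'Jane Doe', 'login': '[email protected]', 'email': '[email protected]'}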
| 43.94382 | 7,822 |
7,166 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import werkzeug
from odoo import http, tools, _
from odoo.addons.auth_signup.models.res_users import SignupError
from odoo.addons.web.controllers.main import ensure_db, Home, SIGN_UP_REQUEST_PARAMS
from odoo.addons.base_setup.controllers.main import BaseSetup
from odoo.exceptions import UserError
from odoo.http import request
_logger = logging.getLogger(__name__)
class AuthSignupHome(Home):
@http.route()
def web_login(self, *args, **kw):
ensure_db()
response = super(AuthSignupHome, self).web_login(*args, **kw)
response.qcontext.update(self.get_auth_signup_config())
if request.httprequest.method == 'GET' and request.session.uid and request.params.get('redirect'):
# Redirect if already logged in and redirect param is present
return request.redirect(request.params.get('redirect'))
return response
@http.route('/web/signup', type='http', auth='public', website=True, sitemap=False)
def web_auth_signup(self, *args, **kw):
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('signup_enabled'):
raise werkzeug.exceptions.NotFound()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
self.do_signup(qcontext)
# Send an account creation confirmation email
User = request.env['res.users']
user_sudo = User.sudo().search(
User._get_login_domain(qcontext.get('login')), order=User._get_login_order(), limit=1
)
template = request.env.ref('auth_signup.mail_template_user_signup_account_created', raise_if_not_found=False)
if user_sudo and template:
template.sudo().send_mail(user_sudo.id, force_send=True)
return self.web_login(*args, **kw)
except UserError as e:
qcontext['error'] = e.args[0]
except (SignupError, AssertionError) as e:
if request.env["res.users"].sudo().search([("login", "=", qcontext.get("login"))]):
qcontext["error"] = _("Another user is already registered using this email address.")
else:
_logger.error("%s", e)
qcontext['error'] = _("Could not create a new account.")
response = request.render('auth_signup.signup', qcontext)
response.headers['X-Frame-Options'] = 'DENY'
return response
@http.route('/web/reset_password', type='http', auth='public', website=True, sitemap=False)
def web_auth_reset_password(self, *args, **kw):
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('reset_password_enabled'):
raise werkzeug.exceptions.NotFound()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
if qcontext.get('token'):
self.do_signup(qcontext)
return self.web_login(*args, **kw)
else:
login = qcontext.get('login')
assert login, _("No login provided.")
_logger.info(
"Password reset attempt for <%s> by user <%s> from %s",
login, request.env.user.login, request.httprequest.remote_addr)
request.env['res.users'].sudo().reset_password(login)
qcontext['message'] = _("An email has been sent with credentials to reset your password")
except UserError as e:
qcontext['error'] = e.args[0]
except SignupError:
qcontext['error'] = _("Could not reset your password")
_logger.exception('error when resetting password')
except Exception as e:
qcontext['error'] = str(e)
response = request.render('auth_signup.reset_password', qcontext)
response.headers['X-Frame-Options'] = 'DENY'
return response
def get_auth_signup_config(self):
"""retrieve the module config (which features are enabled) for the login page"""
get_param = request.env['ir.config_parameter'].sudo().get_param
return {
'disable_database_manager': not tools.config['list_db'],
'signup_enabled': request.env['res.users']._get_signup_invitation_scope() == 'b2c',
'reset_password_enabled': get_param('auth_signup.reset_password') == 'True',
}
def get_auth_signup_qcontext(self):
""" Shared helper returning the rendering context for signup and reset password """
qcontext = {k: v for (k, v) in request.params.items() if k in SIGN_UP_REQUEST_PARAMS}
qcontext.update(self.get_auth_signup_config())
if not qcontext.get('token') and request.session.get('auth_signup_token'):
qcontext['token'] = request.session.get('auth_signup_token')
if qcontext.get('token'):
try:
# retrieve the user info (name, login or email) corresponding to a signup token
token_infos = request.env['res.partner'].sudo().signup_retrieve_info(qcontext.get('token'))
for k, v in token_infos.items():
qcontext.setdefault(k, v)
except:
qcontext['error'] = _("Invalid signup token")
qcontext['invalid_token'] = True
return qcontext
def _prepare_signup_values(self, qcontext):
values = { key: qcontext.get(key) for key in ('login', 'name', 'password') }
if not values:
raise UserError(_("The form was not properly filled in."))
if values.get('password') != qcontext.get('confirm_password'):
raise UserError(_("Passwords do not match; please retype them."))
supported_lang_codes = [code for code, _ in request.env['res.lang'].get_installed()]
lang = request.context.get('lang', '')
if lang in supported_lang_codes:
values['lang'] = lang
return values
def do_signup(self, qcontext):
""" Shared helper that creates a res.partner out of a token """
values = self._prepare_signup_values(qcontext)
self._signup_with_values(qcontext.get('token'), values)
request.env.cr.commit()
def _signup_with_values(self, token, values):
db, login, password = request.env['res.users'].sudo().signup(values, token)
request.env.cr.commit() # as authenticate will use its own cursor we need to commit the current transaction
uid = request.session.authenticate(db, login, password)
if not uid:
raise SignupError(_('Authentication Failed.'))
class AuthBaseSetup(BaseSetup):
@http.route('/base_setup/data', type='json', auth='user')
def base_setup_data(self, **kwargs):
res = super().base_setup_data(**kwargs)
res.update({'resend_invitation': True})
return res
| 48.09396 | 7,166 |
1,255 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2016 Onestein (<http://www.onestein.eu>).
{
'name': 'Netherlands - Accounting',
'version': '3.0',
'category': 'Accounting/Localizations/Account Charts',
'author': 'Onestein',
'website': 'http://www.onestein.eu',
'depends': [
'account',
'base_iban',
'base_vat',
'base_address_extended',
],
'data': [
'data/account_account_tag.xml',
'data/account_chart_template.xml',
'data/account.account.template.csv',
'data/account_chart_template_post_data.xml',
'data/account_tax_group_data.xml',
'data/account_tax_report_data.xml',
'data/account_tax_template.xml',
'data/account_fiscal_position_template.xml',
'data/account_fiscal_position_tax_template.xml',
'data/account_fiscal_position_account_template.xml',
'data/account_chart_template_data.xml',
'data/menuitem.xml',
'views/res_partner_views.xml',
'views/res_company_views.xml',
],
'demo': [
'demo/demo_company.xml',
],
'auto_install': False,
'installable': True,
'license': 'LGPL-3',
}
| 31.375 | 1,255 |
265 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
import odoo
def migrate(cr, version):
registry = odoo.registry(cr.dbname)
from odoo.addons.account.models.chart_template import migrate_set_tags_and_taxes_updatable
migrate_set_tags_and_taxes_updatable(cr, registry, 'l10n_nl')
| 33.125 | 265 |
697 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
from odoo import api, fields, models, _
class AccountJournal(models.Model):
_inherit = 'account.journal'
@api.model
def _prepare_liquidity_account_vals(self, company, code, vals):
# OVERRIDE
account_vals = super()._prepare_liquidity_account_vals(company, code, vals)
if company.account_fiscal_country_id.code == 'NL':
            # Ensure the newly created liquidity accounts have the right account tag
            # so that they are included in the Dutch financial reports.
account_vals.setdefault('tag_ids', [])
account_vals['tag_ids'].append((4, self.env.ref('l10n_nl.account_tag_25').id))
return account_vals
| 34.85 | 697 |
272 |
py
|
PYTHON
|
15.0
|
# coding: utf-8
from odoo import fields, models
class ResCompany(models.Model):
_inherit = 'res.company'
l10n_nl_kvk = fields.Char(related='partner_id.l10n_nl_kvk', readonly=False)
l10n_nl_oin = fields.Char(related='partner_id.l10n_nl_oin', readonly=False)
| 30.222222 | 272 |
1,447 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
from odoo import api, Command, models, _
class AccountChartTemplate(models.Model):
_inherit = 'account.chart.template'
def _load(self, sale_tax_rate, purchase_tax_rate, company):
# Add tag to 999999 account
res = super(AccountChartTemplate, self)._load(sale_tax_rate, purchase_tax_rate, company)
if company.account_fiscal_country_id.code == 'NL':
account = self.env['account.account'].search([('code', '=', '999999'), ('company_id', '=', self.env.company.id)])
if account:
account.tag_ids = [(4, self.env.ref('l10n_nl.account_tag_12').id)]
return res
@api.model
def _prepare_transfer_account_for_direct_creation(self, name, company):
res = super(AccountChartTemplate, self)._prepare_transfer_account_for_direct_creation(name, company)
if company.account_fiscal_country_id.code == 'NL':
xml_id = self.env.ref('l10n_nl.account_tag_25').id
res.setdefault('tag_ids', [])
res['tag_ids'].append((4, xml_id))
return res
@api.model
def _create_liquidity_journal_suspense_account(self, company, code_digits):
account = super()._create_liquidity_journal_suspense_account(company, code_digits)
if company.account_fiscal_country_id.code == 'NL':
account.tag_ids = [Command.link(self.env.ref('l10n_nl.account_tag_25').id)]
return account
| 45.21875 | 1,447 |
237 |
py
|
PYTHON
|
15.0
|
# coding: utf-8
from odoo import fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
l10n_nl_kvk = fields.Char(string='KVK-nummer')
    l10n_nl_oin = fields.Char(string='Organisatie Identificatie Nummer')
| 26.333333 | 237 |
1,251 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2014 InnOpen Group Kft (<http://www.innopen.eu>).
# Copyright (C) 2021 Odoo S.A.
{
'name': 'Hungarian - Accounting',
'version': '3.0',
'category': 'Accounting/Localizations/Account Charts',
'author': 'Odoo S.A.',
'description': """
Base module for Hungarian localization
==========================================
This module consists of:
- Generic Hungarian chart of accounts
- Hungarian taxes
- Hungarian Bank information
""",
'depends': [
'account'
],
'data': [
'data/l10n_hu_chart_data.xml',
'data/account.account.template.csv',
'data/account.group.template.csv',
'data/account.tax.group.csv',
'data/account_tax_report_data.xml',
'data/account_tax_template_data.xml',
'data/account.fiscal.position.template.csv',
'data/account.fiscal.position.tax.template.csv',
'data/res.bank.csv',
'data/account_chart_template_data.xml',
'data/account_chart_template_configure_data.xml',
'data/menuitem_data.xml',
],
'demo': [
'demo/demo_company.xml',
],
'license': 'LGPL-3',
}
| 28.431818 | 1,251 |
1,767 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Author: Goran Kliska
# mail: goran.kliska(AT)slobodni-programi.hr
# Copyright (C) 2011- Slobodni programi d.o.o., Zagreb
# Contributions:
# Tomislav Bošnjaković, Storm Computers d.o.o. :
# - account types
{
"name": "Croatia - Accounting (RRIF 2012)",
"description": """
Croatian localisation.
======================
Author: Goran Kliska, Slobodni programi d.o.o., Zagreb
https://www.slobodni-programi.hr
Contributions:
Tomislav Bošnjaković, Storm Computers: account types
Ivan Vađić, Slobodni programi: account types
Description:
Croatian Chart of Accounts (RRIF ver. 2012)
RRIF chart of accounts for entrepreneurs, 2012 edition
Account types
Chart of accounts based on the RRIF plan, adapted by shortening names and adding analytic accounts
Tax groups according to the tax return
Taxes of the VAT form
Other taxes
Basic fiscal positions
Data sources:
https://www.rrif.hr/dok/preuzimanje/rrif-rp2011.rar
https://www.rrif.hr/dok/preuzimanje/rrif-rp2012.rar
""",
"version": "13.0",
"author": "OpenERP Croatian Community",
'category': 'Accounting/Localizations/Account Charts',
'depends': [
'account',
],
'data': [
'data/l10n_hr_chart_data.xml',
'data/account.account.type.csv',
'data/account.account.template.csv',
'data/account_chart_tag_data.xml',
'data/account.tax.group.csv',
'data/account_tax_report_data.xml',
'data/account_tax_template_data.xml',
'data/account_tax_fiscal_position_data.xml',
'data/account_chart_template_data.xml',
],
'demo': [
'demo/demo_company.xml',
],
'license': 'LGPL-3',
}
| 27.904762 | 1,758 |
265 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
import odoo
def migrate(cr, version):
registry = odoo.registry(cr.dbname)
from odoo.addons.account.models.chart_template import migrate_set_tags_and_taxes_updatable
migrate_set_tags_and_taxes_updatable(cr, registry, 'l10n_hr')
| 33.125 | 265 |
561 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Accounting/Fleet bridge',
'category': 'Accounting/Accounting',
'summary': 'Manage accounting with fleets',
'description': "",
'version': '1.0',
'depends': ['fleet', 'account'],
'data': [
'data/fleet_service_type_data.xml',
'views/account_move_views.xml',
'views/fleet_vehicle_views.xml',
],
'installable': True,
'auto_install': True,
'application': False,
'license': 'LGPL-3',
}
| 29.526316 | 561 |
2,189 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields, api, _
class AccountMove(models.Model):
_inherit = 'account.move'
def _post(self, soft=True):
vendor_bill_service = self.env.ref('account_fleet.data_fleet_service_type_vendor_bill', raise_if_not_found=False)
if not vendor_bill_service:
return super()._post(soft)
val_list = []
log_list = []
not_posted_before = self.filtered(lambda r: not r.posted_before)
posted = super()._post(soft) # We need the move name to be set, but we also need to know which move are posted for the first time.
for line in (not_posted_before & posted).line_ids.filtered(lambda ml: ml.vehicle_id and ml.move_id.move_type == 'in_invoice'):
val = line._prepare_fleet_log_service()
log = _('Service Vendor Bill: <a href=# data-oe-model=account.move data-oe-id={move_id}>{move_name}</a>').format(
move_id=line.move_id.id,
move_name=line.move_id.name,
)
val_list.append(val)
log_list.append(log)
log_service_ids = self.env['fleet.vehicle.log.services'].create(val_list)
for log_service_id, log in zip(log_service_ids, log_list):
log_service_id.message_post(body=log)
return posted
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
vehicle_id = fields.Many2one('fleet.vehicle', string='Vehicle', index=True)
need_vehicle = fields.Boolean(compute='_compute_need_vehicle',
help="Technical field to decide whether the vehicle_id field is editable")
def _compute_need_vehicle(self):
self.need_vehicle = False
def _prepare_fleet_log_service(self):
vendor_bill_service = self.env.ref('account_fleet.data_fleet_service_type_vendor_bill', raise_if_not_found=False)
return {
'service_type_id': vendor_bill_service.id,
'vehicle_id': self.vehicle_id.id,
'amount': self.price_subtotal,
'vendor_id': self.partner_id.id,
'description': self.name,
}
| 42.921569 | 2,189 |
1,764 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import Command, models, fields
class FleetVehicle(models.Model):
_inherit = 'fleet.vehicle'
bill_count = fields.Integer(compute='_compute_move_ids', string="Bills Count")
account_move_ids = fields.One2many('account.move', compute='_compute_move_ids')
def _compute_move_ids(self):
if not self.env.user.has_group('account.group_account_readonly'):
self.account_move_ids = False
self.bill_count = 0
return
moves = self.env['account.move.line'].read_group(
domain=[
('vehicle_id', 'in', self.ids),
('parent_state', '!=', 'cancel'),
('move_id.move_type', 'in', self.env['account.move'].get_purchase_types())
],
fields=['vehicle_id', 'move_id:array_agg'],
groupby=['vehicle_id'],
)
vehicle_move_mapping = {move['vehicle_id'][0]: set(move['move_id']) for move in moves}
for vehicle in self:
vehicle.account_move_ids = [Command.set(vehicle_move_mapping.get(vehicle.id, []))]
vehicle.bill_count = len(vehicle.account_move_ids)
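    # Shape of the read_group rows handled above (illustrative values):
    #   {'vehicle_id': (1, 'Audi/A3/1-ABC-123'), 'move_id': [7, 9], ...}
    # i.e. one row per vehicle with the aggregated purchase-move ids.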
def action_view_bills(self):
self.ensure_one()
form_view_ref = self.env.ref('account.view_move_form', False)
tree_view_ref = self.env.ref('account_fleet.account_move_view_tree', False)
result = self.env['ir.actions.act_window']._for_xml_id('account.action_move_in_invoice_type')
result.update({
'domain': [('id', 'in', self.account_move_ids.ids)],
'views': [(tree_view_ref.id, 'tree'), (form_view_ref.id, 'form')],
})
return result
| 40.090909 | 1,764 |
749 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Irreversible Lock Date',
'version': '1.0',
'category': 'Accounting/Accounting',
'description': """
Make the lock date irreversible:
    * You cannot set stricter restrictions on advisors than on users. Therefore, the All Users Lock Date must be prior (or equal) to the Invoice/Bills Lock Date.
    * You cannot lock a period that has not yet ended. Therefore, the All Users Lock Date must be prior (or equal) to the last day of the previous month.
    * Any new All Users Lock Date must be later than (or equal to) the previous one.
""",
'depends': ['account'],
'data': [],
'license': 'LGPL-3',
}
| 44.058824 | 749 |
5,023 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
import calendar
from dateutil.relativedelta import relativedelta
from odoo import fields, models, api, _
from odoo.exceptions import UserError
class ResCompany(models.Model):
_inherit = 'res.company'
def _autorise_lock_date_changes(self, vals):
        '''Check the lock dates for the current companies. This can't be done in an api.constrains because we need
        to compare the new values with the old ones. This method makes the lock dates irreversible:
        * You cannot set stricter restrictions on advisors than on users.
        Therefore, the All Users Lock Date must be prior (or equal) to the Invoice/Bills Lock Date.
        * You cannot lock a period that has not yet ended.
        Therefore, the All Users Lock Date must be prior (or equal) to the last day of the previous month.
        * Any new All Users Lock Date must be later than (or equal to) the previous one.
        * A tax lock date cannot be removed or moved backward, and it cannot be set later than
        the last day of the previous month.
:param vals: The values passed to the write method.
'''
period_lock_date = vals.get('period_lock_date') and fields.Date.from_string(vals['period_lock_date'])
fiscalyear_lock_date = vals.get('fiscalyear_lock_date') and fields.Date.from_string(vals['fiscalyear_lock_date'])
tax_lock_date = vals.get('tax_lock_date') and fields.Date.from_string(vals['tax_lock_date'])
previous_month = fields.Date.today() + relativedelta(months=-1)
days_previous_month = calendar.monthrange(previous_month.year, previous_month.month)
previous_month = previous_month.replace(day=days_previous_month[1])
for company in self:
old_fiscalyear_lock_date = company.fiscalyear_lock_date
old_period_lock_date = company.period_lock_date
old_tax_lock_date = company.tax_lock_date
# The user attempts to remove the tax lock date
if old_tax_lock_date and not tax_lock_date and 'tax_lock_date' in vals:
raise UserError(_('The tax lock date is irreversible and can\'t be removed.'))
# The user attempts to set a tax lock date prior to the previous one
if old_tax_lock_date and tax_lock_date and tax_lock_date < old_tax_lock_date:
raise UserError(_('The new tax lock date must be set after the previous lock date.'))
# In case of no new tax lock date in vals, fallback to the oldest
tax_lock_date = tax_lock_date or old_tax_lock_date
# The user attempts to set a tax lock date prior to the last day of previous month
if tax_lock_date and tax_lock_date > previous_month:
raise UserError(_('You cannot lock a period that has not yet ended. Therefore, the tax lock date must be anterior (or equal) to the last day of the previous month.'))
# The user attempts to remove the lock date for advisors
if old_fiscalyear_lock_date and not fiscalyear_lock_date and 'fiscalyear_lock_date' in vals:
raise UserError(_('The lock date for advisors is irreversible and can\'t be removed.'))
# The user attempts to set a lock date for advisors prior to the previous one
if old_fiscalyear_lock_date and fiscalyear_lock_date and fiscalyear_lock_date < old_fiscalyear_lock_date:
raise UserError(_('Any new All Users Lock Date must be posterior (or equal) to the previous one.'))
# In case of no new fiscal year in vals, fallback to the oldest
fiscalyear_lock_date = fiscalyear_lock_date or old_fiscalyear_lock_date
if not fiscalyear_lock_date:
continue
# The user attempts to set a lock date for advisors prior to the last day of previous month
if fiscalyear_lock_date > previous_month:
raise UserError(_('You cannot lock a period that has not yet ended. Therefore, the All Users Lock Date must be anterior (or equal) to the last day of the previous month.'))
# In case of no new period lock date in vals, fallback to the one defined in the company
period_lock_date = period_lock_date or old_period_lock_date
if not period_lock_date:
continue
# The user attempts to set a lock date for advisors prior to the lock date for users
if period_lock_date < fiscalyear_lock_date:
raise UserError(_('You cannot set stricter restrictions on advisors than on users. Therefore, the All Users Lock Date must be anterior (or equal) to the Invoice/Bills Lock Date.'))
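    # Worked example of the rules above, assuming today is 2021-08-15: the
    # latest acceptable All Users Lock Date is 2021-07-31 (last day of the
    # previous month); once set it can only move forward, and the
    # Invoice/Bills Lock Date may never be earlier than it.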
def write(self, vals):
# fiscalyear_lock_date can't be set to a prior date
if 'fiscalyear_lock_date' in vals or 'period_lock_date' in vals or 'tax_lock_date' in vals:
self._autorise_lock_date_changes(vals)
return super(ResCompany, self).write(vals)
| 62.012346 | 5,023 |
1,226 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Live Event Tracks',
'category': 'Marketing/Events',
'sequence': 1006,
'version': '1.0',
'summary': 'Support live tracks: streaming, participation, youtube',
'website': 'https://www.odoo.com/app/events',
'description': "",
'depends': [
'website_event_track',
],
'data': [
'views/event_track_templates_list.xml',
'views/event_track_templates_page.xml',
'views/event_track_views.xml',
],
'demo': [
'data/event_track_demo.xml'
],
'application': False,
'installable': True,
'assets': {
'web.assets_frontend': [
'website_event_track_live/static/src/scss/website_event_track_live.scss',
'website_event_track_live/static/src/js/website_event_track_replay_suggestion.js',
'website_event_track_live/static/src/js/website_event_track_suggestion.js',
'website_event_track_live/static/src/js/website_event_track_live.js',
],
'web.assets_qweb': [
'website_event_track_live/static/src/xml/**/*',
],
},
'license': 'LGPL-3',
}
| 32.263158 | 1,226 |
2,210 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
from odoo import api, fields, models
class Track(models.Model):
_inherit = 'event.track'
youtube_video_url = fields.Char('Youtube Video URL',
help="Configure this URL so that event attendees can see your Track in video!")
youtube_video_id = fields.Char('Youtube video ID', compute='_compute_youtube_video_id',
help="Extracted from the video URL and used to infer various links (embed/thumbnail/...)")
is_youtube_replay = fields.Boolean('Is Youtube Replay',
help="Check this option if the video is already available on Youtube to avoid showing 'Direct' options (Chat, ...)")
is_youtube_chat_available = fields.Boolean('Is Chat Available', compute='_compute_is_youtube_chat_available')
@api.depends('youtube_video_url')
def _compute_youtube_video_id(self):
for track in self:
if track.youtube_video_url:
                regex = r'^.*(youtu\.be\/|v\/|u\/\w\/|embed\/|watch\?v=|&v=)([^#&?]*).*'
match = re.match(regex, track.youtube_video_url)
if match and len(match.groups()) == 2 and len(match.group(2)) == 11:
track.youtube_video_id = match.group(2)
if not track.youtube_video_id:
track.youtube_video_id = False
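    # Examples of URL shapes accepted by the regex above (hypothetical video
    # id 'dQw4w9WgXcQ', always 11 characters):
    #   https://www.youtube.com/watch?v=dQw4w9WgXcQ
    #   https://youtu.be/dQw4w9WgXcQ
    #   https://www.youtube.com/embed/dQw4w9WgXcQ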
@api.depends('youtube_video_id', 'is_youtube_replay', 'date_end', 'is_track_done')
def _compute_website_image_url(self):
youtube_thumbnail_tracks = self.filtered(lambda track: not track.website_image and track.youtube_video_id)
super(Track, self - youtube_thumbnail_tracks)._compute_website_image_url()
for track in youtube_thumbnail_tracks:
track.website_image_url = f'https://img.youtube.com/vi/{track.youtube_video_id}/maxresdefault.jpg'
@api.depends('youtube_video_url', 'is_youtube_replay', 'date', 'date_end', 'is_track_upcoming', 'is_track_live')
def _compute_is_youtube_chat_available(self):
for track in self:
track.is_youtube_chat_available = track.youtube_video_url and not track.is_youtube_replay and (track.is_track_soon or track.is_track_live)
| 52.619048 | 2,210 |
1,077 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
from odoo.addons.website_event_track.controllers.event_track import EventTrackController
from odoo.http import request
class WebsiteEventSessionLiveController(EventTrackController):
def _event_track_page_get_values(self, event, track, **options):
if 'widescreen' not in options:
options['widescreen'] = track.youtube_video_url and (track.is_youtube_replay or track.is_track_soon or track.is_track_live or track.is_track_done)
values = super(WebsiteEventSessionLiveController, self)._event_track_page_get_values(event, track, **options)
# Youtube disables the chat embed on all mobile devices
# This regex is a naive attempt at matching their behavior (should work for most cases)
values['is_mobile_chat_disabled'] = bool(re.match(
r'^.*(Android|iPad|iPhone).*',
request.httprequest.headers.get('User-Agent', request.httprequest.headers.get('user-agent', ''))))
return values
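    # Example user agents caught by the regex above (chat would be disabled):
    #   Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) ...
    #   Mozilla/5.0 (Linux; Android 11; Pixel 5) ...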
| 51.285714 | 1,077 |
1,469 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http
from odoo.addons.website_event_track.controllers.event_track import EventTrackController
from odoo.osv import expression
class EventTrackLiveController(EventTrackController):
@http.route('/event_track/get_track_suggestion', type='json', auth='public', website=True)
def get_next_track_suggestion(self, track_id):
track = self._fetch_track(track_id)
track_suggestion = track._get_track_suggestions(
restrict_domain=expression.AND([
self._get_event_tracks_domain(track.event_id),
[('youtube_video_url', '!=', False)]
]), limit=1)
if not track_suggestion:
return False
track_suggestion_sudo = track_suggestion.sudo()
track_sudo = track.sudo()
return self._prepare_track_suggestion_values(track_sudo, track_suggestion_sudo)
def _prepare_track_suggestion_values(self, track, track_suggestion):
return {
'current_track': {
'name': track.name,
'website_image_url': track.website_image_url,
},
'suggestion': {
'id': track_suggestion.id,
'name': track_suggestion.name,
'speaker_name': track_suggestion.partner_name,
'website_url': track_suggestion.website_url
}
}
| 39.702703 | 1,469 |
422 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
"name": "Fetchmail Gmail",
"version": "1.0",
"category": "Hidden",
"description": "Google authentication for incoming mail server",
"depends": [
"google_gmail",
"fetchmail",
],
"data": ["views/fetchmail_server_views.xml"],
"auto_install": True,
"license": "LGPL-3",
}
| 26.375 | 422 |
1,714 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, _
from odoo.exceptions import UserError
class FetchmailServer(models.Model):
_name = 'fetchmail.server'
_inherit = ['fetchmail.server', 'google.gmail.mixin']
@api.constrains('use_google_gmail_service', 'server_type')
def _check_use_google_gmail_service(self):
if any(server.use_google_gmail_service and server.server_type != 'imap' for server in self):
raise UserError(_('Gmail authentication only supports IMAP server type.'))
@api.onchange('use_google_gmail_service')
def _onchange_use_google_gmail_service(self):
"""Set the default configuration for a IMAP Gmail server."""
if self.use_google_gmail_service:
self.server = 'imap.gmail.com'
self.server_type = 'imap'
self.is_ssl = True
self.port = 993
else:
self.google_gmail_authorization_code = False
self.google_gmail_refresh_token = False
self.google_gmail_access_token = False
self.google_gmail_access_token_expiration = False
def _imap_login(self, connection):
"""Authenticate the IMAP connection.
If the mail server is Gmail, we use the OAuth2 authentication protocol.
"""
self.ensure_one()
if self.use_google_gmail_service:
auth_string = self._generate_oauth2_string(self.user, self.google_gmail_refresh_token)
connection.authenticate('XOAUTH2', lambda x: auth_string)
connection.select('INBOX')
else:
super(FetchmailServer, self)._imap_login(connection)
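    # Sketch of the SASL string assumed to come out of _generate_oauth2_string
    # (hypothetical account and token), per the Gmail XOAUTH2 convention:
    #   'user=jane@gmail.com\x01auth=Bearer ya29.a0Af...\x01\x01'
    # imaplib base64-encodes this payload when issuing AUTHENTICATE XOAUTH2.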
| 40.809524 | 1,714 |
2,583 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Advanced Events',
'category': 'Marketing',
'summary': 'Sponsors, Tracks, Agenda, Event News',
'version': '1.3',
'description': "",
'depends': ['website_event'],
'data': [
'security/ir.model.access.csv',
'security/event_track_security.xml',
'data/event_data.xml',
'data/mail_data.xml',
'data/mail_template_data.xml',
'data/event_track_data.xml',
'views/mail_templates.xml',
'views/event_templates.xml',
'views/event_track_templates_agenda.xml',
'views/event_track_templates_list.xml',
'views/event_track_templates_reminder.xml',
'views/event_track_templates_page.xml',
'views/event_track_templates_proposal.xml',
'views/website_templates.xml',
'views/event_track_views.xml',
'views/event_track_location_views.xml',
'views/event_track_tag_views.xml',
'views/event_track_stage_views.xml',
'views/event_track_visitor_views.xml',
'views/event_event_views.xml',
'views/event_type_views.xml',
'views/res_config_settings_view.xml',
'views/website_visitor_views.xml',
'views/event_menus.xml',
],
'demo': [
'data/event_demo.xml',
'data/event_track_location_demo.xml',
'data/event_track_tag_demo.xml',
'data/event_track_demo.xml',
'data/event_track_demo_description.xml',
'data/event_track_visitor_demo.xml',
],
'assets': {
'web.assets_frontend': [
'website_event_track/static/src/scss/event_track_templates.scss',
'website_event_track/static/src/scss/event_track_templates_online.scss',
'website_event_track/static/src/scss/pwa_frontend.scss',
'website_event_track/static/src/js/website_event_track.js',
'website_event_track/static/src/js/website_event_track_proposal_form.js',
'website_event_track/static/src/js/website_event_track_proposal_form_tags.js',
'website_event_track/static/src/js/event_track_reminder.js',
'website_event_track/static/src/js/event_track_timer.js',
'website_event_track/static/src/js/website_event_pwa_widget.js',
'website_event_track/static/lib/idb-keyval/idb-keyval.js',
],
'web.assets_qweb': [
'website_event_track/static/src/xml/event_track_proposal_templates.xml',
],
},
'license': 'LGPL-3',
}
| 41 | 2,583 |
3,584 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from odoo import fields
from odoo.addons.website_event.tests.common import TestWebsiteEventCommon
from odoo.tests.common import users
class TestEventWebsiteTrack(TestWebsiteEventCommon):
def _get_menus(self):
return super(TestEventWebsiteTrack, self)._get_menus() | set(['Talks', 'Agenda', 'Talk Proposals'])
@users('user_eventmanager')
def test_create_menu(self):
vals = {
'name': 'TestEvent',
'date_begin': fields.Datetime.to_string(datetime.today() + timedelta(days=1)),
'date_end': fields.Datetime.to_string(datetime.today() + timedelta(days=15)),
'registration_ids': [(0, 0, {
'partner_id': self.user_eventuser.partner_id.id,
'name': 'test_reg',
})],
'website_menu': True,
'community_menu': True,
'website_track': True,
'website_track_proposal': True,
}
event = self.env['event.event'].create(vals)
self._assert_website_menus(event)
event.write({
'website_track': False,
'website_track_proposal': False,
})
self._assert_website_menus(event, ['Introduction', 'Location', 'Register', 'Community'], menus_out=['Talks', 'Agenda', 'Talk Proposals'])
@users('user_event_web_manager')
def test_menu_management_frontend(self):
vals = {
'name': 'TestEvent',
'date_begin': fields.Datetime.to_string(datetime.today() + timedelta(days=1)),
'date_end': fields.Datetime.to_string(datetime.today() + timedelta(days=15)),
'website_menu': True,
'community_menu': True,
'website_track': True,
'website_track_proposal': True,
}
event = self.env['event.event'].create(vals)
self.assertTrue(event.website_track)
self.assertTrue(event.website_track_proposal)
self._assert_website_menus(event, self._get_menus())
introduction_menu = event.menu_id.child_id.filtered(lambda menu: menu.name == 'Introduction')
introduction_menu.unlink()
self._assert_website_menus(event, ['Location', 'Register', 'Community', 'Talks', 'Agenda', 'Talk Proposals'], menus_out=["Introduction"])
menus = event.menu_id.child_id.filtered(lambda menu: menu.name in ['Agenda', 'Talk Proposals'])
menus.unlink()
self.assertTrue(event.website_track)
self.assertFalse(event.website_track_proposal)
menus = event.menu_id.child_id.filtered(lambda menu: menu.name in ['Talks'])
menus.unlink()
self.assertFalse(event.website_track)
self.assertFalse(event.website_track_proposal)
self._assert_website_menus(event, ['Location', 'Register', 'Community'], menus_out=["Introduction", "Talks", "Agenda", "Talk Proposals"])
event.write({'website_track_proposal': True})
self.assertFalse(event.website_track)
self.assertTrue(event.website_track_proposal)
self._assert_website_menus(event, ['Location', 'Register', 'Community', 'Talk Proposals'], menus_out=["Introduction", "Talks", "Agenda"])
event.write({'website_track': True})
self.assertTrue(event.website_track)
self.assertTrue(event.website_track_proposal)
self._assert_website_menus(event, ['Location', 'Register', 'Community', 'Talks', 'Agenda', 'Talk Proposals'], menus_out=["Introduction"])
| 44.8 | 3,584 |
9,115 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from unittest.mock import patch
from odoo import fields
from odoo.addons.website.models.website_visitor import WebsiteVisitor
from odoo.addons.website_event.tests.common import TestEventOnlineCommon
from odoo.tests.common import users
class TestTrackData(TestEventOnlineCommon):
@users('user_eventmanager')
def test_track_partner_sync(self):
""" Test registration computed fields about partner """
test_email = '"Nibbler In Space" <[email protected]>'
test_phone = '0456001122'
test_bio = '<p>UserInput</p>'
# test_bio_void = '<p><br/></p>'
event = self.env['event.event'].browse(self.event_0.ids)
customer = self.env['res.partner'].browse(self.event_customer.id)
# take all from partner
new_track = self.env['event.track'].create({
'event_id': event.id,
'name': 'Mega Track',
'partner_id': customer.id,
})
self.assertEqual(new_track.partner_id, customer)
self.assertEqual(new_track.partner_name, customer.name)
self.assertEqual(new_track.partner_email, customer.email)
self.assertEqual(new_track.partner_phone, customer.phone)
self.assertEqual(new_track.partner_biography, customer.website_description)
self.assertIn(customer.name, new_track.partner_biography, 'Low-level test: ensure correctly updated')
# partial update
new_track = self.env['event.track'].create({
'event_id': event.id,
'name': 'Mega Track',
'partner_id': customer.id,
'partner_name': 'Nibbler In Space',
'partner_email': test_email,
})
self.assertEqual(new_track.partner_id, customer)
self.assertEqual(
new_track.partner_name, 'Nibbler In Space',
'Track should take user input over computed partner value')
self.assertEqual(
new_track.partner_email, test_email,
'Track should take user input over computed partner value')
self.assertEqual(
new_track.partner_phone, customer.phone,
'Track should take partner value if not user input')
# already filled information should not be updated
new_track = self.env['event.track'].create({
'event_id': event.id,
'name': 'Mega Track',
'partner_name': 'Nibbler In Space',
'partner_phone': test_phone,
'partner_biography': test_bio,
})
self.assertEqual(new_track.partner_name, 'Nibbler In Space')
self.assertEqual(new_track.partner_email, False)
self.assertEqual(new_track.partner_phone, test_phone)
self.assertEqual(new_track.partner_biography, test_bio)
new_track.write({'partner_id': customer.id})
self.assertEqual(new_track.partner_id, customer)
self.assertEqual(
new_track.partner_name, 'Nibbler In Space',
'Track customer should not take over existing value')
self.assertEqual(
new_track.partner_email, customer.email,
'Track customer should take over empty value')
self.assertEqual(
new_track.partner_phone, test_phone,
'Track customer should not take over existing value')
# contacts fields should be updated with track customer
new_track = self.env['event.track'].create({
'event_id': event.id,
'name': 'Mega Track',
'contact_phone': test_phone,
})
self.assertEqual(new_track.contact_email, False)
self.assertEqual(new_track.contact_phone, test_phone)
new_track.write({'partner_id': customer.id})
self.assertEqual(new_track.partner_id, customer)
self.assertEqual(
new_track.contact_email, customer.email,
'Track customer should take over empty contact email value')
self.assertEqual(
new_track.contact_phone, customer.phone,
'Track customer should take over existing contact phone value')
class TestTrackSuggestions(TestEventOnlineCommon):
def test_track_suggestion(self):
[location_1, location_2] = self.env['event.track.location'].create([
{'name': 'Location 1'},
{'name': 'Location 2'},
])
[tag_1, tag_2, tag_3, tag_4] = self.env['event.track.tag'].create([
{'name': 'Tag 1'}, {'name': 'Tag 2'}, {'name': 'Tag 3'}, {'name': 'Tag 4'}
])
date = fields.Datetime.from_string(datetime.now().strftime('%Y-%m-%d %H:00:00'))
[track_1, track_2, track_3, track_4, track_5, track_6] = self.env['event.track'].create([{
'name': 'Track 1',
'location_id': location_1.id,
'event_id': self.event_0.id,
'tag_ids': [(4, tag_1.id), (4, tag_2.id)],
'date': date + timedelta(hours=-1),
}, {
'name': 'Track 2',
'location_id': location_2.id,
'event_id': self.event_0.id,
'date': date,
}, {
'name': 'Track 3',
'location_id': location_2.id,
'event_id': self.event_0.id,
'tag_ids': [(4, tag_1.id), (4, tag_3.id), (4, tag_4.id)],
'date': date,
}, {
'name': 'Track 4',
'event_id': self.event_0.id,
'tag_ids': [(4, tag_1.id), (4, tag_2.id)],
'date': date,
}, {
'name': 'Track 5',
'event_id': self.event_0.id,
'tag_ids': [(4, tag_1.id), (4, tag_3.id)],
'wishlisted_by_default': True,
'date': date,
}, {
'name': 'Track 6',
'location_id': location_1.id,
'event_id': self.event_0.id,
'tag_ids': [(4, tag_1.id), (4, tag_3.id)],
'date': date,
}])
emp_visitor = self.env['website.visitor'].create({
'name': 'Visitor',
'partner_id': self.user_employee.partner_id.id
})
visitor_track = self.env['event.track.visitor'].create({
'visitor_id': emp_visitor.id,
'track_id': track_3.id,
'is_wishlisted': True,
})
with patch.object(WebsiteVisitor, '_get_visitor_from_request', lambda *args, **kwargs: emp_visitor), \
self.with_user('user_employee'):
current_track = self.env['event.track'].browse(track_1.id)
all_suggestions = current_track._get_track_suggestions()
self.assertEqual(
all_suggestions.ids,
                (track_3 + track_5 + track_4 + track_6 + track_2).ids  # wishlisted / wishlisted by default / tags count / location
)
track_suggestion = current_track._get_track_suggestions(limit=1)
self.assertEqual(track_suggestion, track_3,
'Returned track should be the manually wishlisted one')
# remove wishlist, keynote should be top
visitor_track.unlink()
track_suggestion = current_track._get_track_suggestions(limit=1)
self.assertEqual(
track_suggestion, track_5,
'Returned track should be the default wishlisted one')
# toggle wishlisted by default off through blacklist
track_5_visitor = self.env['event.track.visitor'].sudo().create({
'visitor_id': emp_visitor.id,
'track_id': track_5.id,
'is_blacklisted': True,
})
track_suggestion = current_track._get_track_suggestions(limit=1)
            self.assertEqual(
                track_suggestion, track_4,
                'Returned track should be the one with the most common tags as keynote is blacklisted')
track_5_visitor.unlink()
# remove keynote default, now based on tags
track_5.write({'wishlisted_by_default': False})
track_suggestion = current_track._get_track_suggestions(limit=1)
            self.assertEqual(
                track_suggestion, track_4,
                'Returned track should be the one with the most common tags')
# remove tags, now based on location
all_suggestions.sudo().write({'tag_ids': [(5,)]})
track_suggestion = current_track._get_track_suggestions(limit=1)
            self.assertEqual(
                track_suggestion, track_6,
                'Returned track should be the one with the matching location')
            # remove location, now based on random
all_suggestions.sudo().write({'location_id': False})
track_suggestion = current_track._get_track_suggestions(limit=1)
            self.assertIn(
                track_suggestion, [track_2, track_3, track_4, track_5, track_6],
                "Returned track should be a random one (but not the one we're trying to get a suggestion for)")
| 43.61244 | 9,115 |
818 |
py
|
PYTHON
|
15.0
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# -*- coding: utf-8 -*-
from odoo.tests import tagged
from odoo.tests.common import TransactionCase
@tagged('post_install', '-at_install')
class TestWebsiteEvent(TransactionCase):
def test_event_app_name(self):
website0 = self.env['website'].create({'name': 'Foo'})
self.assertEqual(website0.events_app_name, 'Foo Events')
website1 = self.env['website'].create({'name': 'Foo', 'events_app_name': 'Bar Events'})
self.assertEqual(website1.events_app_name, 'Bar Events')
website2 = self.env['website'].create({'name': 'Foo'})
self.assertEqual(website2.events_app_name, 'Foo Events')
website2.write({'name': 'Bar'})
self.assertEqual(website2.events_app_name, 'Foo Events')
| 38.952381 | 818 |
2,960 |
py
|
PYTHON
|
15.0
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# -*- coding: utf-8 -*-
from odoo.tests import tagged
from odoo.tests.common import TransactionCase
from odoo import tools
from io import BytesIO
from PIL import Image
import base64
@tagged('post_install', '-at_install')
class TestWebsite(TransactionCase):
def test_event_app_name(self):
website0 = self.env['website'].create({'name': 'Foo'})
self.assertEqual(website0.events_app_name, 'Foo Events')
website1 = self.env['website'].create({'name': 'Foo', 'events_app_name': 'Bar Events'})
self.assertEqual(website1.events_app_name, 'Bar Events')
website2 = self.env['website'].create({'name': 'Foo'})
self.assertEqual(website2.events_app_name, 'Foo Events')
website2.write({'name': 'Bar'})
self.assertEqual(website2.events_app_name, 'Foo Events')
def test_compute_app_icon(self):
# Generate image data for JPEG
jpeg_image = Image.new('RGB', (60, 30), color=(73, 109, 137))
jpeg_io = BytesIO()
jpeg_image.save(jpeg_io, format='JPEG')
jpeg_image_data = jpeg_io.getvalue()
# Generate image data for JPG
jpg_image = Image.new('RGB', (60, 30), color=(73, 109, 137))
jpg_io = BytesIO()
jpg_image.save(jpg_io, format='JPEG')
jpg_image_data = jpg_io.getvalue()
# Generate image data for PNG
png_image = Image.new('RGB', (60, 30), color=(73, 109, 137))
png_io = BytesIO()
png_image.save(png_io, format='PNG')
png_image_data = png_io.getvalue()
# Generate image data for SVG
svg_image_data = b"""<svg xmlns="http://www.w3.org/2000/svg" width="60" height="30" version="1.1">
<rect width="100%" height="100%" fill="rgb(73, 109, 137)"/>
</svg>
"""
# Image data and their respective expected types
image_data = {
'png': png_image_data,
'jpg': jpg_image_data,
'jpeg': jpeg_image_data,
'svg': svg_image_data
}
        for expected_type, data in image_data.items():
            # Create a website record
            website = self.env['website'].create({
                'name': 'Test Website',
                'favicon': base64.b64encode(data)
})
# Call the method to compute app_icon
website._compute_app_icon()
if expected_type in ['jpeg', 'png', 'jpg']:
# Check if app_icon is set
self.assertTrue(website.app_icon)
# Check if app_icon is a valid image
image = tools.base64_to_image(website.app_icon)
self.assertEqual(image.format.lower(), 'png')
else:
# For SVG images, ensure that the app_icon is not set
self.assertFalse(website.app_icon)
| 37.948718 | 2,960 |
297 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class TrackLocation(models.Model):
_name = "event.track.location"
_description = 'Event Track Location'
name = fields.Char('Location', required=True)
| 27 | 297 |
2,323 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import _, api, fields, models
class TrackStage(models.Model):
_name = 'event.track.stage'
_description = 'Event Track Stage'
_order = 'sequence, id'
name = fields.Char(string='Stage Name', required=True, translate=True)
sequence = fields.Integer(string='Sequence', default=1)
mail_template_id = fields.Many2one(
'mail.template', string='Email Template',
domain=[('model', '=', 'event.track')],
help="If set an email will be sent to the customer when the track reaches this step.")
# legends
color = fields.Integer(string='Color')
description = fields.Text(string='Description', translate=True)
legend_blocked = fields.Char('Red Kanban Label', default=lambda s: _('Blocked'), translate=True)
legend_done = fields.Char('Green Kanban Label', default=lambda s: _('Ready for Next Stage'), translate=True)
legend_normal = fields.Char('Grey Kanban Label', default=lambda s: _('In Progress'), translate=True)
# pipe
fold = fields.Boolean(
string='Folded in Kanban',
help='This stage is folded in the kanban view when there are no records in that stage to display.')
is_visible_in_agenda = fields.Boolean(
string='Visible in agenda', compute='_compute_is_visible_in_agenda', store=True,
help='If checked, the related tracks will be visible in the frontend.')
is_fully_accessible = fields.Boolean(
string='Fully accessible', compute='_compute_is_fully_accessible', store=True,
help='If checked, automatically publish tracks so that access links to customers are provided.')
is_cancel = fields.Boolean(string='Canceled Stage')
@api.depends('is_cancel', 'is_fully_accessible')
def _compute_is_visible_in_agenda(self):
for record in self:
if record.is_cancel:
record.is_visible_in_agenda = False
elif record.is_fully_accessible:
record.is_visible_in_agenda = True
@api.depends('is_cancel', 'is_visible_in_agenda')
def _compute_is_fully_accessible(self):
for record in self:
if record.is_cancel or not record.is_visible_in_agenda:
record.is_fully_accessible = False
| 48.395833 | 2,323 |
837 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class EventType(models.Model):
_inherit = 'event.type'
website_track = fields.Boolean(
string='Tracks on Website', compute='_compute_website_track_menu_data',
readonly=False, store=True)
website_track_proposal = fields.Boolean(
string='Tracks Proposals on Website', compute='_compute_website_track_menu_data',
readonly=False, store=True)
@api.depends('website_menu')
def _compute_website_track_menu_data(self):
""" Simply activate or de-activate all menus at once. """
for event_type in self:
event_type.website_track = event_type.website_menu
event_type.website_track_proposal = event_type.website_menu
| 38.045455 | 837 |
1,247 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class WebsiteMenu(models.Model):
_inherit = "website.menu"
def unlink(self):
""" Override to synchronize event configuration fields with menu deletion.
This should be cleaned in upcoming versions. """
event_updates = {}
website_event_menus = self.env['website.event.menu'].search([('menu_id', 'in', self.ids)])
for event_menu in website_event_menus:
to_update = event_updates.setdefault(event_menu.event_id, list())
            # specifically check for /track in the menu URL to avoid unchecking the track
            # field when removing the agenda page, which also has menu_type='track'
if event_menu.menu_type == 'track' and '/track' in event_menu.menu_id.url:
to_update.append('website_track')
# call super that resumes the unlink of menus entries (including website event menus)
res = super(WebsiteMenu, self).unlink()
# update events
for event, to_update in event_updates.items():
if to_update:
event.write(dict((fname, False) for fname in to_update))
return res
| 41.566667 | 1,247 |
914 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from random import randint
from odoo import fields, models
class TrackTag(models.Model):
_name = "event.track.tag"
_description = 'Event Track Tag'
_order = "category_id, sequence, name"
def _default_color(self):
return randint(1, 11)
name = fields.Char('Tag Name', required=True)
track_ids = fields.Many2many('event.track', string='Tracks')
color = fields.Integer(
string='Color Index', default=lambda self: self._default_color(),
help="Note that colorless tags won't be available on the website.")
sequence = fields.Integer('Sequence', default=10)
category_id = fields.Many2one('event.track.tag.category', string="Category", ondelete="set null")
_sql_constraints = [
('name_uniq', 'unique (name)', "Tag name already exists !"),
]
| 33.851852 | 914 |
4,949 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.addons.http_routing.models.ir_http import slug
class Event(models.Model):
_inherit = "event.event"
track_ids = fields.One2many('event.track', 'event_id', 'Tracks')
track_count = fields.Integer('Track Count', compute='_compute_track_count')
website_track = fields.Boolean(
'Tracks on Website', compute='_compute_website_track',
readonly=False, store=True)
website_track_proposal = fields.Boolean(
'Proposals on Website', compute='_compute_website_track_proposal',
readonly=False, store=True)
track_menu_ids = fields.One2many('website.event.menu', 'event_id', string='Event Tracks Menus', domain=[('menu_type', '=', 'track')])
track_proposal_menu_ids = fields.One2many('website.event.menu', 'event_id', string='Event Proposals Menus', domain=[('menu_type', '=', 'track_proposal')])
allowed_track_tag_ids = fields.Many2many('event.track.tag', relation='event_allowed_track_tags_rel', string='Available Track Tags')
tracks_tag_ids = fields.Many2many(
'event.track.tag', relation='event_track_tags_rel', string='Track Tags',
compute='_compute_tracks_tag_ids', store=True)
def _compute_track_count(self):
data = self.env['event.track'].read_group([('stage_id.is_cancel', '!=', True)], ['event_id'], ['event_id'])
        result = {group['event_id'][0]: group['event_id_count'] for group in data}
for event in self:
event.track_count = result.get(event.id, 0)
@api.depends('event_type_id', 'website_menu')
def _compute_website_track(self):
""" Propagate event_type configuration (only at change); otherwise propagate
        website_menu updated value. Also force True if track_proposal changes. """
for event in self:
if event.event_type_id and event.event_type_id != event._origin.event_type_id:
event.website_track = event.event_type_id.website_track
elif event.website_menu and (event.website_menu != event._origin.website_menu or not event.website_track):
event.website_track = True
elif not event.website_menu:
event.website_track = False
@api.depends('event_type_id', 'website_track')
def _compute_website_track_proposal(self):
""" Propagate event_type configuration (only at change); otherwise propagate
website_track updated value (both together True or False at update). """
for event in self:
if event.event_type_id and event.event_type_id != event._origin.event_type_id:
event.website_track_proposal = event.event_type_id.website_track_proposal
elif event.website_track != event._origin.website_track or not event.website_track or not event.website_track_proposal:
event.website_track_proposal = event.website_track
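    # Illustrative cascade (not in the original source): enabling website_menu
    # on an event flips website_track to True, and any change of website_track
    # is then mirrored onto website_track_proposal, so the flags propagate
    # menu -> track -> proposal unless an event type change overrides them.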
@api.depends('track_ids.tag_ids', 'track_ids.tag_ids.color')
def _compute_tracks_tag_ids(self):
for event in self:
event.tracks_tag_ids = event.track_ids.mapped('tag_ids').filtered(lambda tag: tag.color != 0).ids
# ------------------------------------------------------------
# WEBSITE MENU MANAGEMENT
# ------------------------------------------------------------
def toggle_website_track(self, val):
self.website_track = val
def toggle_website_track_proposal(self, val):
self.website_track_proposal = val
def _get_menu_update_fields(self):
return super(Event, self)._get_menu_update_fields() + ['website_track', 'website_track_proposal']
def _update_website_menus(self, menus_update_by_field=None):
super(Event, self)._update_website_menus(menus_update_by_field=menus_update_by_field)
for event in self:
if event.menu_id and (not menus_update_by_field or event in menus_update_by_field.get('website_track')):
event._update_website_menu_entry('website_track', 'track_menu_ids', 'track')
if event.menu_id and (not menus_update_by_field or event in menus_update_by_field.get('website_track_proposal')):
event._update_website_menu_entry('website_track_proposal', 'track_proposal_menu_ids', 'track_proposal')
def _get_menu_type_field_matching(self):
res = super(Event, self)._get_menu_type_field_matching()
res['track_proposal'] = 'website_track_proposal'
return res
def _get_website_menu_entries(self):
self.ensure_one()
return super(Event, self)._get_website_menu_entries() + [
(_('Talks'), '/event/%s/track' % slug(self), False, 10, 'track'),
(_('Agenda'), '/event/%s/agenda' % slug(self), False, 70, 'track'),
(_('Talk Proposals'), '/event/%s/track_proposal' % slug(self), False, 15, 'track_proposal')
]
| 54.384615 | 4,949 |
406 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class EventMenu(models.Model):
_inherit = "website.event.menu"
menu_type = fields.Selection(
selection_add=[('track', 'Event Tracks Menus'), ('track_proposal', 'Event Proposals Menus')],
ondelete={'track': 'cascade', 'track_proposal': 'cascade'})
| 33.833333 | 406 |
327 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
events_app_name = fields.Char('Events App Name', related='website_id.events_app_name', readonly=False)
| 32.7 | 327 |
28,058 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import timedelta
from pytz import utc
from random import randint
from odoo import api, fields, models
from odoo.addons.http_routing.models.ir_http import slug
from odoo.osv import expression
from odoo.tools.mail import is_html_empty
from odoo.tools.translate import _, html_translate
class Track(models.Model):
_name = "event.track"
_description = 'Event Track'
_order = 'priority, date'
_inherit = ['mail.thread', 'mail.activity.mixin', 'website.seo.metadata', 'website.published.mixin']
@api.model
def _get_default_stage_id(self):
return self.env['event.track.stage'].search([], limit=1).id
# description
name = fields.Char('Title', required=True, translate=True)
event_id = fields.Many2one('event.event', 'Event', required=True)
active = fields.Boolean(default=True)
user_id = fields.Many2one('res.users', 'Responsible', tracking=True, default=lambda self: self.env.user)
company_id = fields.Many2one('res.company', related='event_id.company_id')
tag_ids = fields.Many2many('event.track.tag', string='Tags')
description = fields.Html(translate=html_translate, sanitize_attributes=False, sanitize_form=False)
color = fields.Integer('Color')
priority = fields.Selection([
('0', 'Low'), ('1', 'Medium'),
('2', 'High'), ('3', 'Highest')],
'Priority', required=True, default='1')
# management
stage_id = fields.Many2one(
'event.track.stage', string='Stage', ondelete='restrict',
index=True, copy=False, default=_get_default_stage_id,
group_expand='_read_group_stage_ids',
required=True, tracking=True)
legend_blocked = fields.Char(related='stage_id.legend_blocked',
string='Kanban Blocked Explanation', readonly=True)
legend_done = fields.Char(related='stage_id.legend_done',
string='Kanban Valid Explanation', readonly=True)
legend_normal = fields.Char(related='stage_id.legend_normal',
string='Kanban Ongoing Explanation', readonly=True)
kanban_state = fields.Selection([
('normal', 'Grey'),
('done', 'Green'),
('blocked', 'Red')], string='Kanban State',
copy=False, default='normal', required=True,
help="A track's kanban state indicates special situations affecting it:\n"
" * Grey is the default situation\n"
" * Red indicates something is preventing the progress of this track\n"
" * Green indicates the track is ready to be pulled to the next stage")
kanban_state_label = fields.Char(
string='Kanban State Label', compute='_compute_kanban_state_label', store=True,
tracking=True)
partner_id = fields.Many2one('res.partner', 'Contact', help="Contact of the track, may be different from speaker.")
# speaker information
partner_name = fields.Char(
string='Name', compute='_compute_partner_name',
readonly=False, store=True, tracking=10,
help='Speaker name is used for public display and may vary from contact name')
partner_email = fields.Char(
string='Email', compute='_compute_partner_email',
readonly=False, store=True, tracking=20,
help='Speaker email is used for public display and may vary from contact email')
partner_phone = fields.Char(
string='Phone', compute='_compute_partner_phone',
readonly=False, store=True, tracking=30,
help='Speaker phone is used for public display and may vary from contact phone')
partner_biography = fields.Html(
string='Biography', compute='_compute_partner_biography',
sanitize_attributes=False,
readonly=False, store=True)
partner_function = fields.Char(
'Job Position', compute='_compute_partner_function',
store=True, readonly=False)
partner_company_name = fields.Char(
'Company Name', compute='_compute_partner_company_name',
readonly=False, store=True)
partner_tag_line = fields.Char(
'Tag Line', compute='_compute_partner_tag_line',
help='Description of the partner (name, function and company name)')
image = fields.Image(
string="Speaker Photo", compute="_compute_partner_image",
readonly=False, store=True,
max_width=256, max_height=256)
# contact information
contact_email = fields.Char(
string='Contact Email', compute='_compute_contact_email',
readonly=False, store=True, tracking=20,
help="Contact email is private and used internally")
contact_phone = fields.Char(
string='Contact Phone', compute='_compute_contact_phone',
readonly=False, store=True, tracking=30,
help="Contact phone is private and used internally")
location_id = fields.Many2one('event.track.location', 'Location')
# time information
date = fields.Datetime('Track Date')
date_end = fields.Datetime('Track End Date', compute='_compute_end_date', store=True)
duration = fields.Float('Duration', default=0.5, help="Track duration in hours.")
is_track_live = fields.Boolean(
'Is Track Live', compute='_compute_track_time_data',
help="Track has started and is ongoing")
is_track_soon = fields.Boolean(
'Is Track Soon', compute='_compute_track_time_data',
help="Track begins soon")
is_track_today = fields.Boolean(
'Is Track Today', compute='_compute_track_time_data',
help="Track begins today")
is_track_upcoming = fields.Boolean(
'Is Track Upcoming', compute='_compute_track_time_data',
help="Track is not yet started")
is_track_done = fields.Boolean(
'Is Track Done', compute='_compute_track_time_data',
help="Track is finished")
    track_start_remaining = fields.Integer(
        'Seconds before track starts', compute='_compute_track_time_data',
        help="Remaining time before track starts (seconds)")
    track_start_relative = fields.Integer(
        'Seconds compared to track start', compute='_compute_track_time_data',
        help="Relative time compared to track start (seconds)")
# frontend description
website_image = fields.Image(string="Website Image", max_width=1024, max_height=1024)
website_image_url = fields.Char(
string='Image URL', compute='_compute_website_image_url',
compute_sudo=True, store=False)
# wishlist / visitors management
event_track_visitor_ids = fields.One2many(
'event.track.visitor', 'track_id', string="Track Visitors",
groups="event.group_event_user")
is_reminder_on = fields.Boolean('Is Reminder On', compute='_compute_is_reminder_on')
wishlist_visitor_ids = fields.Many2many(
'website.visitor', string="Visitor Wishlist",
compute="_compute_wishlist_visitor_ids", compute_sudo=True,
search="_search_wishlist_visitor_ids",
groups="event.group_event_user")
wishlist_visitor_count = fields.Integer(
string="# Wishlisted",
compute="_compute_wishlist_visitor_ids", compute_sudo=True,
groups="event.group_event_user")
wishlisted_by_default = fields.Boolean(
string='Always Wishlisted',
help="""If set, the talk will be set as favorite for each attendee registered to the event.""")
# Call to action
website_cta = fields.Boolean('Magic Button',
help="Display a Call to Action button to your Attendees while they watch your Track.")
website_cta_title = fields.Char('Button Title')
website_cta_url = fields.Char('Button Target URL')
website_cta_delay = fields.Integer('Button appears')
# time information for CTA
is_website_cta_live = fields.Boolean(
'Is CTA Live', compute='_compute_cta_time_data',
help="CTA button is available")
website_cta_start_remaining = fields.Integer(
        'Seconds before CTA starts', compute='_compute_cta_time_data',
help="Remaining time before CTA starts (seconds)")
@api.depends('name')
def _compute_website_url(self):
super(Track, self)._compute_website_url()
for track in self:
if track.id:
track.website_url = '/event/%s/track/%s' % (slug(track.event_id), slug(track))
# STAGES
@api.depends('stage_id', 'kanban_state')
def _compute_kanban_state_label(self):
for track in self:
if track.kanban_state == 'normal':
track.kanban_state_label = track.stage_id.legend_normal
elif track.kanban_state == 'blocked':
track.kanban_state_label = track.stage_id.legend_blocked
else:
track.kanban_state_label = track.stage_id.legend_done
# SPEAKER
@api.depends('partner_id')
def _compute_partner_name(self):
for track in self:
if track.partner_id and not track.partner_name:
track.partner_name = track.partner_id.name
@api.depends('partner_id')
def _compute_partner_email(self):
for track in self:
if track.partner_id and not track.partner_email:
track.partner_email = track.partner_id.email
@api.depends('partner_id')
def _compute_partner_phone(self):
for track in self:
if track.partner_id and not track.partner_phone:
track.partner_phone = track.partner_id.phone
@api.depends('partner_id')
def _compute_partner_biography(self):
for track in self:
if not track.partner_biography:
track.partner_biography = track.partner_id.website_description
elif track.partner_id and is_html_empty(track.partner_biography) and \
not is_html_empty(track.partner_id.website_description):
track.partner_biography = track.partner_id.website_description
@api.depends('partner_id')
def _compute_partner_function(self):
for track in self:
if track.partner_id and not track.partner_function:
track.partner_function = track.partner_id.function
@api.depends('partner_id', 'partner_id.company_type')
def _compute_partner_company_name(self):
for track in self:
if track.partner_id.company_type == 'company':
track.partner_company_name = track.partner_id.name
elif not track.partner_company_name:
track.partner_company_name = track.partner_id.parent_id.name
@api.depends('partner_name', 'partner_function', 'partner_company_name')
def _compute_partner_tag_line(self):
for track in self:
if not track.partner_name:
track.partner_tag_line = False
continue
tag_line = track.partner_name
if track.partner_function:
if track.partner_company_name:
tag_line = _('%(name)s, %(function)s at %(company)s',
name=track.partner_name,
function=track.partner_function,
company=track.partner_company_name
)
else:
tag_line = '%s, %s' % (track.partner_name, track.partner_function)
elif track.partner_company_name:
tag_line = _('%(name)s from %(company)s',
name=tag_line,
company=track.partner_company_name
)
track.partner_tag_line = tag_line
@api.depends('partner_id')
def _compute_partner_image(self):
for track in self:
if not track.image:
track.image = track.partner_id.image_256
# CONTACT
@api.depends('partner_id', 'partner_id.email')
def _compute_contact_email(self):
for track in self:
if track.partner_id:
track.contact_email = track.partner_id.email
@api.depends('partner_id', 'partner_id.phone')
def _compute_contact_phone(self):
for track in self:
if track.partner_id:
track.contact_phone = track.partner_id.phone
# TIME
@api.depends('date', 'duration')
def _compute_end_date(self):
for track in self:
if track.date:
delta = timedelta(minutes=60 * track.duration)
track.date_end = track.date + delta
else:
track.date_end = False
# FRONTEND DESCRIPTION
@api.depends('image', 'partner_id.image_256')
def _compute_website_image_url(self):
for track in self:
if track.website_image:
track.website_image_url = self.env['website'].image_url(track, 'website_image', size=1024)
else:
track.website_image_url = '/website_event_track/static/src/img/event_track_default_%d.jpeg' % (track.id % 2)
# WISHLIST / VISITOR MANAGEMENT
@api.depends('wishlisted_by_default', 'event_track_visitor_ids.visitor_id',
'event_track_visitor_ids.partner_id', 'event_track_visitor_ids.is_wishlisted',
'event_track_visitor_ids.is_blacklisted')
@api.depends_context('uid')
def _compute_is_reminder_on(self):
current_visitor = self.env['website.visitor']._get_visitor_from_request(force_create=False)
if self.env.user._is_public() and not current_visitor:
for track in self:
track.is_reminder_on = track.wishlisted_by_default
else:
if self.env.user._is_public():
domain = [('visitor_id', '=', current_visitor.id)]
elif current_visitor:
domain = [
'|',
('partner_id', '=', self.env.user.partner_id.id),
('visitor_id', '=', current_visitor.id)
]
else:
domain = [('partner_id', '=', self.env.user.partner_id.id)]
event_track_visitors = self.env['event.track.visitor'].sudo().search_read(
expression.AND([
domain,
[('track_id', 'in', self.ids)]
]), fields=['track_id', 'is_wishlisted', 'is_blacklisted']
)
wishlist_map = {
track_visitor['track_id'][0]: {
'is_wishlisted': track_visitor['is_wishlisted'],
'is_blacklisted': track_visitor['is_blacklisted']
} for track_visitor in event_track_visitors
}
for track in self:
if wishlist_map.get(track.id):
track.is_reminder_on = wishlist_map.get(track.id)['is_wishlisted'] or (track.wishlisted_by_default and not wishlist_map[track.id]['is_blacklisted'])
else:
track.is_reminder_on = track.wishlisted_by_default
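    # A minimal, framework-free sketch of the precedence implemented above
    # (plain-dict stand-ins, hypothetical names): with no event.track.visitor
    # line the per-track default applies; with one, an explicit wishlist wins
    # and a blacklist only cancels the default:
    #
    #   def reminder_on(default, line=None):
    #       if line is None:
    #           return default
    #       return line['is_wishlisted'] or (default and not line['is_blacklisted'])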
@api.depends('event_track_visitor_ids.visitor_id', 'event_track_visitor_ids.is_wishlisted')
def _compute_wishlist_visitor_ids(self):
results = self.env['event.track.visitor'].read_group(
[('track_id', 'in', self.ids), ('is_wishlisted', '=', True)],
['track_id', 'visitor_id:array_agg'],
['track_id']
)
visitor_ids_map = {result['track_id'][0]: result['visitor_id'] for result in results}
for track in self:
track.wishlist_visitor_ids = visitor_ids_map.get(track.id, [])
track.wishlist_visitor_count = len(visitor_ids_map.get(track.id, []))
def _search_wishlist_visitor_ids(self, operator, operand):
if operator == "not in":
raise NotImplementedError("Unsupported 'Not In' operation on track wishlist visitors")
track_visitors = self.env['event.track.visitor'].sudo().search([
('visitor_id', operator, operand),
('is_wishlisted', '=', True)
])
return [('id', 'in', track_visitors.track_id.ids)]
# TIME
@api.depends('date', 'date_end')
def _compute_track_time_data(self):
""" Compute start and remaining time for track itself. Do everything in
UTC as we compute only time deltas here. """
now_utc = utc.localize(fields.Datetime.now().replace(microsecond=0))
for track in self:
if not track.date:
track.is_track_live = track.is_track_soon = track.is_track_today = track.is_track_upcoming = track.is_track_done = False
track.track_start_relative = track.track_start_remaining = 0
continue
date_begin_utc = utc.localize(track.date, is_dst=False)
date_end_utc = utc.localize(track.date_end, is_dst=False)
track.is_track_live = date_begin_utc <= now_utc < date_end_utc
track.is_track_soon = (date_begin_utc - now_utc).total_seconds() < 30*60 if date_begin_utc > now_utc else False
track.is_track_today = date_begin_utc.date() == now_utc.date()
track.is_track_upcoming = date_begin_utc > now_utc
track.is_track_done = date_end_utc <= now_utc
if date_begin_utc >= now_utc:
track.track_start_relative = int((date_begin_utc - now_utc).total_seconds())
track.track_start_remaining = track.track_start_relative
else:
track.track_start_relative = int((now_utc - date_begin_utc).total_seconds())
track.track_start_remaining = 0
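    # Worked example for the flags above (assumption: now = 10:00:00 UTC, track
    # scheduled 10:15-10:45 UTC): is_track_upcoming, is_track_soon and
    # is_track_today are True, is_track_live and is_track_done are False, and
    # track_start_remaining == track_start_relative == 900 (seconds).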
@api.depends('date', 'date_end', 'website_cta', 'website_cta_delay')
def _compute_cta_time_data(self):
""" Compute start and remaining time for track itself. Do everything in
UTC as we compute only time deltas here. """
now_utc = utc.localize(fields.Datetime.now().replace(microsecond=0))
for track in self:
if not track.website_cta:
track.is_website_cta_live = track.website_cta_start_remaining = False
continue
date_begin_utc = utc.localize(track.date, is_dst=False) + timedelta(minutes=track.website_cta_delay or 0)
date_end_utc = utc.localize(track.date_end, is_dst=False)
track.is_website_cta_live = date_begin_utc <= now_utc <= date_end_utc
if date_begin_utc >= now_utc:
td = date_begin_utc - now_utc
track.website_cta_start_remaining = int(td.total_seconds())
else:
track.website_cta_start_remaining = 0
# ------------------------------------------------------------
# CRUD
# ------------------------------------------------------------
@api.model_create_multi
def create(self, vals_list):
for values in vals_list:
if values.get('website_cta_url'):
values['website_cta_url'] = self.env['res.partner']._clean_website(values['website_cta_url'])
tracks = super(Track, self).create(vals_list)
for track in tracks:
email_values = {} if self.env.user.email else {'email_from': self.env.company.catchall_formatted}
track.event_id.message_post_with_view(
'website_event_track.event_track_template_new',
values={
'track': track,
'is_html_empty': is_html_empty,
},
subtype_id=self.env.ref('website_event_track.mt_event_track').id,
**email_values,
)
track._synchronize_with_stage(track.stage_id)
return tracks
def write(self, vals):
if vals.get('website_cta_url'):
vals['website_cta_url'] = self.env['res.partner']._clean_website(vals['website_cta_url'])
if 'stage_id' in vals and 'kanban_state' not in vals:
vals['kanban_state'] = 'normal'
if vals.get('stage_id'):
stage = self.env['event.track.stage'].browse(vals['stage_id'])
self._synchronize_with_stage(stage)
res = super(Track, self).write(vals)
return res
@api.model
def _read_group_stage_ids(self, stages, domain, order):
""" Always display all stages """
return stages.search([], order=order)
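    # Thanks to the group_expand above, grouping tracks by stage returns one
    # group per stage, empty ones included, e.g. (hypothetical call):
    #
    #   env['event.track'].read_group(domain, ['stage_id'], ['stage_id'])
    #
    # yields a group dict for every event.track.stage (in `order`), with
    # stage_id_count == 0 for stages without tracks, which is what keeps empty
    # kanban columns visible.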
def _synchronize_with_stage(self, stage):
if stage.is_fully_accessible:
self.is_published = True
elif stage.is_cancel:
self.is_published = False
# ------------------------------------------------------------
# MESSAGING
# ------------------------------------------------------------
def _message_get_default_recipients(self):
return {
track.id: {
'partner_ids': [],
'email_to': track.contact_email or track.partner_email,
'email_cc': False
} for track in self
}
def _message_get_suggested_recipients(self):
recipients = super(Track, self)._message_get_suggested_recipients()
for track in self:
if track.partner_id:
if track.partner_id not in recipients:
track._message_add_suggested_recipient(recipients, partner=track.partner_id, reason=_('Contact'))
else:
# Priority: contact information then speaker information
if track.contact_email and track.contact_email != track.partner_id.email:
track._message_add_suggested_recipient(recipients, email=track.contact_email, reason=_('Contact Email'))
if not track.contact_email and track.partner_email and track.partner_email != track.partner_id.email:
track._message_add_suggested_recipient(recipients, email=track.partner_email, reason=_('Speaker Email'))
return recipients
def _message_post_after_hook(self, message, msg_vals):
# OVERRIDE
        # If no partner is set on the track when sending a message, we try to set one from the selected suggested contact.
        # If one or more partners have been created from the chatter (Suggested Recipients), we search for the expected one and write partner_id on the track.
if msg_vals.get('partner_ids') and not self.partner_id:
# Contact(s) created from chatter set on track : we verify if at least one is the expected contact
# linked to the track. (created from contact_email if any, then partner_email if any)
main_email = self.contact_email or self.partner_email
if main_email:
new_partner = message.partner_ids.filtered(lambda partner: partner.email == main_email)
if new_partner:
main_email_string = 'contact_email' if self.contact_email else 'partner_email'
self.search([
('partner_id', '=', False),
(main_email_string, '=', new_partner.email),
('stage_id.is_cancel', '=', False),
]).write({'partner_id': new_partner.id})
return super(Track, self)._message_post_after_hook(message, msg_vals)
def _track_template(self, changes):
res = super(Track, self)._track_template(changes)
track = self[0]
if 'stage_id' in changes and track.stage_id.mail_template_id:
res['stage_id'] = (track.stage_id.mail_template_id, {
'composition_mode': 'comment',
'auto_delete_message': True,
'subtype_id': self.env['ir.model.data']._xmlid_to_res_id('mail.mt_note'),
'email_layout_xmlid': 'mail.mail_notification_light'
})
return res
def _track_subtype(self, init_values):
self.ensure_one()
if 'kanban_state' in init_values and self.kanban_state == 'blocked':
return self.env.ref('website_event_track.mt_track_blocked')
elif 'kanban_state' in init_values and self.kanban_state == 'done':
return self.env.ref('website_event_track.mt_track_ready')
return super(Track, self)._track_subtype(init_values)
# ------------------------------------------------------------
# ACTION
# ------------------------------------------------------------
def open_track_speakers_list(self):
return {
'name': _('Speakers'),
'domain': [('id', 'in', self.mapped('partner_id').ids)],
'view_mode': 'kanban,form',
'res_model': 'res.partner',
'view_id': False,
'type': 'ir.actions.act_window',
}
def get_backend_menu_id(self):
return self.env.ref('event.event_main_menu').id
# ------------------------------------------------------------
# TOOLS
# ------------------------------------------------------------
def _get_event_track_visitors(self, force_create=False):
self.ensure_one()
force_visitor_create = self.env.user._is_public()
visitor_sudo = self.env['website.visitor']._get_visitor_from_request(force_create=force_visitor_create)
if visitor_sudo:
visitor_sudo._update_visitor_last_visit()
if self.env.user._is_public():
domain = [('visitor_id', '=', visitor_sudo.id)]
elif visitor_sudo:
domain = [
'|',
('partner_id', '=', self.env.user.partner_id.id),
('visitor_id', '=', visitor_sudo.id)
]
else:
domain = [('partner_id', '=', self.env.user.partner_id.id)]
track_visitors = self.env['event.track.visitor'].sudo().search(
expression.AND([domain, [('track_id', 'in', self.ids)]])
)
missing = self - track_visitors.track_id
if missing and force_create:
track_visitors += self.env['event.track.visitor'].sudo().create([{
'visitor_id': visitor_sudo.id,
'partner_id': self.env.user.partner_id.id if not self.env.user._is_public() else False,
'track_id': track.id,
} for track in missing])
return track_visitors
def _get_track_suggestions(self, restrict_domain=None, limit=None):
""" Returns the next tracks suggested after going to the current one
given by self. Tracks always belong to the same event.
Heuristic is
* live first;
* then ordered by start date, finished being sent to the end;
* wishlisted (manually or by default);
* tag matching with current track;
* location matching with current track;
        * finally a random key so that otherwise-equivalent tracks come in random order;
:param restrict_domain: an additional domain to restrict candidates;
:param limit: number of tracks to return;
"""
self.ensure_one()
base_domain = [
'&',
('event_id', '=', self.event_id.id),
('id', '!=', self.id),
]
if restrict_domain:
base_domain = expression.AND([
base_domain,
restrict_domain
])
track_candidates = self.search(base_domain, limit=None, order='date asc')
if not track_candidates:
return track_candidates
track_candidates = track_candidates.sorted(
lambda track:
(track.is_published,
track.track_start_remaining == 0 # First get the tracks that started less than 10 minutes ago ...
and track.track_start_relative < (10 * 60)
and not track.is_track_done, # ... AND not finished
track.track_start_remaining > 0, # Then the one that will begin later (the sooner come first)
-1 * track.track_start_remaining,
track.is_reminder_on,
not track.wishlisted_by_default,
len(track.tag_ids & self.tag_ids),
track.location_id == self.location_id,
randint(0, 20),
), reverse=True
)
return track_candidates[:limit]
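    # Framework-free sketch of the ranking tuple above (hypothetical plain-dict
    # stand-ins for track records); sorted(..., reverse=True) makes earlier
    # tuple items dominate later ones, the trailing randint breaking ties:
    #
    #   def suggestion_key(track, current):
    #       return (
    #           track['is_published'],
    #           track['started_seconds_ago'] is not None
    #               and track['started_seconds_ago'] < 600 and not track['done'],
    #           track['starts_in'] > 0,
    #           -track['starts_in'],
    #           track['is_reminder_on'],
    #           not track['wishlisted_by_default'],
    #           len(track['tag_ids'] & current['tag_ids']),
    #           track['location_id'] == current['location_id'],
    #           randint(0, 20),
    #       )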
| 45.401294 | 28,058 |
2,029 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from PIL import Image
from odoo import api, fields, models
from odoo.exceptions import ValidationError
from odoo.tools import ImageProcess
from odoo.tools.translate import _
class Website(models.Model):
_inherit = "website"
app_icon = fields.Image(string='Website App Icon', compute='_compute_app_icon', store=True, readonly=True, help='This field holds the image used as mobile app icon on the website (PNG format).')
    events_app_name = fields.Char(string='Events App Name', compute='_compute_events_app_name', store=True, readonly=False, help="This field holds the Event's Progressive Web App name.")
@api.depends('name')
def _compute_events_app_name(self):
for website in self:
if not website.events_app_name:
website.events_app_name = _('%s Events') % website.name
@api.constrains('events_app_name')
def _check_events_app_name(self):
for website in self:
if not website.events_app_name:
raise ValidationError(_('"Events App Name" field is required.'))
@api.depends('favicon')
def _compute_app_icon(self):
""" Computes a squared image based on the favicon to be used as mobile webapp icon.
App Icon should be in PNG format and size of at least 512x512.
If the favicon is an SVG image, it will be skipped and the app_icon will be set to False.
"""
for website in self:
image = ImageProcess(website.favicon) if website.favicon else None
if not (image and image.image):
website.app_icon = False
continue
w, h = image.image.size
square_size = w if w > h else h
image.crop_resize(square_size, square_size)
image.image = image.image.resize((512, 512))
image.operationsCount += 1
website.app_icon = image.image_base64(output_format='PNG')
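    # Rough standalone equivalent of the favicon -> app icon transform above,
    # using plain PIL instead of odoo.tools.ImageProcess (sketch only; the real
    # helper also bails out on broken or SVG favicons):
    #
    #   from PIL import ImageOps
    #   img = Image.open(io.BytesIO(base64.b64decode(website.favicon)))
    #   img = ImageOps.fit(img, (512, 512))   # center square crop + resize
    #   buf = io.BytesIO()
    #   img.save(buf, format='PNG')
    #   website.app_icon = base64.b64encode(buf.getvalue())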
| 41.408163 | 2,029 |
1,397 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class TrackVisitor(models.Model):
""" Table linking track and visitors. """
_name = 'event.track.visitor'
_description = 'Track / Visitor Link'
_table = 'event_track_visitor'
_rec_name = 'track_id'
_order = 'track_id'
partner_id = fields.Many2one(
'res.partner', string='Partner', compute='_compute_partner_id',
index=True, ondelete='set null', readonly=False, store=True)
visitor_id = fields.Many2one(
'website.visitor', string='Visitor', index=True, ondelete='cascade')
track_id = fields.Many2one(
'event.track', string='Track',
index=True, required=True, ondelete='cascade')
is_wishlisted = fields.Boolean(string="Is Wishlisted")
    is_blacklisted = fields.Boolean(string="Is reminder off", help="As key tracks cannot be un-favorited, this field stores the partner's choice to remove the reminder for key tracks.")
@api.depends('visitor_id')
def _compute_partner_id(self):
for track_visitor in self:
if track_visitor.visitor_id.partner_id and not track_visitor.partner_id:
track_visitor.partner_id = track_visitor.visitor_id.partner_id
elif not track_visitor.partner_id:
track_visitor.partner_id = False
| 43.65625 | 1,397 |
477 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class TrackTagCategory(models.Model):
_name = "event.track.tag.category"
_description = 'Event Track Tag Category'
_order = "sequence"
name = fields.Char("Name", required=True, translate=True)
sequence = fields.Integer('Sequence', default=10)
tag_ids = fields.One2many('event.track.tag', 'category_id', string="Tags")
| 34.071429 | 477 |
3,786 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class WebsiteVisitor(models.Model):
_name = 'website.visitor'
_inherit = ['website.visitor']
event_track_visitor_ids = fields.One2many(
'event.track.visitor', 'visitor_id', string="Track Visitors",
groups='event.group_event_user')
event_track_wishlisted_ids = fields.Many2many(
'event.track', string="Wishlisted Tracks",
compute="_compute_event_track_wishlisted_ids", compute_sudo=True,
search="_search_event_track_wishlisted_ids",
groups="event.group_event_user")
event_track_wishlisted_count = fields.Integer(
string="# Wishlisted",
compute="_compute_event_track_wishlisted_ids", compute_sudo=True,
groups='event.group_event_user')
@api.depends('parent_id', 'event_track_visitor_ids.track_id', 'event_track_visitor_ids.is_wishlisted')
def _compute_event_track_wishlisted_ids(self):
        # include the parent's track visitors in a visitor o2m field. We don't add
        # the children's, as children should not have track visitors (moved to the parent)
all_visitors = self + self.parent_id
results = self.env['event.track.visitor'].read_group(
[('visitor_id', 'in', all_visitors.ids), ('is_wishlisted', '=', True)],
['visitor_id', 'track_id:array_agg'],
['visitor_id']
)
track_ids_map = {result['visitor_id'][0]: result['track_id'] for result in results}
for visitor in self:
visitor_track_ids = track_ids_map.get(visitor.id, [])
parent_track_ids = track_ids_map.get(visitor.parent_id.id, [])
visitor.event_track_wishlisted_ids = visitor_track_ids + [track_id for track_id in parent_track_ids if track_id not in visitor_track_ids]
visitor.event_track_wishlisted_count = len(visitor.event_track_wishlisted_ids)
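    # e.g. (hypothetical ids): visitor 7 wishlisted track 10 and its parent
    # visitor 3 wishlisted tracks 10 and 11 -> visitor 7 ends up with
    # event_track_wishlisted_ids == [10, 11] (own tracks first, parent's
    # deduplicated) and event_track_wishlisted_count == 2.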
def _search_event_track_wishlisted_ids(self, operator, operand):
""" Search visitors with terms on wishlisted tracks. E.g. [('event_track_wishlisted_ids',
'in', [1, 2])] should return visitors having wishlisted tracks 1, 2 as
well as their children for notification purpose. """
if operator == "not in":
raise NotImplementedError("Unsupported 'Not In' operation on track wishlist visitors")
track_visitors = self.env['event.track.visitor'].sudo().search([
('track_id', operator, operand),
('is_wishlisted', '=', True)
])
if track_visitors:
visitors = track_visitors.visitor_id
            # search children, even archived ones, to contact them
children = self.env['website.visitor'].with_context(
active_test=False
).sudo().search([('parent_id', 'in', visitors.ids)])
visitor_ids = (visitors + children).ids
else:
visitor_ids = []
return [('id', 'in', visitor_ids)]
def _link_to_partner(self, partner, update_values=None):
""" Propagate partner update to track_visitor records """
if partner:
track_visitor_wo_partner = self.event_track_visitor_ids.filtered(lambda track_visitor: not track_visitor.partner_id)
if track_visitor_wo_partner:
track_visitor_wo_partner.partner_id = partner
super(WebsiteVisitor, self)._link_to_partner(partner, update_values=update_values)
def _link_to_visitor(self, target, keep_unique=True):
""" Override linking process to link wishlist to the final visitor. """
self.event_track_visitor_ids.visitor_id = target.id
return super(WebsiteVisitor, self)._link_to_visitor(target, keep_unique=keep_unique)
| 50.48 | 3,786 |
474 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.website_event.controllers.main import WebsiteEventController
class EventOnlineController(WebsiteEventController):
def _get_registration_confirm_values(self, event, attendees_sudo):
values = super(EventOnlineController, self)._get_registration_confirm_values(event, attendees_sudo)
values['hide_sponsors'] = True
return values
| 39.5 | 474 |
2,834 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
import pytz
from odoo import http
from odoo.addons.http_routing.models.ir_http import url_for
from odoo.http import request
from odoo.modules.module import get_module_resource
from odoo.tools import ustr
from odoo.tools.translate import _
class TrackManifest(http.Controller):
@http.route('/event/manifest.webmanifest', type='http', auth='public', methods=['GET'], website=True, sitemap=False)
def webmanifest(self):
""" Returns a WebManifest describing the metadata associated with a web application.
Using this metadata, user agents can provide developers with means to create user
experiences that are more comparable to that of a native application.
"""
website = request.website
manifest = {
'name': website.events_app_name,
'short_name': website.events_app_name,
'description': _('%s Online Events Application') % website.company_id.name,
'scope': url_for('/event'),
'start_url': url_for('/event'),
'display': 'standalone',
'background_color': '#ffffff',
'theme_color': '#875A7B',
}
icon_sizes = ['192x192', '512x512']
manifest['icons'] = [{
'src': website.image_url(website, 'app_icon', size=size),
'sizes': size,
'type': 'image/png',
} for size in icon_sizes]
body = json.dumps(manifest, default=ustr)
response = request.make_response(body, [
('Content-Type', 'application/manifest+json'),
])
return response
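    # Illustrative response body (values depend on the website; icon URLs come
    # from website.image_url and are only shown here in a plausible form):
    #
    #   {"name": "YourCompany Events", "short_name": "YourCompany Events",
    #    "description": "YourCompany Online Events Application",
    #    "scope": "/event", "start_url": "/event", "display": "standalone",
    #    "background_color": "#ffffff", "theme_color": "#875A7B",
    #    "icons": [{"src": ".../app_icon/192x192", "sizes": "192x192",
    #               "type": "image/png"}, ...]}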
@http.route('/event/service-worker.js', type='http', auth='public', methods=['GET'], website=True, sitemap=False)
def service_worker(self):
""" Returns a ServiceWorker javascript file scoped for website_event
"""
sw_file = get_module_resource('website_event_track', 'static/src/js/service_worker.js')
with open(sw_file, 'r') as fp:
body = fp.read()
js_cdn_url = 'undefined'
if request.website.cdn_activated:
cdn_url = request.website.cdn_url.replace('"','%22').replace('\x5c','%5C')
js_cdn_url = '"%s"' % cdn_url
body = body.replace('__ODOO_CDN_URL__', js_cdn_url)
response = request.make_response(body, [
('Content-Type', 'text/javascript'),
('Service-Worker-Allowed', url_for('/event')),
])
return response
@http.route('/event/offline', type='http', auth='public', methods=['GET'], website=True, sitemap=False)
def offline(self):
""" Returns the offline page used by the 'website_event' PWA
"""
return request.render('website_event_track.pwa_offline')
| 41.676471 | 2,834 |
25,723 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from ast import literal_eval
from collections import defaultdict
from datetime import datetime, time as dt_time, timedelta
from pytz import timezone, utc
from werkzeug.exceptions import Forbidden, NotFound
import babel
import babel.dates
import base64
import json
import pytz
from odoo import exceptions, http, fields, tools, _
from odoo.http import request
from odoo.osv import expression
from odoo.tools import is_html_empty, plaintext2html
from odoo.tools.misc import babel_locale_parse
class EventTrackController(http.Controller):
def _get_event_tracks_agenda_domain(self, event):
""" Base domain for displaying track names (preview). The returned search
        domain will select the tracks that belong to a track stage that should
be visible in the agenda (see: 'is_visible_in_agenda'). Published tracks
are also displayed whatever their stage. """
agenda_domain = [
'&',
('event_id', '=', event.id),
'|',
('is_published', '=', True),
('stage_id.is_visible_in_agenda', '=', True),
]
return agenda_domain
def _get_event_tracks_domain(self, event):
""" Base domain for displaying tracks. The returned search domain will
        select the tracks that belong to a track stage that should be visible
in the agenda (see: 'is_visible_in_agenda'). When the user is a visitor,
the domain will contain an additional condition that will remove the
unpublished tracks from the search results."""
search_domain_base = self._get_event_tracks_agenda_domain(event)
if not request.env.user.has_group('event.group_event_registration_desk'):
search_domain_base = expression.AND([
search_domain_base,
[('is_published', '=', True)]
])
return search_domain_base
# ------------------------------------------------------------
# TRACK LIST VIEW
# ------------------------------------------------------------
@http.route([
'''/event/<model("event.event"):event>/track''',
'''/event/<model("event.event"):event>/track/tag/<model("event.track.tag"):tag>'''
], type='http', auth="public", website=True, sitemap=False)
def event_tracks(self, event, tag=None, **searches):
""" Main route
:param event: event whose tracks are about to be displayed;
:param tag: deprecated: search for a specific tag
:param searches: frontend search dict, containing
* 'search': search string;
* 'tags': list of tag IDs for filtering;
"""
return request.render(
"website_event_track.tracks_session",
self._event_tracks_get_values(event, tag=tag, **searches)
)
def _event_tracks_get_values(self, event, tag=None, **searches):
# init and process search terms
searches.setdefault('search', '')
searches.setdefault('search_wishlist', '')
searches.setdefault('tags', '')
search_domain = self._get_event_tracks_agenda_domain(event)
# search on content
if searches.get('search'):
search_domain = expression.AND([
search_domain,
[('name', 'ilike', searches['search'])]
])
# search on tags
search_tags = self._get_search_tags(searches['tags'])
if not search_tags and tag: # backward compatibility
search_tags = tag
if search_tags:
# Example: You filter on age: 10-12 and activity: football.
            # Doing it this way only fetches events that are tagged "age: 10-12" AND "activity: football".
            # Add another tag "age: 12-15" to the search and it would fetch the ones that are tagged:
            # ("age: 10-12" OR "age: 12-15") AND "activity: football"
grouped_tags = dict()
for search_tag in search_tags:
grouped_tags.setdefault(search_tag.category_id, list()).append(search_tag)
search_domain_items = [
[('tag_ids', 'in', [tag.id for tag in grouped_tags[group]])]
for group in grouped_tags
]
search_domain = expression.AND([
search_domain,
*search_domain_items
])
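            # e.g. (hypothetical tag ids) selecting {age 10-12: 1, age 12-15: 2,
            # activity football: 3} builds
            # AND([base, [('tag_ids', 'in', [1, 2])], [('tag_ids', 'in', [3])]]):
            # one 'in' leaf per tag category, AND-ed together.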
# fetch data to display with TZ set for both event and tracks
now_tz = utc.localize(fields.Datetime.now().replace(microsecond=0), is_dst=False).astimezone(timezone(event.date_tz))
today_tz = now_tz.date()
event = event.with_context(tz=event.date_tz or 'UTC')
tracks_sudo = event.env['event.track'].sudo().search(search_domain, order='is_published desc, date asc')
tag_categories = request.env['event.track.tag.category'].sudo().search([])
# filter on wishlist (as post processing due to costly search on is_reminder_on)
if searches.get('search_wishlist'):
tracks_sudo = tracks_sudo.filtered(lambda track: track.is_reminder_on)
# organize categories for display: announced, live, soon and day-based
tracks_announced = tracks_sudo.filtered(lambda track: not track.date)
tracks_wdate = tracks_sudo - tracks_announced
date_begin_tz_all = list(set(
dt.date()
for dt in self._get_dt_in_event_tz(tracks_wdate.mapped('date'), event)
))
date_begin_tz_all.sort()
tracks_sudo_live = tracks_wdate.filtered(lambda track: track.is_track_live)
tracks_sudo_soon = tracks_wdate.filtered(lambda track: not track.is_track_live and track.is_track_soon)
tracks_by_day = []
for display_date in date_begin_tz_all:
matching_tracks = tracks_wdate.filtered(lambda track: self._get_dt_in_event_tz([track.date], event)[0].date() == display_date)
tracks_by_day.append({'date': display_date, 'name': display_date, 'tracks': matching_tracks})
if tracks_announced:
tracks_announced = tracks_announced.sorted('wishlisted_by_default', reverse=True)
tracks_by_day.append({'date': False, 'name': _('Coming soon'), 'tracks': tracks_announced})
for tracks_group in tracks_by_day:
# the tracks group is folded if all tracks are done (and if it's not "today")
tracks_group['default_collapsed'] = (today_tz != tracks_group['date']) and all(
track.is_track_done and not track.is_track_live
for track in tracks_group['tracks']
)
# return rendering values
return {
# event information
'event': event,
'main_object': event,
# tracks display information
'tracks': tracks_sudo,
'tracks_by_day': tracks_by_day,
'tracks_live': tracks_sudo_live,
'tracks_soon': tracks_sudo_soon,
'today_tz': today_tz,
# search information
'searches': searches,
'search_key': searches['search'],
'search_wishlist': searches['search_wishlist'],
'search_tags': search_tags,
'tag_categories': tag_categories,
# environment
'is_html_empty': is_html_empty,
'hostname': request.httprequest.host.split(':')[0],
'is_event_user': request.env.user.has_group('event.group_event_user'),
}
# ------------------------------------------------------------
# AGENDA VIEW
# ------------------------------------------------------------
@http.route(['''/event/<model("event.event"):event>/agenda'''], type='http', auth="public", website=True, sitemap=False)
def event_agenda(self, event, tag=None, **post):
event = event.with_context(tz=event.date_tz or 'UTC')
vals = {
'event': event,
'main_object': event,
'tag': tag,
'is_event_user': request.env.user.has_group('event.group_event_user'),
}
vals.update(self._prepare_calendar_values(event))
return request.render("website_event_track.agenda_online", vals)
def _prepare_calendar_values(self, event):
""" This methods slit the day (max end time - min start time) into
15 minutes time slots. For each time slot, we assign the tracks that
start at this specific time slot, and we add the number of time slot
that the track covers (track duration / 15 min). The calendar will be
divided into rows of 15 min, and the talks will cover the corresponding
number of rows (15 min slots). """
event = event.with_context(tz=event.date_tz or 'UTC')
local_tz = pytz.timezone(event.date_tz or 'UTC')
lang_code = request.env.context.get('lang')
base_track_domain = expression.AND([
self._get_event_tracks_agenda_domain(event),
[('date', '!=', False)]
])
tracks_sudo = request.env['event.track'].sudo().search(base_track_domain)
locations = list(set(track.location_id for track in tracks_sudo))
locations.sort(key=lambda x: x.id)
# First split day by day (based on start time)
time_slots_by_tracks = {track: self._split_track_by_days(track, local_tz) for track in tracks_sudo}
# extract all the tracks time slots
        track_time_slots = set().union(*(time_slots.keys() for time_slots in time_slots_by_tracks.values()))
# extract unique days
days = list(set(time_slot.date() for time_slot in track_time_slots))
days.sort()
# Create the dict that contains the tracks at the correct time_slots / locations coordinates
tracks_by_days = dict.fromkeys(days, 0)
time_slots_by_day = dict((day, dict(start=set(), end=set())) for day in days)
tracks_by_rounded_times = dict((time_slot, dict((location, {}) for location in locations)) for time_slot in track_time_slots)
for track, time_slots in time_slots_by_tracks.items():
start_date = fields.Datetime.from_string(track.date).replace(tzinfo=pytz.utc).astimezone(local_tz)
end_date = start_date + timedelta(hours=(track.duration or 0.25))
for time_slot, duration in time_slots.items():
tracks_by_rounded_times[time_slot][track.location_id][track] = {
'rowspan': duration, # rowspan
'start_date': self._get_locale_time(start_date, lang_code),
'end_date': self._get_locale_time(end_date, lang_code),
'occupied_cells': self._get_occupied_cells(track, duration, locations, local_tz)
}
# get all the time slots by day to determine the max duration of a day.
day = time_slot.date()
time_slots_by_day[day]['start'].add(time_slot)
time_slots_by_day[day]['end'].add(time_slot+timedelta(minutes=15*duration))
tracks_by_days[day] += 1
# split days into 15 minutes time slots
global_time_slots_by_day = dict((day, {}) for day in days)
for day, time_slots in time_slots_by_day.items():
start_time_slot = min(time_slots['start'])
end_time_slot = max(time_slots['end'])
time_slots_count = int(((end_time_slot - start_time_slot).total_seconds() / 3600) * 4)
current_time_slot = start_time_slot
for i in range(0, time_slots_count + 1):
global_time_slots_by_day[day][current_time_slot] = tracks_by_rounded_times.get(current_time_slot, {})
global_time_slots_by_day[day][current_time_slot]['formatted_time'] = self._get_locale_time(current_time_slot, lang_code)
current_time_slot = current_time_slot + timedelta(minutes=15)
# count the number of tracks by days
tracks_by_days = dict.fromkeys(days, 0)
locations_by_days = defaultdict(list)
for track in tracks_sudo:
track_day = fields.Datetime.from_string(track.date).replace(tzinfo=pytz.utc).astimezone(local_tz).date()
tracks_by_days[track_day] += 1
if track.location_id not in locations_by_days[track_day]:
locations_by_days[track_day].append(track.location_id)
for used_locations in locations_by_days.values():
used_locations.sort(key=lambda location: location.id if location else 0)
return {
'days': days,
'tracks_by_days': tracks_by_days,
'locations_by_days': locations_by_days,
'time_slots': global_time_slots_by_day,
'locations': locations # TODO: clean me in master, kept for retro-compatibility
}
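    # Illustrative shape of the dict returned above (example values assumed):
    #   {
    #       'days': [date(2000, 1, 1), date(2000, 1, 2)],
    #       'tracks_by_days': {date(2000, 1, 1): 3, date(2000, 1, 2): 1},
    #       'locations_by_days': {date(2000, 1, 1): [location_a, location_b]},
    #       'time_slots': {date(2000, 1, 1): {datetime(...): {...}, ...}},
    #       'locations': [...],  # kept for retro-compatibility
    #   }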
def _get_locale_time(self, dt_time, lang_code):
""" Get locale time from datetime object
:param dt_time: datetime object
:param lang_code: language code (eg. en_US)
"""
locale = babel_locale_parse(lang_code)
return babel.dates.format_time(dt_time, format='short', locale=locale)
def time_slot_rounder(self, time, rounded_minutes):
""" Rounds to nearest hour by adding a timedelta hour if minute >= rounded_minutes
E.g. : If rounded_minutes = 15 -> 09:26:00 becomes 09:30:00
09:17:00 becomes 09:15:00
"""
return (time.replace(second=0, microsecond=0, minute=0, hour=time.hour)
+ timedelta(minutes=rounded_minutes * (time.minute // rounded_minutes)))
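    # Illustrative flooring behaviour (doctest-style sketch; `controller` is any
    # instance of this class, naive stdlib datetimes assumed):
    #   >>> from datetime import datetime
    #   >>> controller.time_slot_rounder(datetime(2000, 1, 1, 9, 26), 15)
    #   datetime.datetime(2000, 1, 1, 9, 15)
    #   >>> controller.time_slot_rounder(datetime(2000, 1, 1, 9, 46), 15)
    #   datetime.datetime(2000, 1, 1, 9, 45)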
def _split_track_by_days(self, track, local_tz):
"""
Based on the track start_date and the duration,
split the track duration into :
start_time by day : number of time slot (15 minutes) that the track takes on that day.
E.g. : start date = 01-01-2000 10:00 PM and duration = 3 hours
return {
01-01-2000 10:00:00 PM: 8 (2 * 4),
01-02-2000 00:00:00 AM: 4 (1 * 4)
}
Also return a set of all the time slots
"""
start_date = fields.Datetime.from_string(track.date).replace(tzinfo=pytz.utc).astimezone(local_tz)
start_datetime = self.time_slot_rounder(start_date, 15)
end_datetime = self.time_slot_rounder(start_datetime + timedelta(hours=(track.duration or 0.25)), 15)
time_slots_count = int(((end_datetime - start_datetime).total_seconds() / 3600) * 4)
time_slots_by_day_start_time = {start_datetime: 0}
        for i in range(0, time_slots_count):
            # If the new time slot is still on the current day
            next_day = (start_datetime + timedelta(days=1)).date()
            if (start_datetime + timedelta(minutes=15*i)).date() <= next_day:
                time_slots_by_day_start_time[start_datetime] += 1
            else:
                # Crossed into a new day: restart the counter at local midnight.
                # `date` objects have no .datetime() method; build an aware datetime
                # instead (assumes `datetime` is imported at the top of the file).
                start_datetime = local_tz.localize(datetime.combine(next_day, datetime.min.time()))
                time_slots_by_day_start_time[start_datetime] = 0
return time_slots_by_day_start_time
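    # Sanity check of the slot arithmetic above: a 3 hour track starting at 22:00
    # gives time_slots_count = int((3 * 3600 / 3600) * 4) == 12 slots of 15 minutes,
    # i.e. 8 slots before midnight (2 h) and 4 after it (1 h), matching the docstring.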
def _get_occupied_cells(self, track, rowspan, locations, local_tz):
"""
In order to use only once the cells that the tracks will occupy, we need to reserve those cells
(time_slot, location) coordinate. Those coordinated will be given to the template to avoid adding
blank cells where already occupied by a track.
"""
occupied_cells = []
start_date = fields.Datetime.from_string(track.date).replace(tzinfo=pytz.utc).astimezone(local_tz)
start_date = self.time_slot_rounder(start_date, 15)
for i in range(0, rowspan):
time_slot = start_date + timedelta(minutes=15*i)
if track.location_id:
occupied_cells.append((time_slot, track.location_id))
# when no location, reserve all locations
else:
occupied_cells += [(time_slot, location) for location in locations if location]
return occupied_cells
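    # Illustration: a 1 h track (rowspan == 4) in location L starting at 10:00 reserves
    #   [(10:00, L), (10:15, L), (10:30, L), (10:45, L)]
    # while a track without a location reserves those time slots in every location.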
# ------------------------------------------------------------
# TRACK PAGE VIEW
# ------------------------------------------------------------
@http.route('''/event/<model("event.event", "[('website_track', '=', True)]"):event>/track/<model("event.track", "[('event_id', '=', event.id)]"):track>''',
type='http', auth="public", website=True, sitemap=True)
def event_track_page(self, event, track, **options):
track = self._fetch_track(track.id, allow_sudo=False)
return request.render(
"website_event_track.event_track_main",
self._event_track_page_get_values(event, track.sudo(), **options)
)
def _event_track_page_get_values(self, event, track, **options):
track = track.sudo()
option_widescreen = options.get('widescreen', False)
option_widescreen = bool(option_widescreen) if option_widescreen != '0' else False
# search for tracks list
tracks_other = track._get_track_suggestions(
restrict_domain=self._get_event_tracks_domain(track.event_id),
limit=10
)
return {
# event information
'event': event,
'main_object': track,
'track': track,
# sidebar
'tracks_other': tracks_other,
# options
'option_widescreen': option_widescreen,
# environment
'is_html_empty': is_html_empty,
'hostname': request.httprequest.host.split(':')[0],
'is_event_user': request.env.user.has_group('event.group_event_user'),
'user_event_manager': request.env.user.has_group('event.group_event_manager'),
}
@http.route("/event/track/toggle_reminder", type="json", auth="public", website=True)
def track_reminder_toggle(self, track_id, set_reminder_on):
""" Set a reminder a track for current visitor. Track visitor is created or updated
if it already exists. Exception made if un-favoriting and no track_visitor
record found (should not happen unless manually done).
:param boolean set_reminder_on:
If True, set as a favorite, otherwise un-favorite track;
If the track is a Key Track (wishlisted_by_default):
if set_reminder_on = False, blacklist the track_partner
otherwise, un-blacklist the track_partner
"""
track = self._fetch_track(track_id, allow_sudo=True)
force_create = set_reminder_on or track.wishlisted_by_default
event_track_partner = track._get_event_track_visitors(force_create=force_create)
visitor_sudo = event_track_partner.visitor_id
if not track.wishlisted_by_default:
if not event_track_partner or event_track_partner.is_wishlisted == set_reminder_on: # ignore if new state = old state
return {'error': 'ignored'}
event_track_partner.is_wishlisted = set_reminder_on
else:
if not event_track_partner or event_track_partner.is_blacklisted != set_reminder_on: # ignore if new state = old state
return {'error': 'ignored'}
event_track_partner.is_blacklisted = not set_reminder_on
result = {'reminderOn': set_reminder_on}
if request.httprequest.cookies.get('visitor_uuid', '') != visitor_sudo.access_token:
result['visitor_uuid'] = visitor_sudo.access_token
return result
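    # Illustrative exchange (assumed values): a client posts
    #   {"track_id": 42, "set_reminder_on": true}
    # and receives {"reminderOn": true} on success, {"error": "ignored"} when the new
    # state equals the old one, plus "visitor_uuid" when the visitor cookie must be refreshed.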
# ------------------------------------------------------------
# TRACK PROPOSAL
# ------------------------------------------------------------
@http.route(['''/event/<model("event.event"):event>/track_proposal'''], type='http', auth="public", website=True, sitemap=False)
def event_track_proposal(self, event, **post):
return request.render("website_event_track.event_track_proposal", {'event': event, 'main_object': event})
@http.route(['''/event/<model("event.event"):event>/track_proposal/post'''], type='http', auth="public", methods=['POST'], website=True)
def event_track_proposal_post(self, event, **post):
if not event.can_access_from_current_website():
return json.dumps({'error': 'forbidden'})
        # Only accept existing tag indices. Use search instead of browse + exists:
        # this prevents users from registering colorless tags when not allowed to (ACL).
input_tag_indices = [int(tag_id) for tag_id in post['tags'].split(',') if tag_id]
valid_tag_indices = request.env['event.track.tag'].search([('id', 'in', input_tag_indices)]).ids
contact = request.env['res.partner']
visitor_partner = request.env['website.visitor']._get_visitor_from_request().partner_id
        # Contact name is required, so empty contacts are not considered here. At least one of
        # contact_phone and contact_email must be filled, and the email is verified. If the post tries
        # to create a contact with no valid entry, return an error. If the normalized email is the same
        # as the logged partner's, use its partner_id on the track instead: this prevents contact
        # duplication. Otherwise, create a new contact with the additional contact info of the post.
if post.get('add_contact_information'):
valid_contact_email = tools.email_normalize(post.get('contact_email'))
            # The phone is not formatted here: formatting requires a country (from geoip, for
            # instance), but one could propose a track in country A with a phone number from
            # country B, so validity is tricky. We therefore accept any format for contact_phone.
            # Could be improved with a country-aware phone widget.
if valid_contact_email or post.get('contact_phone'):
if visitor_partner and valid_contact_email == visitor_partner.email_normalized:
contact = visitor_partner
else:
contact = request.env['res.partner'].sudo().create({
'email': valid_contact_email,
'name': post.get('contact_name'),
'phone': post.get('contact_phone'),
})
else:
return json.dumps({'error': 'invalidFormInputs'})
        # If the speaker email is the same as the logged user's, also use their partner on the track, as above.
else:
valid_speaker_email = tools.email_normalize(post['partner_email'])
if visitor_partner and valid_speaker_email == visitor_partner.email_normalized:
contact = visitor_partner
track = request.env['event.track'].with_context({'mail_create_nosubscribe': True}).sudo().create({
'name': post['track_name'],
'partner_id': contact.id,
'partner_name': post['partner_name'],
'partner_email': post['partner_email'],
'partner_phone': post['partner_phone'],
'partner_function': post['partner_function'],
'contact_phone': contact.phone,
'contact_email': contact.email,
'event_id': event.id,
'tag_ids': [(6, 0, valid_tag_indices)],
'description': plaintext2html(post['description']),
'partner_biography': plaintext2html(post['partner_biography']),
'user_id': False,
'image': base64.b64encode(post['image'].read()) if post.get('image') else False,
})
if request.env.user != request.website.user_id:
track.sudo().message_subscribe(partner_ids=request.env.user.partner_id.ids)
return json.dumps({'success': True})
    # ACL: this route is necessary since the js rpc search_read method is not accessible to all users (e.g. public users).
@http.route(['''/event/track_tag/search_read'''], type='json', auth="public", website=True)
def website_event_track_fetch_tags(self, domain, fields):
return request.env['event.track.tag'].search_read(domain, fields)
# ------------------------------------------------------------
# TOOLS
# ------------------------------------------------------------
def _fetch_track(self, track_id, allow_sudo=False):
track = request.env['event.track'].browse(track_id).exists()
if not track:
raise NotFound()
try:
track.check_access_rights('read')
track.check_access_rule('read')
except exceptions.AccessError:
if not allow_sudo:
raise Forbidden()
track = track.sudo()
event = track.event_id
        # JSON-RPC requests have no website bound, hence the hasattr check
if hasattr(request, 'website_id') and not event.can_access_from_current_website():
raise NotFound()
try:
event.check_access_rights('read')
event.check_access_rule('read')
except exceptions.AccessError:
raise Forbidden()
return track
def _get_search_tags(self, tag_search):
# TDE FIXME: make me generic (slides, event, ...)
try:
tag_ids = literal_eval(tag_search)
except Exception:
tags = request.env['event.track.tag'].sudo()
else:
# perform a search to filter on existing / valid tags implicitly
tags = request.env['event.track.tag'].sudo().search([('id', 'in', tag_ids)])
return tags
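    # Illustration: tag_search is expected to be a Python literal such as "[1, 2, 3]";
    # anything literal_eval rejects falls back to an empty event.track.tag recordset,
    # and the search implicitly drops ids that do not exist.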
def _get_dt_in_event_tz(self, datetimes, event):
tz_name = event.date_tz
return [
utc.localize(dt, is_dst=False).astimezone(timezone(tz_name))
for dt in datetimes
]
| 48.625709 | 25,723 |
1,011 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Forum on Courses',
'category': 'Website/eLearning',
'version': '1.0',
'summary': 'Allows to link forum on a course',
'description': """A Slide channel can be linked to forum. Also, profiles from slide and forum are regrouped together""",
'depends': [
'website_slides',
'website_forum'
],
'data': [
'security/ir.model.access.csv',
'security/website_slides_forum_security.xml',
'views/forum_views.xml',
'views/slide_channel_views.xml',
'views/website_slides_menu_views.xml',
'views/website_slides_templates.xml',
'views/website_slides_forum_templates.xml'
],
'demo': [
'data/slide_channel_demo.xml',
],
'auto_install': True,
'assets': {
'web.assets_frontend': [
'website_slides_forum/static/src/js/**/*',
],
},
'license': 'LGPL-3',
}
| 30.636364 | 1,011 |
1,503 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class Channel(models.Model):
_inherit = 'slide.channel'
forum_id = fields.Many2one('forum.forum', 'Course Forum')
forum_total_posts = fields.Integer('Number of active forum posts', related="forum_id.total_posts")
_sql_constraints = [
('forum_uniq', 'unique (forum_id)', "Only one course per forum!"),
]
def action_redirect_to_forum(self):
self.ensure_one()
action = self.env["ir.actions.actions"]._for_xml_id("website_forum.action_forum_post")
action['view_mode'] = 'tree'
action['context'] = {
'create': False
}
action['domain'] = [('forum_id', '=', self.forum_id.id)]
return action
@api.model_create_multi
def create(self, vals_list):
channels = super(Channel, self.with_context(mail_create_nosubscribe=True)).create(vals_list)
channels.forum_id.privacy = False
return channels
def write(self, vals):
old_forum = self.forum_id
res = super(Channel, self).write(vals)
if 'forum_id' in vals:
self.forum_id.privacy = False
if old_forum != self.forum_id:
old_forum.write({
'privacy': 'private',
'authorized_group_id': self.env.ref('website_slides.group_website_slides_officer').id,
})
return res
| 33.4 | 1,503 |
891 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class Forum(models.Model):
_inherit = 'forum.forum'
slide_channel_ids = fields.One2many('slide.channel', 'forum_id', 'Courses', help="Edit the course linked to this forum on the course form.")
slide_channel_id = fields.Many2one('slide.channel', 'Course', compute='_compute_slide_channel_id', store=True)
    visibility = fields.Selection(related='slide_channel_id.visibility', help="When the forum is linked to a course, the visibility is the one applied on the course.")
@api.depends('slide_channel_ids')
def _compute_slide_channel_id(self):
for forum in self:
if forum.slide_channel_ids:
forum.slide_channel_id = forum.slide_channel_ids[0]
else:
forum.slide_channel_id = None
| 44.55 | 891 |
1,211 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.http import request
from odoo.addons.website_slides.controllers.main import WebsiteSlides
class WebsiteSlidesForum(WebsiteSlides):
def _slide_channel_prepare_values(self, **kwargs):
channel = super(WebsiteSlidesForum, self)._slide_channel_prepare_values(**kwargs)
        if kwargs.get('link_forum'):
forum = request.env['forum.forum'].create({
'name': kwargs.get('name')
})
channel['forum_id'] = forum.id
return channel
# Profile
# ---------------------------------------------------
def _prepare_user_profile_parameters(self, **post):
post = super(WebsiteSlidesForum, self)._prepare_user_profile_parameters(**post)
if post.get('channel_id'):
channel = request.env['slide.channel'].browse(int(post.get('channel_id')))
if channel.forum_id:
post.update({
'forum_id': channel.forum_id.id,
'no_forum': False
})
else:
post.update({'no_forum': True})
return post
| 36.69697 | 1,211 |
1,533 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
"name": """Indian - E-invoicing""",
"version": "1.03.00",
"icon": "/l10n_in/static/description/icon.png",
"category": "Accounting/Localizations/EDI",
"depends": [
"account_edi",
"l10n_in",
"iap",
],
"description": """
Indian - E-invoicing
====================
To submit invoicing through API to the government.
We use "Tera Software Limited" as GSP
Step 1: First you need to create an API username and password in the E-invoice portal.
Step 2: Switch to company related to that GST number
Step 3: Set that username and password in Odoo (Goto: Invoicing/Accounting -> Configuration -> Settings -> Customer Invoices or find "E-invoice" in search bar)
Step 4: Repeat steps 1,2,3 for all GSTIN you have in odoo. If you have a multi-company with the same GST number then perform step 1 for the first company only.
For the creation of API username and password please ref this document: <https://service.odoo.co.in/einvoice_create_api_user>
""",
"data": [
"data/account_edi_data.xml",
"views/res_config_settings_views.xml",
"views/edi_pdf_report.xml",
"views/account_move_views.xml",
],
"demo": [
"demo/demo_company.xml",
],
"installable": True,
# only applicable for taxpayers turnover higher than Rs.50 crore so auto_install is False
"auto_install": False,
"application": False,
"license": "LGPL-3",
}
| 38.325 | 1,533 |
16,593 |
py
|
PYTHON
|
15.0
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.account.tests.common import AccountTestInvoicingCommon
from odoo.tests import tagged
@tagged("post_install_l10n", "post_install", "-at_install")
class TestEdiJson(AccountTestInvoicingCommon):
@classmethod
def setUpClass(cls, chart_template_ref="l10n_in.indian_chart_template_standard"):
super().setUpClass(chart_template_ref=chart_template_ref)
cls.env['ir.config_parameter'].set_param('l10n_in_edi.manage_invoice_negative_lines', True)
cls.maxDiff = None
cls.company_data["company"].write({
"street": "Block no. 401",
"street2": "Street 2",
"city": "City 1",
"zip": "500001",
"state_id": cls.env.ref("base.state_in_ts").id,
"country_id": cls.env.ref("base.in").id,
"vat": "36AABCT1332L011",
})
cls.partner_a.write({
"vat": "36BBBFF5679L8ZR",
"street": "Block no. 401",
"street2": "Street 2",
"city": "City 2",
"zip": "500001",
"state_id": cls.env.ref("base.state_in_ts").id,
"country_id": cls.env.ref("base.in").id,
"l10n_in_gst_treatment": "regular",
})
cls.product_a.write({"l10n_in_hsn_code": "01111"})
cls.product_a2 = cls.env['product.product'].create({
'name': 'product_a2',
'uom_id': cls.env.ref('uom.product_uom_unit').id,
'lst_price': 1000.0,
'standard_price': 1000.0,
'property_account_income_id': cls.company_data['default_account_revenue'].id,
'property_account_expense_id': cls.company_data['default_account_expense'].id,
'taxes_id': [(6, 0, cls.tax_sale_a.ids)],
'supplier_taxes_id': [(6, 0, cls.tax_purchase_a.ids)],
"l10n_in_hsn_code": "01111",
})
cls.product_a_discount = cls.env['product.product'].create({
'name': 'product_a discount',
'uom_id': cls.env.ref('uom.product_uom_unit').id,
'lst_price': 400.0,
'standard_price': 400.0,
'property_account_income_id': cls.company_data['default_account_revenue'].id,
'property_account_expense_id': cls.company_data['default_account_expense'].id,
'taxes_id': [(6, 0, cls.tax_sale_a.ids)],
'supplier_taxes_id': [(6, 0, cls.tax_purchase_a.ids)],
"l10n_in_hsn_code": "01111",
})
gst_with_cess = cls.env.ref("l10n_in.%s_sgst_sale_12" % (cls.company_data["company"].id)
) + cls.env.ref("l10n_in.%s_cess_5_plus_1591_sale" % (cls.company_data["company"].id))
product_with_cess = cls.env["product.product"].create({
"name": "product_with_cess",
"uom_id": cls.env.ref("uom.product_uom_unit").id,
"lst_price": 1000.0,
"standard_price": 800.0,
"property_account_income_id": cls.company_data["default_account_revenue"].id,
"property_account_expense_id": cls.company_data["default_account_expense"].id,
"taxes_id": [(6, 0, gst_with_cess.ids)],
"supplier_taxes_id": [(6, 0, cls.tax_purchase_a.ids)],
"l10n_in_hsn_code": "02222",
})
cls.invoice = cls.init_invoice("out_invoice", post=False, products=cls.product_a + product_with_cess)
cls.invoice.write({
"invoice_line_ids": [(1, l_id, {"discount": 10}) for l_id in cls.invoice.invoice_line_ids.ids]})
cls.invoice.action_post()
cls.invoice_full_discount = cls.init_invoice("out_invoice", post=False, products=cls.product_a)
cls.invoice_full_discount.write({
"invoice_line_ids": [(1, l_id, {"discount": 100}) for l_id in cls.invoice_full_discount.invoice_line_ids.ids]})
cls.invoice_full_discount.action_post()
cls.invoice_zero_qty = cls.init_invoice("out_invoice", post=False, products=cls.product_a)
cls.invoice_zero_qty.write({
"invoice_line_ids": [(1, l_id, {"quantity": 0}) for l_id in cls.invoice_zero_qty.invoice_line_ids.ids]})
cls.invoice_zero_qty.action_post()
cls.invoice_negative_unit_price = cls.init_invoice("out_invoice", post=False, products=cls.product_a + cls.product_a_discount + product_with_cess)
cls.invoice_negative_unit_price.write({
"invoice_line_ids": [
(1, cls.invoice_negative_unit_price.invoice_line_ids[0].id, {"price_unit": 1000}),
(1, cls.invoice_negative_unit_price.invoice_line_ids[1].id, {"price_unit": -400}),
]})
cls.invoice_negative_unit_price.action_post()
cls.invoice_negative_qty = cls.init_invoice("out_invoice", post=False, products=cls.product_a + cls.product_a_discount + product_with_cess)
cls.invoice_negative_qty.write({
"invoice_line_ids": [
(1, cls.invoice_negative_qty.invoice_line_ids[0].id, {"price_unit": 1000}),
(1, cls.invoice_negative_qty.invoice_line_ids[1].id, {"price_unit": 400, 'quantity': -1}),
]})
cls.invoice_negative_qty.action_post()
cls.invoice_negative_unit_price_and_qty = cls.init_invoice("out_invoice", post=False, products=cls.product_a + cls.product_a_discount + product_with_cess)
cls.invoice_negative_unit_price_and_qty.write({
"invoice_line_ids": [
(1, cls.invoice_negative_unit_price_and_qty.invoice_line_ids[0].id, {"price_unit": -1000, 'quantity': -1}),
(1, cls.invoice_negative_unit_price_and_qty.invoice_line_ids[1].id, {"price_unit": -400}),
]})
cls.invoice_negative_unit_price_and_qty.action_post()
cls.invoice_negative_with_discount = cls.init_invoice("out_invoice", post=False, products=cls.product_a + cls.product_a_discount)
cls.invoice_negative_with_discount.write({
"invoice_line_ids": [
(1, cls.invoice_negative_with_discount.invoice_line_ids[0].id, {"price_unit": 2000, 'discount': 50}),
(1, cls.invoice_negative_with_discount.invoice_line_ids[1].id, {"price_unit": -400}),
]})
cls.invoice_negative_with_discount.action_post()
cls.invoice_negative_more_than_max_line = cls.init_invoice("out_invoice", post=False, products=cls.product_a + cls.product_a2 + cls.product_a_discount)
cls.invoice_negative_more_than_max_line.write({
"invoice_line_ids": [
(1, cls.invoice_negative_more_than_max_line.invoice_line_ids[0].id, {"price_unit": 2000, 'discount': 50}),
(1, cls.invoice_negative_more_than_max_line.invoice_line_ids[1].id, {"price_unit": 1000}),
(1, cls.invoice_negative_more_than_max_line.invoice_line_ids[2].id, {"price_unit": -1100}),
]})
cls.invoice_negative_more_than_max_line.action_post()
def test_edi_json(self):
json_value = self.env["account.edi.format"]._l10n_in_edi_generate_invoice_json(self.invoice)
expected = {
"Version": "1.1",
"TranDtls": {"TaxSch": "GST", "SupTyp": "B2B", "RegRev": "N", "IgstOnIntra": "N"},
"DocDtls": {"Typ": "INV", "No": "INV/2019/00001", "Dt": "01/01/2019"},
"SellerDtls": {
"LglNm": "company_1_data",
"Addr1": "Block no. 401",
"Addr2": "Street 2",
"Loc": "City 1",
"Pin": 500001,
"Stcd": "36",
"GSTIN": "36AABCT1332L011"},
"BuyerDtls": {
"LglNm": "partner_a",
"Addr1": "Block no. 401",
"Addr2": "Street 2",
"Loc": "City 2",
"Pin": 500001,
"Stcd": "36",
"POS": "36",
"GSTIN": "36BBBFF5679L8ZR"},
"ItemList": [
{
"SlNo": "1", "PrdDesc": "product_a", "IsServc": "N", "HsnCd": "01111", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 1000.0, "TotAmt": 1000.0, "Discount": 100.0, "AssAmt": 900.0,
"GstRt": 5.0, "IgstAmt": 0.0, "CgstAmt": 22.5, "SgstAmt": 22.5, "CesRt": 0.0, "CesAmt": 0.0,
"CesNonAdvlAmt": 0.0, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 945.0
},
{
"SlNo": "2", "PrdDesc": "product_with_cess", "IsServc": "N", "HsnCd": "02222", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 1000.0, "TotAmt": 1000.0, "Discount": 100.0, "AssAmt": 900.0,
"GstRt": 12.0, "IgstAmt": 0.0, "CgstAmt": 54.0, "SgstAmt": 54.0, "CesRt": 5.0, "CesAmt": 45.0,
"CesNonAdvlAmt": 1.59, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 1054.59
}
],
"ValDtls": {
"AssVal": 1800.0, "CgstVal": 76.5, "SgstVal": 76.5, "IgstVal": 0.0, "CesVal": 46.59,
"StCesVal": 0.0, "RndOffAmt": 0.0, "TotInvVal": 1999.59
}
}
self.assertDictEqual(json_value, expected, "Indian EDI send json value is not matched")
#=================================== Full discount test =====================================
json_value = self.env["account.edi.format"]._l10n_in_edi_generate_invoice_json(self.invoice_full_discount)
expected.update({
"DocDtls": {"Typ": "INV", "No": "INV/2019/00002", "Dt": "01/01/2019"},
"ItemList": [{
"SlNo": "1", "PrdDesc": "product_a", "IsServc": "N", "HsnCd": "01111", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 1000.0, "TotAmt": 1000.0, "Discount": 1000.0, "AssAmt": 0.0,
"GstRt": 0.0, "IgstAmt": 0.0, "CgstAmt": 0.0, "SgstAmt": 0.0, "CesRt": 0.0, "CesAmt": 0.0,
"CesNonAdvlAmt": 0.0, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 0.0}],
"ValDtls": {"AssVal": 0.0, "CgstVal": 0.0, "SgstVal": 0.0, "IgstVal": 0.0, "CesVal": 0.0,
"StCesVal": 0.0, "RndOffAmt": 0.0, "TotInvVal": 0.0}
})
self.assertDictEqual(json_value, expected, "Indian EDI with 100% discount sent json value is not matched")
#=================================== Zero quantity test =============================================
json_value = self.env["account.edi.format"]._l10n_in_edi_generate_invoice_json(self.invoice_zero_qty)
expected.update({
"DocDtls": {"Typ": "INV", "No": "INV/2019/00003", "Dt": "01/01/2019"},
"ItemList": [{
"SlNo": "1", "PrdDesc": "product_a", "IsServc": "N", "HsnCd": "01111", "Qty": 0.0,
"Unit": "UNT", "UnitPrice": 1000.0, "TotAmt": 0.0, "Discount": 0.0, "AssAmt": 0.0,
"GstRt": 0.0, "IgstAmt": 0.0, "CgstAmt": 0.0, "SgstAmt": 0.0, "CesRt": 0.0, "CesAmt": 0.0,
"CesNonAdvlAmt": 0.0, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 0.0}],
})
self.assertDictEqual(json_value, expected, "Indian EDI with 0(zero) quantity sent json value is not matched")
#=================================== Negative unit price test =============================================
json_value = self.env["account.edi.format"]._l10n_in_edi_generate_invoice_json(self.invoice_negative_unit_price)
expected.update({
"DocDtls": {"Typ": "INV", "No": "INV/2019/00004", "Dt": "01/01/2019"},
"ItemList": [
{
"SlNo": "1", "PrdDesc": "product_a", "IsServc": "N", "HsnCd": "01111", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 1000.0, "TotAmt": 1000.0, "Discount": 400.0, "AssAmt": 600.0,
"GstRt": 5.0, "IgstAmt": 0.0, "CgstAmt": 15.0, "SgstAmt": 15.0, "CesRt": 0.0, "CesAmt": 0.0,
"CesNonAdvlAmt": 0.0, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 630.0
},
{
"SlNo": "3", "PrdDesc": "product_with_cess", "IsServc": "N", "HsnCd": "02222", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 1000.0, "TotAmt": 1000.0, "Discount": 0.0, "AssAmt": 1000.0,
"GstRt": 12.0, "IgstAmt": 0.0, "CgstAmt": 60.0, "SgstAmt": 60.0, "CesRt": 5.0, "CesAmt": 50.0,
"CesNonAdvlAmt": 1.59, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 1171.59
}
],
"ValDtls": {
"AssVal": 1600.0, "CgstVal": 75.0, "SgstVal": 75.0, "IgstVal": 0.0, "CesVal": 51.59,
"StCesVal": 0.0, "RndOffAmt": 0.0, "TotInvVal": 1801.59
},
})
self.assertDictEqual(json_value, expected, "Indian EDI with negative unit price sent json value is not matched")
expected.update({"DocDtls": {"Typ": "INV", "No": "INV/2019/00005", "Dt": "01/01/2019"}})
json_value = self.env["account.edi.format"]._l10n_in_edi_generate_invoice_json(self.invoice_negative_qty)
self.assertDictEqual(json_value, expected, "Indian EDI with negative quantity sent json value is not matched")
expected.update({"DocDtls": {"Typ": "INV", "No": "INV/2019/00006", "Dt": "01/01/2019"}})
json_value = self.env["account.edi.format"]._l10n_in_edi_generate_invoice_json(self.invoice_negative_unit_price_and_qty)
self.assertDictEqual(json_value, expected, "Indian EDI with negative unit price and quantity sent json value is not matched")
expected.update({
"DocDtls": {"Typ": "INV", "No": "INV/2019/00007", "Dt": "01/01/2019"},
"ItemList": [{
"SlNo": "1", "PrdDesc": "product_a", "IsServc": "N", "HsnCd": "01111", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 2000.0, "TotAmt": 2000.0, "Discount": 1400.0, "AssAmt": 600.0,
"GstRt": 5.0, "IgstAmt": 0.0, "CgstAmt": 15.0, "SgstAmt": 15.0, "CesRt": 0.0, "CesAmt": 0.0,
"CesNonAdvlAmt": 0.0, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 630.0
}],
"ValDtls": {
"AssVal": 600.0, "CgstVal": 15.0, "SgstVal": 15.0, "IgstVal": 0.0, "CesVal": 0.0,
"StCesVal": 0.0, "RndOffAmt": 0.0, "TotInvVal": 630.0
},
})
json_value = self.env["account.edi.format"]._l10n_in_edi_generate_invoice_json(self.invoice_negative_with_discount)
self.assertDictEqual(json_value, expected, "Indian EDI with negative unit price and quantity sent json value is not matched")
expected.update({
"DocDtls": {"Typ": "INV", "No": "INV/2019/00008", "Dt": "01/01/2019"},
"ItemList": [{
"SlNo": "1", "PrdDesc": "product_a", "IsServc": "N", "HsnCd": "01111", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 2000.0, "TotAmt": 2000.0, "Discount": 2000.0, "AssAmt": 0.0,
"GstRt": 5.0, "IgstAmt": 0.0, "CgstAmt": 0.0, "SgstAmt": 0.0, "CesRt": 0.0, "CesAmt": 0.0,
"CesNonAdvlAmt": 0.0, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 0.0
},
{
"SlNo": "2", "PrdDesc": "product_a2", "IsServc": "N", "HsnCd": "01111", "Qty": 1.0,
"Unit": "UNT", "UnitPrice": 1000.0, "TotAmt": 1000.0, "Discount": 100.0, "AssAmt": 900.0,
"GstRt": 5.0, "IgstAmt": 0.0, "CgstAmt": 22.5, "SgstAmt": 22.5, "CesRt": 0.0, "CesAmt": 0.0,
"CesNonAdvlAmt": 0.0, "StateCesRt": 0.0, "StateCesAmt": 0.0, "StateCesNonAdvlAmt": 0.0,
"OthChrg": 0.0, "TotItemVal": 945.0
}],
"ValDtls": {
"AssVal": 900.0, "CgstVal": 22.5, "SgstVal": 22.5, "IgstVal": 0.0, "CesVal": 0.0,
"StCesVal": 0.0, "RndOffAmt": 0.0, "TotInvVal": 945.0
},
})
json_value = self.env['account.edi.format']._l10n_in_edi_generate_invoice_json(self.invoice_negative_more_than_max_line)
self.assertDictEqual(json_value, expected, "Indian EDI with negative value more than max line sent json value is not matched")
| 62.146067 | 16,593 |
2,990 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
from odoo import api, fields, models, _
from odoo.exceptions import UserError
class AccountMove(models.Model):
_inherit = "account.move"
l10n_in_edi_cancel_reason = fields.Selection(selection=[
("1", "Duplicate"),
("2", "Data Entry Mistake"),
("3", "Order Cancelled"),
("4", "Others"),
], string="Cancel reason", copy=False)
l10n_in_edi_cancel_remarks = fields.Char("Cancel remarks", copy=False)
l10n_in_edi_show_cancel = fields.Boolean(compute="_compute_l10n_in_edi_show_cancel", string="E-invoice(IN) is sent?")
@api.depends('edi_document_ids')
def _compute_l10n_in_edi_show_cancel(self):
for invoice in self:
invoice.l10n_in_edi_show_cancel = bool(invoice.edi_document_ids.filtered(
lambda i: i.edi_format_id.code == "in_einvoice_1_03"
and i.state in ("sent", "to_cancel", "cancelled")
))
def button_cancel_posted_moves(self):
"""Mark the edi.document related to this move to be canceled."""
reason_and_remarks_not_set = self.env["account.move"]
for move in self:
send_l10n_in_edi = move.edi_document_ids.filtered(lambda doc: doc.edi_format_id.code == "in_einvoice_1_03")
# check submitted E-invoice does not have reason and remarks
# because it's needed to cancel E-invoice
if send_l10n_in_edi and (not move.l10n_in_edi_cancel_reason or not move.l10n_in_edi_cancel_remarks):
reason_and_remarks_not_set += move
if reason_and_remarks_not_set:
raise UserError(_(
"To cancel E-invoice set cancel reason and remarks at Other info tab in invoices: \n%s",
("\n".join(reason_and_remarks_not_set.mapped("name"))),
))
return super().button_cancel_posted_moves()
def _get_l10n_in_edi_response_json(self):
self.ensure_one()
l10n_in_edi = self.edi_document_ids.filtered(lambda i: i.edi_format_id.code == "in_einvoice_1_03"
and i.state in ("sent", "to_cancel"))
if l10n_in_edi:
return json.loads(l10n_in_edi.attachment_id.raw.decode("utf-8"))
else:
return {}
@api.model
def _l10n_in_edi_is_managing_invoice_negative_lines_allowed(self):
""" Negative lines are not allowed by the Indian government making some features unavailable like sale_coupon
or global discounts. This method allows odoo to distribute the negative discount lines to each others lines
with same HSN code making such features available even for Indian people.
:return: True if odoo needs to distribute the negative discount lines, False otherwise.
"""
param_name = 'l10n_in_edi.manage_invoice_negative_lines'
return bool(self.env['ir.config_parameter'].sudo().get_param(param_name))
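    # Illustrative opt-in (as done in the test setup of this module): the feature is
    # enabled through a system parameter:
    #   env['ir.config_parameter'].sudo().set_param(
    #       'l10n_in_edi.manage_invoice_negative_lines', True)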
| 47.460317 | 2,990 |
37,107 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
import json
import pytz
import markupsafe
from collections import defaultdict
from odoo import models, fields, api, _
from odoo.tools import html_escape, float_is_zero, float_compare
from odoo.exceptions import AccessError, ValidationError
from odoo.addons.iap import jsonrpc
import logging
_logger = logging.getLogger(__name__)
DEFAULT_IAP_ENDPOINT = "https://l10n-in-edi.api.odoo.com"
DEFAULT_IAP_TEST_ENDPOINT = "https://l10n-in-edi-demo.api.odoo.com"
class AccountEdiFormat(models.Model):
_inherit = "account.edi.format"
def _is_enabled_by_default_on_journal(self, journal):
self.ensure_one()
if self.code == "in_einvoice_1_03":
return journal.company_id.country_id.code == 'IN'
return super()._is_enabled_by_default_on_journal(journal)
def _is_required_for_invoice(self, invoice):
self.ensure_one()
if self.code == "in_einvoice_1_03":
return invoice.is_sale_document() and invoice.country_code == 'IN' and invoice.l10n_in_gst_treatment in (
"regular",
"composition",
"overseas",
"special_economic_zone",
"deemed_export",
)
return super()._is_required_for_invoice(invoice)
def _needs_web_services(self):
self.ensure_one()
return self.code == "in_einvoice_1_03" or super()._needs_web_services()
def _get_invoice_edi_content(self, move):
if self.code != "in_einvoice_1_03":
return super()._get_invoice_edi_content(move)
json_dump = json.dumps(self._l10n_in_edi_generate_invoice_json(move))
return json_dump.encode()
def _l10n_in_edi_extract_digits(self, string):
if not string:
return string
matches = re.findall(r"\d+", string)
result = "".join(matches)
return result
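    # Illustration: only digit runs are kept, e.g. "+91 (79) 4040-2424" -> "917940402424"
    # and "01111-ab" -> "01111"; falsy inputs are returned unchanged.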
def _check_move_configuration(self, move):
if self.code != "in_einvoice_1_03":
return super()._check_move_configuration(move)
error_message = []
error_message += self._l10n_in_validate_partner(move.partner_id)
error_message += self._l10n_in_validate_partner(move.company_id.partner_id, is_company=True)
if not re.match("^.{1,16}$", move.name):
error_message.append(_("Invoice number should not be more than 16 characters"))
for line in move.invoice_line_ids.filtered(lambda line: not (line.display_type or line.is_rounding_line)):
if line.price_subtotal < 0:
# Line having a negative amount is not allowed.
if not move._l10n_in_edi_is_managing_invoice_negative_lines_allowed():
raise ValidationError(_("Invoice lines having a negative amount are not allowed to generate the IRN. "
"Please create a credit note instead."))
if line.product_id:
hsn_code = self._l10n_in_edi_extract_digits(line.product_id.l10n_in_hsn_code)
if not hsn_code:
error_message.append(_("HSN code is not set in product %s", line.product_id.name))
elif not re.match("^[0-9]+$", hsn_code):
error_message.append(_(
"Invalid HSN Code (%s) in product %s", hsn_code, line.product_id.name
))
else:
error_message.append(_("product is required to get HSN code"))
return error_message
def _l10n_in_edi_get_iap_buy_credits_message(self, company):
base_url = "https://iap-sandbox.odoo.com/iap/1/credit" if not company.sudo().l10n_in_edi_production_env else ""
url = self.env["iap.account"].get_credits_url(service_name="l10n_in_edi", base_url=base_url)
return markupsafe.Markup("""<p><b>%s</b></p><p>%s <a href="%s">%s</a></p>""") % (
_("You have insufficient credits to send this document!"),
_("Please buy more credits and retry: "),
url,
_("Buy Credits")
)
def _post_invoice_edi(self, invoices):
if self.code != "in_einvoice_1_03":
return super()._post_invoice_edi(invoices)
response = {}
res = {}
generate_json = self._l10n_in_edi_generate_invoice_json(invoices)
response = self._l10n_in_edi_generate(invoices.company_id, generate_json)
if response.get("error"):
error = response["error"]
error_codes = [e.get("code") for e in error]
if "1005" in error_codes:
                # Invalid token error: create a new token and send the generate request again.
                # This happens when authenticate is called from another odoo instance with the same credentials (e.g. Demo/Test)
authenticate_response = self._l10n_in_edi_authenticate(invoices.company_id)
if not authenticate_response.get("error"):
error = []
response = self._l10n_in_edi_generate(invoices.company_id, generate_json)
if response.get("error"):
error = response["error"]
error_codes = [e.get("code") for e in error]
if "2150" in error_codes:
# Get IRN by details in case of IRN is already generated
# this happens when timeout from the Government portal but IRN is generated
response = self._l10n_in_edi_get_irn_by_details(invoices.company_id, {
"doc_type": invoices.move_type == "out_refund" and "CRN" or "INV",
"doc_num": invoices.name,
"doc_date": invoices.invoice_date and invoices.invoice_date.strftime("%d/%m/%Y") or False,
})
if not response.get("error"):
error = []
odoobot = self.env.ref("base.partner_root")
invoices.message_post(author_id=odoobot.id, body=_(
"Somehow this invoice had been submited to government before." \
"<br/>Normally, this should not happen too often" \
"<br/>Just verify value of invoice by uploade json to government website " \
"<a href='https://einvoice1.gst.gov.in/Others/VSignedInvoice'>here<a>."
))
if "no-credit" in error_codes:
res[invoices] = {
"success": False,
"error": self._l10n_in_edi_get_iap_buy_credits_message(invoices.company_id),
"blocking_level": "error",
}
elif error:
error_message = "<br/>".join(["[%s] %s" % (e.get("code"), html_escape(e.get("message"))) for e in error])
res[invoices] = {
"success": False,
"error": error_message,
"blocking_level": ("404" in error_codes) and "warning" or "error",
}
if not response.get("error"):
json_dump = json.dumps(response.get("data"))
json_name = "%s_einvoice.json" % (invoices.name.replace("/", "_"))
attachment = self.env["ir.attachment"].create({
"name": json_name,
"raw": json_dump.encode(),
"res_model": "account.move",
"res_id": invoices.id,
"mimetype": "application/json",
})
res[invoices] = {"success": True, "attachment": attachment}
return res
def _cancel_invoice_edi(self, invoices):
if self.code != "in_einvoice_1_03":
return super()._cancel_invoice_edi(invoices)
res = {}
for invoice in invoices:
l10n_in_edi_response_json = invoice._get_l10n_in_edi_response_json()
cancel_json = {
"Irn": l10n_in_edi_response_json.get("Irn"),
"CnlRsn": invoice.l10n_in_edi_cancel_reason,
"CnlRem": invoice.l10n_in_edi_cancel_remarks,
}
response = self._l10n_in_edi_cancel(invoice.company_id, cancel_json)
if response.get("error"):
error = response["error"]
error_codes = [e.get("code") for e in error]
if "1005" in error_codes:
                # Invalid token error: create a new token and send the cancel request again.
                # This happens when authenticate is called from another odoo instance with the same credentials (e.g. Demo/Test)
authenticate_response = self._l10n_in_edi_authenticate(invoice.company_id)
if not authenticate_response.get("error"):
error = []
response = self._l10n_in_edi_cancel(invoice.company_id, cancel_json)
if response.get("error"):
error = response["error"]
error_codes = [e.get("code") for e in error]
if "9999" in error_codes:
response = {}
odoobot = self.env.ref("base.partner_root")
                    invoice.message_post(author_id=odoobot.id, body=_(
                        "Somehow this invoice had already been cancelled on the government portal." \
                        "<br/>Normally, this should not happen too often." \
                        "<br/>Just verify by logging into the government website " \
                        "<a href='https://einvoice1.gst.gov.in'>here</a>."
))
if "no-credit" in error_codes:
res[invoice] = {
"success": False,
"error": self._l10n_in_edi_get_iap_buy_credits_message(invoice.company_id),
"blocking_level": "error",
}
else:
error_message = "<br/>".join(["[%s] %s" % (e.get("code"), html_escape(e.get("message"))) for e in error])
res[invoice] = {
"success": False,
"error": error_message,
"blocking_level": ("404" in error_codes) and "warning" or "error",
}
if not response.get("error"):
json_dump = json.dumps(response.get("data", {}))
json_name = "%s_cancel_einvoice.json" % (invoice.name.replace("/", "_"))
attachment = self.env["ir.attachment"].create({
"name": json_name,
"raw": json_dump.encode(),
"res_model": "account.move",
"res_id": invoice.id,
"mimetype": "application/json",
})
res[invoice] = {"success": True, "attachment": attachment}
return res
def _l10n_in_validate_partner(self, partner, is_company=False):
self.ensure_one()
message = []
if not re.match("^.{3,100}$", partner.street or ""):
message.append(_("\n- Street required min 3 and max 100 characters"))
if partner.street2 and not re.match("^.{3,100}$", partner.street2):
message.append(_("\n- Street2 should be min 3 and max 100 characters"))
if not re.match("^.{3,100}$", partner.city or ""):
message.append(_("\n- City required min 3 and max 100 characters"))
if not re.match("^.{3,50}$", partner.state_id.name or ""):
message.append(_("\n- State required min 3 and max 50 characters"))
if partner.country_id.code == "IN" and not re.match("^[0-9]{6,}$", partner.zip or ""):
message.append(_("\n- Zip code required 6 digits"))
if partner.phone and not re.match("^[0-9]{10,12}$",
self._l10n_in_edi_extract_digits(partner.phone)
):
message.append(_("\n- Mobile number should be minimum 10 or maximum 12 digits"))
if partner.email and (
not re.match(r"^[a-zA-Z0-9+_.-]+@[a-zA-Z0-9.-]+$", partner.email)
or not re.match("^.{6,100}$", partner.email)
):
message.append(_("\n- Email address should be valid and not more then 100 characters"))
return message
def _get_l10n_in_edi_saler_buyer_party(self, move):
return {
"seller_details": move.company_id.partner_id,
"dispatch_details": move._l10n_in_get_warehouse_address() or move.company_id.partner_id,
"buyer_details": move.partner_id,
"ship_to_details": move._l10n_in_get_shipping_partner(),
}
@api.model
def _get_l10n_in_edi_partner_details(self, partner, set_vat=True, set_phone_and_email=True,
is_overseas=False, pos_state_id=False):
"""
Create the dictionary based partner details
if set_vat is true then, vat(GSTIN) and legal name(LglNm) is added
if set_phone_and_email is true then phone and email is add
if set_pos is true then state code from partner or passed state_id is added as POS(place of supply)
if is_overseas is true then pin is 999999 and GSTIN(vat) is URP and Stcd is .
if pos_state_id is passed then we use set POS
"""
partner_details = {
"Addr1": partner.street or "",
"Loc": partner.city or "",
"Pin": int(self._l10n_in_edi_extract_digits(partner.zip)),
"Stcd": partner.state_id.l10n_in_tin or "",
}
if partner.street2:
partner_details.update({"Addr2": partner.street2})
if set_phone_and_email:
if partner.email:
partner_details.update({"Em": partner.email})
if partner.phone:
partner_details.update({"Ph": self._l10n_in_edi_extract_digits(partner.phone)})
if pos_state_id:
partner_details.update({"POS": pos_state_id.l10n_in_tin or ""})
if set_vat:
partner_details.update({
"LglNm": partner.commercial_partner_id.name,
"GSTIN": partner.vat or "URP",
})
else:
partner_details.update({"Nm": partner.name or partner.commercial_partner_id.name})
        # If no country is set, we assume India, so this heuristic may not be fully correct
if is_overseas and (not partner.country_id or partner.country_id.code != 'IN'):
partner_details.update({
"GSTIN": "URP",
"Pin": 999999,
"Stcd": "96",
"POS": "96",
})
return partner_details
@api.model
def _l10n_in_round_value(self, amount, precision_digits=2):
"""
This method is call for rounding.
If anything is wrong with rounding then we quick fix in method
"""
value = round(amount, precision_digits)
# avoid -0.0
return value if value else 0.0
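    # Illustrative doctest-style sketch of the helper above:
    #   >>> self._l10n_in_round_value(45.594)
    #   45.59
    #   >>> self._l10n_in_round_value(-0.0001)  # -0.0 is normalised to 0.0
    #   0.0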
def _get_l10n_in_edi_line_details(self, index, line, line_tax_details, sign):
"""
Create the dictionary with line details
return {
account.move.line('1'): {....},
account.move.line('2'): {....},
....
}
"""
tax_details_by_code = self._get_l10n_in_tax_details_by_line_code(line_tax_details.get("tax_details", {}))
quantity = line.quantity
full_discount_or_zero_quantity = line.discount == 100.00 or float_is_zero(quantity, 3)
if full_discount_or_zero_quantity:
unit_price_in_inr = line.currency_id._convert(
line.price_unit,
line.company_currency_id,
line.company_id,
line.date or fields.Date.context_today(self)
)
else:
unit_price_in_inr = ((line.balance / (1 - (line.discount / 100))) / quantity) * sign
if unit_price_in_inr < 0 and quantity < 0:
# If unit price and quantity both is negative then
# We set unit price and quantity as positive because
# government does not accept negative in qty or unit price
unit_price_in_inr = unit_price_in_inr * -1
quantity = quantity * -1
return {
"SlNo": str(index),
"PrdDesc": line.name.replace("\n", ""),
"IsServc": line.product_id.type == "service" and "Y" or "N",
"HsnCd": self._l10n_in_edi_extract_digits(line.product_id.l10n_in_hsn_code),
"Qty": self._l10n_in_round_value(quantity or 0.0, 3),
"Unit": line.product_uom_id.l10n_in_code and line.product_uom_id.l10n_in_code.split("-")[0] or "OTH",
            # Unit price in company currency, tax excluded, so it is different from price_unit
"UnitPrice": self._l10n_in_round_value(unit_price_in_inr, 3),
# total amount is before discount
"TotAmt": self._l10n_in_round_value(unit_price_in_inr * quantity),
"Discount": self._l10n_in_round_value((unit_price_in_inr * quantity) * (line.discount / 100)),
"AssAmt": self._l10n_in_round_value(line.balance * sign),
"GstRt": self._l10n_in_round_value(tax_details_by_code.get("igst_rate", 0.00) or (
tax_details_by_code.get("cgst_rate", 0.00) + tax_details_by_code.get("sgst_rate", 0.00)), 3),
"IgstAmt": self._l10n_in_round_value(tax_details_by_code.get("igst_amount", 0.00) * sign),
"CgstAmt": self._l10n_in_round_value(tax_details_by_code.get("cgst_amount", 0.00) * sign),
"SgstAmt": self._l10n_in_round_value(tax_details_by_code.get("sgst_amount", 0.00) * sign),
"CesRt": self._l10n_in_round_value(tax_details_by_code.get("cess_rate", 0.00), 3),
"CesAmt": self._l10n_in_round_value(tax_details_by_code.get("cess_amount", 0.00) * sign),
"CesNonAdvlAmt": self._l10n_in_round_value(
tax_details_by_code.get("cess_non_advol_amount", 0.00) * sign),
"StateCesRt": self._l10n_in_round_value(tax_details_by_code.get("state_cess_rate_amount", 0.00), 3),
"StateCesAmt": self._l10n_in_round_value(tax_details_by_code.get("state_cess_amount", 0.00) * sign),
"StateCesNonAdvlAmt": self._l10n_in_round_value(
tax_details_by_code.get("state_cess_non_advol_amount", 0.00) * sign),
"OthChrg": self._l10n_in_round_value(tax_details_by_code.get("other_amount", 0.00) * sign),
"TotItemVal": self._l10n_in_round_value(
(line.balance + line_tax_details.get("tax_amount", 0.00)) * sign),
}
def _l10n_in_edi_generate_invoice_json_managing_negative_lines(self, invoice, json_payload):
"""Set negative lines against positive lines as discount with same HSN code and tax rate
With negative lines
product name | hsn code | unit price | qty | discount | total
=============================================================
product A | 123456 | 1000 | 1 | 100 | 900
product B | 123456 | 1500 | 2 | 0 | 3000
Discount | 123456 | -300 | 1 | 0 | -300
Converted to without negative lines
product name | hsn code | unit price | qty | discount | total
=============================================================
product A | 123456 | 1000 | 1 | 100 | 900
product B | 123456 | 1500 | 2 | 300 | 2700
        Fully discounted lines are kept with zero amounts, though.
"""
def discount_group_key(line_vals):
return "%s-%s"%(line_vals['HsnCd'], line_vals['GstRt'])
def put_discount_on(discount_line_vals, other_line_vals):
discount = discount_line_vals['AssAmt'] * -1
discount_to_allow = other_line_vals['AssAmt']
if float_compare(discount_to_allow, discount, precision_rounding=invoice.currency_id.rounding) < 0:
                # Update the discount line; needed when the discount is more than the max line,
                # i.e. the remaining discount is not zero
discount_line_vals.update({
'AssAmt': self._l10n_in_round_value(discount_line_vals['AssAmt'] + other_line_vals['AssAmt']),
'IgstAmt': self._l10n_in_round_value(discount_line_vals['IgstAmt'] + other_line_vals['IgstAmt']),
'CgstAmt': self._l10n_in_round_value(discount_line_vals['CgstAmt'] + other_line_vals['CgstAmt']),
'SgstAmt': self._l10n_in_round_value(discount_line_vals['SgstAmt'] + other_line_vals['SgstAmt']),
'CesAmt': self._l10n_in_round_value(discount_line_vals['CesAmt'] + other_line_vals['CesAmt']),
'CesNonAdvlAmt': self._l10n_in_round_value(discount_line_vals['CesNonAdvlAmt'] + other_line_vals['CesNonAdvlAmt']),
'StateCesAmt': self._l10n_in_round_value(discount_line_vals['StateCesAmt'] + other_line_vals['StateCesAmt']),
'StateCesNonAdvlAmt': self._l10n_in_round_value(discount_line_vals['StateCesNonAdvlAmt'] + other_line_vals['StateCesNonAdvlAmt']),
'OthChrg': self._l10n_in_round_value(discount_line_vals['OthChrg'] + other_line_vals['OthChrg']),
'TotItemVal': self._l10n_in_round_value(discount_line_vals['TotItemVal'] + other_line_vals['TotItemVal']),
})
other_line_vals.update({
'Discount': self._l10n_in_round_value(other_line_vals['Discount'] + discount_to_allow),
'AssAmt': 0.00,
'IgstAmt': 0.00,
'CgstAmt': 0.00,
'SgstAmt': 0.00,
'CesAmt': 0.00,
'CesNonAdvlAmt': 0.00,
'StateCesAmt': 0.00,
'StateCesNonAdvlAmt': 0.00,
'OthChrg': 0.00,
'TotItemVal': 0.00,
})
return False
other_line_vals.update({
'Discount': self._l10n_in_round_value(other_line_vals['Discount'] + discount),
'AssAmt': self._l10n_in_round_value(other_line_vals['AssAmt'] + discount_line_vals['AssAmt']),
'IgstAmt': self._l10n_in_round_value(other_line_vals['IgstAmt'] + discount_line_vals['IgstAmt']),
'CgstAmt': self._l10n_in_round_value(other_line_vals['CgstAmt'] + discount_line_vals['CgstAmt']),
'SgstAmt': self._l10n_in_round_value(other_line_vals['SgstAmt'] + discount_line_vals['SgstAmt']),
'CesAmt': self._l10n_in_round_value(other_line_vals['CesAmt'] + discount_line_vals['CesAmt']),
'CesNonAdvlAmt': self._l10n_in_round_value(other_line_vals['CesNonAdvlAmt'] + discount_line_vals['CesNonAdvlAmt']),
'StateCesAmt': self._l10n_in_round_value(other_line_vals['StateCesAmt'] + discount_line_vals['StateCesAmt']),
'StateCesNonAdvlAmt': self._l10n_in_round_value(other_line_vals['StateCesNonAdvlAmt'] + discount_line_vals['StateCesNonAdvlAmt']),
'OthChrg': self._l10n_in_round_value(other_line_vals['OthChrg'] + discount_line_vals['OthChrg']),
'TotItemVal': self._l10n_in_round_value(other_line_vals['TotItemVal'] + discount_line_vals['TotItemVal']),
})
return True
discount_lines = []
        for discount_line in json_payload['ItemList'].copy():  # copy to be sure not to skip items while removing
if discount_line['AssAmt'] < 0:
discount_lines.append(discount_line)
json_payload['ItemList'].remove(discount_line)
if not discount_lines:
return json_payload
lines_grouped_and_sorted = defaultdict(list)
for line in sorted(json_payload['ItemList'], key=lambda i: i['AssAmt'], reverse=True):
lines_grouped_and_sorted[discount_group_key(line)].append(line)
for discount_line in discount_lines:
apply_discount_on_lines = lines_grouped_and_sorted.get(discount_group_key(discount_line), [])
for apply_discount_on in apply_discount_on_lines:
if put_discount_on(discount_line, apply_discount_on):
break
return json_payload
def _l10n_in_edi_generate_invoice_json(self, invoice):
tax_details = self._l10n_in_prepare_edi_tax_details(invoice)
saler_buyer = self._get_l10n_in_edi_saler_buyer_party(invoice)
tax_details_by_code = self._get_l10n_in_tax_details_by_line_code(tax_details.get("tax_details", {}))
sign = invoice.is_inbound() and -1 or 1
is_intra_state = invoice.l10n_in_state_id == invoice.company_id.state_id
is_overseas = invoice.l10n_in_gst_treatment == "overseas"
lines = invoice.invoice_line_ids.filtered(lambda line: not (line.display_type or line.is_rounding_line))
invoice_line_tax_details = tax_details.get("invoice_line_tax_details")
json_payload = {
"Version": "1.1",
"TranDtls": {
"TaxSch": "GST",
"SupTyp": self._l10n_in_get_supply_type(invoice, tax_details_by_code),
"RegRev": tax_details_by_code.get("is_reverse_charge") and "Y" or "N",
"IgstOnIntra": is_intra_state and tax_details_by_code.get("igst") and "Y" or "N"},
"DocDtls": {
"Typ": invoice.move_type == "out_refund" and "CRN" or "INV",
"No": invoice.name,
"Dt": invoice.invoice_date.strftime("%d/%m/%Y")},
"SellerDtls": self._get_l10n_in_edi_partner_details(saler_buyer.get("seller_details")),
"BuyerDtls": self._get_l10n_in_edi_partner_details(
saler_buyer.get("buyer_details"), pos_state_id=invoice.l10n_in_state_id, is_overseas=is_overseas),
"ItemList": [
self._get_l10n_in_edi_line_details(index, line, invoice_line_tax_details.get(line, {}), sign)
for index, line in enumerate(lines, start=1)
],
"ValDtls": {
"AssVal": self._l10n_in_round_value(tax_details.get("base_amount") * sign),
"CgstVal": self._l10n_in_round_value(tax_details_by_code.get("cgst_amount", 0.00) * sign),
"SgstVal": self._l10n_in_round_value(tax_details_by_code.get("sgst_amount", 0.00) * sign),
"IgstVal": self._l10n_in_round_value(tax_details_by_code.get("igst_amount", 0.00) * sign),
"CesVal": self._l10n_in_round_value((
tax_details_by_code.get("cess_amount", 0.00)
+ tax_details_by_code.get("cess_non_advol_amount", 0.00)) * sign,
),
"StCesVal": self._l10n_in_round_value((
tax_details_by_code.get("state_cess_amount", 0.00)
+ tax_details_by_code.get("state_cess_non_advol_amount", 0.00)) * sign,
),
"RndOffAmt": self._l10n_in_round_value(
sum(line.balance for line in invoice.invoice_line_ids if line.is_rounding_line)),
"TotInvVal": self._l10n_in_round_value(
(tax_details.get("base_amount") + tax_details.get("tax_amount")) * sign),
},
}
if invoice.company_currency_id != invoice.currency_id:
json_payload["ValDtls"].update({
"TotInvValFc": self._l10n_in_round_value(
(tax_details.get("base_amount_currency") + tax_details.get("tax_amount_currency")) * sign)
})
if saler_buyer.get("seller_details") != saler_buyer.get("dispatch_details"):
json_payload.update({
"DispDtls": self._get_l10n_in_edi_partner_details(saler_buyer.get("dispatch_details"),
set_vat=False, set_phone_and_email=False)
})
if saler_buyer.get("buyer_details") != saler_buyer.get("ship_to_details"):
json_payload.update({
"ShipDtls": self._get_l10n_in_edi_partner_details(saler_buyer.get("ship_to_details"), is_overseas=is_overseas)
})
if is_overseas:
json_payload.update({
"ExpDtls": {
"RefClm": tax_details_by_code.get("igst") and "Y" or "N",
"ForCur": invoice.currency_id.name,
"CntCode": saler_buyer.get("buyer_details").country_id.code or "",
}
})
if invoice.l10n_in_shipping_bill_number:
json_payload["ExpDtls"].update({
"ShipBNo": invoice.l10n_in_shipping_bill_number,
})
if invoice.l10n_in_shipping_bill_date:
json_payload["ExpDtls"].update({
"ShipBDt": invoice.l10n_in_shipping_bill_date.strftime("%d/%m/%Y"),
})
if invoice.l10n_in_shipping_port_code_id:
json_payload["ExpDtls"].update({
"Port": invoice.l10n_in_shipping_port_code_id.code
})
if not invoice._l10n_in_edi_is_managing_invoice_negative_lines_allowed():
return json_payload
return self._l10n_in_edi_generate_invoice_json_managing_negative_lines(invoice, json_payload)
@api.model
def _l10n_in_prepare_edi_tax_details(self, move, in_foreign=False, filter_invl_to_apply=None):
def l10n_in_grouping_key_generator(tax_values):
base_line = tax_values["base_line_id"]
tax_line = tax_values["tax_line_id"]
line_code = "other"
tax_report_line_sc = self.env.ref("l10n_in.tax_report_line_state_cess", False)
if any(tag in tax_line.tax_tag_ids for tag in self.env.ref("l10n_in.tax_report_line_cess").sudo().tag_ids):
if tax_line.tax_line_id.amount_type != "percent":
line_code = "cess_non_advol"
else:
line_code = "cess"
elif tax_report_line_sc and any(tag in tax_line.tax_tag_ids for tag in tax_report_line_sc.sudo().tag_ids):
if tax_line.tax_line_id.amount_type != "percent":
line_code = "state_cess_non_advol"
else:
line_code = "state_cess"
else:
for gst in ["cgst", "sgst", "igst"]:
tag_ids = self.env.ref("l10n_in.tax_report_line_%s"%(gst)).sudo().tag_ids
if any(tag in tax_line.tax_tag_ids for tag in tag_ids):
line_code = gst
return {
"tax": tax_values["tax_id"],
"base_product_id": base_line.product_id,
"tax_product_id": tax_line.product_id,
"base_product_uom_id": base_line.product_uom_id,
"tax_product_uom_id": tax_line.product_uom_id,
"line_code": line_code,
}
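        # Cash-rounding lines are reported separately under "RndOffAmt", so
        # they are excluded from the tax details.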
def l10n_in_filter_to_apply(tax_values):
if tax_values["base_line_id"].is_rounding_line:
return False
return True
return move._prepare_edi_tax_details(
filter_to_apply=l10n_in_filter_to_apply,
grouping_key_generator=l10n_in_grouping_key_generator,
filter_invl_to_apply=filter_invl_to_apply,
)
@api.model
def _get_l10n_in_tax_details_by_line_code(self, tax_details):
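        # Aggregate the tax amounts per line code. For example, an invoice
        # taxed with 9% CGST + 9% SGST would produce keys such as
        # "cgst_rate", "cgst_amount", "cgst_amount_currency" and their "sgst"
        # counterparts. Note that "<code>_rate" keeps the rate of the first
        # tax encountered for a given code.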
l10n_in_tax_details = {}
for tax_detail in tax_details.values():
if tax_detail["tax"].l10n_in_reverse_charge:
l10n_in_tax_details.setdefault("is_reverse_charge", True)
l10n_in_tax_details.setdefault("%s_rate" % (tax_detail["line_code"]), tax_detail["tax"].amount)
l10n_in_tax_details.setdefault("%s_amount" % (tax_detail["line_code"]), 0.00)
l10n_in_tax_details.setdefault("%s_amount_currency" % (tax_detail["line_code"]), 0.00)
l10n_in_tax_details["%s_amount" % (tax_detail["line_code"])] += tax_detail["tax_amount"]
l10n_in_tax_details["%s_amount_currency" % (tax_detail["line_code"])] += tax_detail["tax_amount_currency"]
return l10n_in_tax_details
def _l10n_in_get_supply_type(self, move, tax_details_by_code):
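        # Map the GST treatment to the e-invoice supply type: exports and SEZ
        # supplies use EXPWP/SEZWP when IGST is paid and EXPWOP/SEZWOP
        # otherwise; deemed exports use DEXP; everything else falls back to B2B.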
supply_type = "B2B"
if move.l10n_in_gst_treatment in ("overseas", "special_economic_zone") and tax_details_by_code.get("igst_amount"):
            supply_type = "EXPWP" if move.l10n_in_gst_treatment == "overseas" else "SEZWP"
        elif move.l10n_in_gst_treatment in ("overseas", "special_economic_zone"):
            supply_type = "EXPWOP" if move.l10n_in_gst_treatment == "overseas" else "SEZWOP"
elif move.l10n_in_gst_treatment == "deemed_export":
supply_type = "DEXP"
return supply_type
#================================ API methods ===========================
@api.model
def _l10n_in_edi_no_config_response(self):
return {'error': [{
'code': '0',
'message': _(
"Unable to send e-Invoice."
"Create an API user in NIC portal, and set it using the top menu: Configuration > Settings."
)}
]}
@api.model
def _l10n_in_edi_get_token(self, company):
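        # Reuse the cached token while it is still valid; otherwise try to
        # re-authenticate with the stored credentials.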
sudo_company = company.sudo()
if sudo_company.l10n_in_edi_username and sudo_company._l10n_in_edi_token_is_valid():
return sudo_company.l10n_in_edi_token
elif sudo_company.l10n_in_edi_username and sudo_company.l10n_in_edi_password:
self._l10n_in_edi_authenticate(company)
return sudo_company.l10n_in_edi_token
return False
@api.model
def _l10n_in_edi_connect_to_server(self, company, url_path, params):
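        # All requests are proxied through the Odoo IAP service; the endpoint
        # can be overridden with the "l10n_in_edi.endpoint" system parameter.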
user_token = self.env["iap.account"].get("l10n_in_edi")
params.update({
"account_token": user_token.account_token,
"dbuuid": self.env["ir.config_parameter"].sudo().get_param("database.uuid"),
"username": company.sudo().l10n_in_edi_username,
"gstin": company.vat,
})
if company.sudo().l10n_in_edi_production_env:
default_endpoint = DEFAULT_IAP_ENDPOINT
else:
default_endpoint = DEFAULT_IAP_TEST_ENDPOINT
endpoint = self.env["ir.config_parameter"].sudo().get_param("l10n_in_edi.endpoint", default_endpoint)
url = "%s%s" % (endpoint, url_path)
try:
return jsonrpc(url, params=params, timeout=25)
except AccessError as e:
_logger.warning("Connection error: %s", e.args[0])
return {
"error": [{
"code": "404",
"message": _("Unable to connect to the online E-invoice service."
"The web service may be temporary down. Please try again in a moment.")
}]
}
@api.model
def _l10n_in_edi_authenticate(self, company):
params = {"password": company.sudo().l10n_in_edi_password}
response = self._l10n_in_edi_connect_to_server(company, url_path="/iap/l10n_in_edi/1/authenticate", params=params)
        # The token expiry is returned in Indian Standard Time (UTC+05:30),
        # so convert it to UTC before storing it in Odoo.
if "data" in response:
tz = pytz.timezone("Asia/Kolkata")
local_time = tz.localize(fields.Datetime.to_datetime(response["data"]["TokenExpiry"]))
utc_time = local_time.astimezone(pytz.utc)
company.sudo().l10n_in_edi_token_validity = fields.Datetime.to_string(utc_time)
company.sudo().l10n_in_edi_token = response["data"]["AuthToken"]
return response
@api.model
def _l10n_in_edi_generate(self, company, json_payload):
token = self._l10n_in_edi_get_token(company)
if not token:
return self._l10n_in_edi_no_config_response()
params = {
"auth_token": token,
"json_payload": json_payload,
}
return self._l10n_in_edi_connect_to_server(company, url_path="/iap/l10n_in_edi/1/generate", params=params)
@api.model
def _l10n_in_edi_get_irn_by_details(self, company, json_payload):
token = self._l10n_in_edi_get_token(company)
if not token:
return self._l10n_in_edi_no_config_response()
params = {
"auth_token": token,
}
params.update(json_payload)
return self._l10n_in_edi_connect_to_server(
company,
url_path="/iap/l10n_in_edi/1/getirnbydocdetails",
params=params,
)
@api.model
def _l10n_in_edi_cancel(self, company, json_payload):
token = self._l10n_in_edi_get_token(company)
if not token:
return self._l10n_in_edi_no_config_response()
params = {
"auth_token": token,
"json_payload": json_payload,
}
return self._l10n_in_edi_connect_to_server(company, url_path="/iap/l10n_in_edi/1/cancel", params=params)
| 53.161891 | 37,107 |
1,000 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ResCompany(models.Model):
_inherit = "res.company"
l10n_in_edi_username = fields.Char("E-invoice (IN) Username", groups="base.group_system")
l10n_in_edi_password = fields.Char("E-invoice (IN) Password", groups="base.group_system")
l10n_in_edi_token = fields.Char("E-invoice (IN) Token", groups="base.group_system")
l10n_in_edi_token_validity = fields.Datetime("E-invoice (IN) Valid Until", groups="base.group_system")
l10n_in_edi_production_env = fields.Boolean(
string="E-invoice (IN) Is production OSE environment",
help="Enable the use of production credentials",
groups="base.group_system",
)
def _l10n_in_edi_token_is_valid(self):
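        # A token is considered valid as long as its expiry date, stored in
        # UTC, is in the future.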
self.ensure_one()
if self.l10n_in_edi_token and self.l10n_in_edi_token_validity > fields.Datetime.now():
return True
return False
| 41.666667 | 1,000 |
982 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields, _
from odoo.exceptions import UserError
class ResConfigSettings(models.TransientModel):
_inherit = "res.config.settings"
l10n_in_edi_username = fields.Char("Indian EDI username", related="company_id.l10n_in_edi_username", readonly=False)
l10n_in_edi_password = fields.Char("Indian EDI password", related="company_id.l10n_in_edi_password", readonly=False)
l10n_in_edi_production_env = fields.Boolean(
string="Indian EDI Testing Environment",
related="company_id.l10n_in_edi_production_env",
readonly=False
)
def l10n_in_edi_test(self):
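        # Trigger an authentication round-trip against the NIC service to
        # validate the configured credentials.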
self.env["account.edi.format"]._l10n_in_edi_authenticate(self.company_id)
if not self.company_id.sudo()._l10n_in_edi_token_is_valid():
raise UserError(_("Incorrect username or password, or the GST number on company does not match."))
| 44.636364 | 982 |
1,664 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Event Exhibitors',
'category': 'Marketing/Events',
'sequence': 1004,
'version': '1.1',
'summary': 'Event: manage sponsors and exhibitors',
'website': 'https://www.odoo.com/app/events',
'description': "",
'depends': [
'website_event',
'website_jitsi',
],
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'data/event_sponsor_data.xml',
'report/website_event_exhibitor_reports.xml',
'report/website_event_exhibitor_templates.xml',
'views/event_templates_sponsor.xml',
'views/event_sponsor_views.xml',
'views/event_event_views.xml',
'views/event_exhibitor_templates_list.xml',
'views/event_exhibitor_templates_page.xml',
'views/event_type_views.xml',
'views/event_menus.xml',
],
'demo': [
'data/event_demo.xml',
'data/event_sponsor_demo.xml',
],
'application': False,
'installable': True,
'assets': {
'web.assets_frontend': [
'website_event_exhibitor/static/src/scss/event_templates_sponsor.scss',
'website_event_exhibitor/static/src/scss/event_exhibitor_templates.scss',
'website_event_exhibitor/static/src/js/event_exhibitor_connect.js',
'website_event_exhibitor/static/src/js/event_sponsor_search.js',
],
'web.report_assets_common': [
'/website_event_exhibitor/static/src/scss/event_full_page_ticket_report.scss',
],
},
'license': 'LGPL-3',
}
| 33.959184 | 1,664 |
3,947 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from unittest.mock import patch
from odoo.addons.website_event_exhibitor.tests.common import TestEventExhibitorCommon
from odoo.fields import Datetime as FieldsDatetime
from odoo.tests.common import users
class TestSponsorData(TestEventExhibitorCommon):
@classmethod
def setUpClass(cls):
super(TestSponsorData, cls).setUpClass()
cls.sponsor_0.write({
'hour_from': 8.0,
'hour_to': 18.0,
})
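        # Freeze fields.Datetime in the exhibitor module so that computed
        # fields such as is_in_opening_hours are evaluated against
        # cls.reference_now instead of the real current time.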
cls.wevent_exhib_dt = patch(
'odoo.addons.website_event_exhibitor.models.event_sponsor.fields.Datetime',
wraps=FieldsDatetime
)
cls.mock_wevent_exhib_dt = cls.wevent_exhib_dt.start()
cls.mock_wevent_exhib_dt.now.return_value = cls.reference_now
cls.addClassCleanup(cls.wevent_exhib_dt.stop)
@users('user_eventmanager')
def test_event_date_computation(self):
""" Test date computation. Pay attention that mocks returns UTC values, meaning
we have to take into account Europe/Brussels offset """
event = self.env['event.event'].browse(self.event_0.id)
sponsor = self.env['event.sponsor'].browse(self.sponsor_0.id)
event.invalidate_cache(fnames=['is_ongoing'])
self.assertTrue(sponsor.is_in_opening_hours)
self.assertTrue(event.is_ongoing)
# After hour_from (9 > 8)
self.mock_wevent_dt.now.return_value = datetime(2020, 7, 6, 7, 0, 0)
self.mock_wevent_exhib_dt.now.return_value = datetime(2020, 7, 6, 7, 0, 0)
event.invalidate_cache(fnames=['is_ongoing'])
sponsor.invalidate_cache(fnames=['is_in_opening_hours'])
self.assertTrue(sponsor.is_in_opening_hours)
self.assertTrue(event.is_ongoing)
# At hour_from (8 = 8)
self.mock_wevent_dt.now.return_value = datetime(2020, 7, 6, 6, 0, 0)
self.mock_wevent_exhib_dt.now.return_value = datetime(2020, 7, 6, 6, 0, 0)
event.invalidate_cache(fnames=['is_ongoing'])
sponsor.invalidate_cache(fnames=['is_in_opening_hours'])
self.assertTrue(sponsor.is_in_opening_hours)
self.assertTrue(event.is_ongoing)
# Started but not opened (7h59 < 8)
self.mock_wevent_dt.now.return_value = datetime(2020, 7, 6, 5, 59, 59)
self.mock_wevent_exhib_dt.now.return_value = datetime(2020, 7, 6, 5, 59, 59)
event.invalidate_cache(fnames=['is_ongoing'])
sponsor.invalidate_cache(fnames=['is_in_opening_hours'])
self.assertFalse(sponsor.is_in_opening_hours)
self.assertTrue(event.is_ongoing)
# Evening event is not in opening hours (20 > 18)
self.mock_wevent_dt.now.return_value = datetime(2020, 7, 6, 18, 0, 0)
self.mock_wevent_exhib_dt.now.return_value = datetime(2020, 7, 6, 18, 0, 0)
event.invalidate_cache(fnames=['is_ongoing'])
sponsor.invalidate_cache(fnames=['is_in_opening_hours'])
self.assertFalse(sponsor.is_in_opening_hours)
self.assertTrue(event.is_ongoing)
        # On the first day, the event has not started yet
self.mock_wevent_dt.now.return_value = datetime(2020, 7, 5, 6, 30, 0)
self.mock_wevent_exhib_dt.now.return_value = datetime(2020, 7, 5, 6, 30, 0)
event.invalidate_cache(fnames=['is_ongoing'])
sponsor.invalidate_cache(fnames=['is_in_opening_hours'])
self.assertFalse(sponsor.is_in_opening_hours)
self.assertFalse(event.is_ongoing)
        # On the last day, the event has already ended
self.mock_wevent_dt.now.return_value = datetime(2020, 7, 7, 13, 0, 1)
self.mock_wevent_exhib_dt.now.return_value = datetime(2020, 7, 7, 13, 0, 1)
event.invalidate_cache(fnames=['is_ongoing'])
sponsor.invalidate_cache(fnames=['is_in_opening_hours'])
self.assertFalse(sponsor.is_in_opening_hours)
self.assertFalse(event.is_ongoing)
| 45.367816 | 3,947 |
962 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.website_event.tests.common import TestEventOnlineCommon
class TestEventExhibitorCommon(TestEventOnlineCommon):
@classmethod
def setUpClass(cls):
super(TestEventExhibitorCommon, cls).setUpClass()
# Sponsorship data
cls.sponsor_type_0 = cls.env['event.sponsor.type'].create({
'name': 'GigaTop',
'sequence': 1,
})
cls.sponsor_0_partner = cls.env['res.partner'].create({
'name': 'EventSponsor',
'country_id': cls.env.ref('base.be').id,
'email': '[email protected]',
'phone': '04856112233',
})
cls.sponsor_0 = cls.env['event.sponsor'].create({
'partner_id': cls.sponsor_0_partner.id,
'event_id': cls.event_0.id,
'sponsor_type_id': cls.sponsor_type_0.id,
})
| 33.172414 | 962 |
560 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class EventType(models.Model):
_inherit = "event.type"
exhibitor_menu = fields.Boolean(
string='Showcase Exhibitors', compute='_compute_exhibitor_menu',
readonly=False, store=True,
help='Display exhibitors on website')
@api.depends('website_menu')
def _compute_exhibitor_menu(self):
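        # By default, exhibitors are showcased whenever the event type
        # enables the website menu.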
for event_type in self:
event_type.exhibitor_menu = event_type.website_menu
| 31.111111 | 560 |