Columns: size (int64, 0–304k) | ext (stringclasses, 1 value) | lang (stringclasses, 1 value) | branch (stringclasses, 1 value) | content (stringlengths, 0–304k) | avg_line_length (float64, 0–238) | max_line_length (int64, 0–304k)

size: 3,054 | ext: py | lang: PYTHON | branch: 15.0 | content:
from odoo import fields, models, api, _
from odoo.exceptions import UserError
EXEMPTION_REASON_CODES = [
('VATEX-SA-29', 'VATEX-SA-29 Financial services mentioned in Article 29 of the VAT Regulations.'),
('VATEX-SA-29-7', 'VATEX-SA-29-7 Life insurance services mentioned in Article 29 of the VAT Regulations.'),
('VATEX-SA-30', 'VATEX-SA-30 Real estate transactions mentioned in Article 30 of the VAT Regulations.'),
('VATEX-SA-32', 'VATEX-SA-32 Export of goods.'),
('VATEX-SA-33', 'VATEX-SA-33 Export of Services.'),
('VATEX-SA-34-1', 'VATEX-SA-34-1 The international transport of Goods.'),
('VATEX-SA-34-2', 'VATEX-SA-34-2 The international transport of Passengers.'),
('VATEX-SA-34-3', 'VATEX-SA-34-3 Services directly connected and incidental to a Supply of international passenger transport.'),
('VATEX-SA-34-4', 'VATEX-SA-34-4 Supply of a qualifying means of transport.'),
('VATEX-SA-34-5', 'VATEX-SA-34-5 Any services relating to Goods or passenger transportation, as defined in article twenty five of these Regulations.'),
('VATEX-SA-35', 'VATEX-SA-35 Medicines and medical equipment.'),
('VATEX-SA-36', 'VATEX-SA-36 Qualifying metals.'),
('VATEX-SA-EDU', 'VATEX-SA-EDU Private education to citizen.'),
('VATEX-SA-HEA', 'VATEX-SA-HEA Private healthcare to citizen.')
]
class AccountTax(models.Model):
_inherit = 'account.tax'
l10n_sa_is_retention = fields.Boolean("Is Retention", default=False,
help="Determines whether or not a tax counts as a Withholding Tax")
l10n_sa_exemption_reason_code = fields.Selection(string="Exemption Reason Code",
selection=EXEMPTION_REASON_CODES, help="Tax Exemption Reason Code (ZATCA)")
@api.onchange('amount')
def onchange_amount(self):
super().onchange_amount()
self.l10n_sa_is_retention = False
@api.constrains("l10n_sa_is_retention", "amount", "type_tax_use")
def _l10n_sa_constrain_is_retention(self):
for tax in self:
if tax.amount >= 0 and tax.l10n_sa_is_retention and tax.type_tax_use == 'sale':
raise UserError(_("Cannot set a tax to Retention if the amount is greater than or equal 0"))
class AccountTaxTemplate(models.Model):
_inherit = 'account.tax.template'
l10n_sa_is_retention = fields.Boolean("Is Retention", default=False,
help="Determines whether or not a tax counts as a Withholding Tax")
l10n_sa_exemption_reason_code = fields.Selection(string="Exemption Reason Code",
selection=EXEMPTION_REASON_CODES, help="Tax Exemption Reason Code (ZATCA)")
def _get_tax_vals(self, company, tax_template_to_tax):
# OVERRIDE
res = super()._get_tax_vals(company, tax_template_to_tax)
res['l10n_sa_is_retention'] = self.l10n_sa_is_retention
res['l10n_sa_exemption_reason_code'] = self.l10n_sa_exemption_reason_code
return res
avg_line_length: 52.655172 | max_line_length: 3,054

size: 10,347 | ext: py | lang: PYTHON | branch: 15.0 | content:
import uuid
import json
from markupsafe import Markup
from odoo import _, fields, models, api
from odoo.tools import float_repr
from datetime import datetime
from base64 import b64decode, b64encode
from lxml import etree
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
from cryptography.hazmat.backends import default_backend
from cryptography.x509 import load_der_x509_certificate
class AccountMove(models.Model):
_inherit = 'account.move'
l10n_sa_uuid = fields.Char(string='Document UUID (SA)', copy=False, help="Universally unique identifier of the Invoice")
l10n_sa_invoice_signature = fields.Char("Unsigned XML Signature", copy=False)
l10n_sa_chain_index = fields.Integer(
string="ZATCA chain index", copy=False, readonly=True,
help="Invoice index in chain, set if and only if an in-chain XML was submitted and did not error",
)
def _l10n_sa_is_simplified(self):
"""
Returns True if the customer is an individual, i.e. the invoice is B2C (simplified).
:return: bool
"""
self.ensure_one()
return self.partner_id.company_type == 'person'
@api.depends('amount_total_signed', 'amount_tax_signed', 'l10n_sa_confirmation_datetime', 'company_id',
'company_id.vat', 'journal_id', 'journal_id.l10n_sa_production_csid_json',
'l10n_sa_invoice_signature', 'l10n_sa_chain_index')
def _compute_qr_code_str(self):
""" Override to update QR code generation in accordance with ZATCA Phase 2"""
for move in self:
move.l10n_sa_qr_code_str = ''
if move.country_code == 'SA' and move.move_type in ('out_invoice', 'out_refund') and move.l10n_sa_chain_index:
edi_format = self.env.ref('l10n_sa_edi.edi_sa_zatca')
zatca_document = move.edi_document_ids.filtered(lambda d: d.edi_format_id == edi_format)
if move._l10n_sa_is_simplified():
x509_cert = json.loads(move.journal_id.l10n_sa_production_csid_json)['binarySecurityToken']
xml_content = self.env.ref('l10n_sa_edi.edi_sa_zatca')._l10n_sa_generate_zatca_template(move)
qr_code_str = move._l10n_sa_get_qr_code(move.journal_id, xml_content, b64decode(x509_cert), move.l10n_sa_invoice_signature, move._l10n_sa_is_simplified())
move.l10n_sa_qr_code_str = b64encode(qr_code_str).decode()
elif zatca_document.state == 'sent' and zatca_document.attachment_id.datas:
document_xml = zatca_document.attachment_id.datas.decode()
root = etree.fromstring(b64decode(document_xml))
qr_node = root.xpath('//*[local-name()="ID"][text()="QR"]/following-sibling::*/*')[0]
move.l10n_sa_qr_code_str = qr_node.text
def _l10n_sa_get_qr_code_encoding(self, tag, field, int_length=1):
"""
Helper function to encode strings for the QR code generation according to ZATCA specs
"""
company_name_tag_encoding = tag.to_bytes(length=1, byteorder='big')
company_name_length_encoding = len(field).to_bytes(length=int_length, byteorder='big')
return company_name_tag_encoding + company_name_length_encoding + field
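# Illustrative example, not part of the original file: the helper above builds one
# TLV (tag-length-value) segment. For tag 1 (seller name) and a hypothetical value
# b'Acme Co' it returns b'\x01\x07Acme Co': one tag byte, one length byte (7), then
# the raw value. The QR payload built below is a concatenation of such segments.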
def _l10n_sa_check_refund_reason(self):
"""
Make sure credit/debit notes have a valid reason and reversal reference
"""
self.ensure_one()
return self.reversed_entry_id and self.ref
@api.model
def _l10n_sa_get_qr_code(self, journal_id, unsigned_xml, x509_cert, signature, is_b2c=False):
"""
Generate QR code string based on XML content of the Invoice UBL file, X509 Production Certificate
and company info.
:return b64 encoded QR code string
"""
def xpath_ns(expr):
return root.xpath(expr, namespaces=edi_format._l10n_sa_get_namespaces())[0].text.strip()
qr_code_str = ''
root = etree.fromstring(unsigned_xml)
edi_format = self.env['account.edi.xml.ubl_21.zatca']
# Indent XML content to avoid indentation mismatches
etree.indent(root, space=' ')
invoice_date = xpath_ns('//cbc:IssueDate')
invoice_time = xpath_ns('//cbc:IssueTime')
invoice_datetime = datetime.strptime(invoice_date + ' ' + invoice_time, '%Y-%m-%d %H:%M:%S')
if invoice_datetime and journal_id.company_id.vat and x509_cert:
prehash_content = etree.tostring(root)
invoice_hash = edi_format._l10n_sa_generate_invoice_xml_hash(prehash_content, 'digest')
amount_total = float(xpath_ns('//cbc:TaxInclusiveAmount'))
amount_tax = float(xpath_ns('//cac:TaxTotal/cbc:TaxAmount'))
x509_certificate = load_der_x509_certificate(b64decode(x509_cert), default_backend())
seller_name_enc = self._l10n_sa_get_qr_code_encoding(1, journal_id.company_id.display_name.encode())
seller_vat_enc = self._l10n_sa_get_qr_code_encoding(2, journal_id.company_id.vat.encode())
timestamp_enc = self._l10n_sa_get_qr_code_encoding(3,
invoice_datetime.strftime("%Y-%m-%dT%H:%M:%SZ").encode())
amount_total_enc = self._l10n_sa_get_qr_code_encoding(4, float_repr(abs(amount_total), 2).encode())
amount_tax_enc = self._l10n_sa_get_qr_code_encoding(5, float_repr(abs(amount_tax), 2).encode())
invoice_hash_enc = self._l10n_sa_get_qr_code_encoding(6, invoice_hash)
signature_enc = self._l10n_sa_get_qr_code_encoding(7, signature.encode())
public_key_enc = self._l10n_sa_get_qr_code_encoding(8,
x509_certificate.public_key().public_bytes(Encoding.DER,
PublicFormat.SubjectPublicKeyInfo))
qr_code_str = (seller_name_enc + seller_vat_enc + timestamp_enc + amount_total_enc +
amount_tax_enc + invoice_hash_enc + signature_enc + public_key_enc)
if is_b2c:
qr_code_str += self._l10n_sa_get_qr_code_encoding(9, x509_certificate.signature)
return qr_code_str
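# Added note, not part of the original file: the TLV tags assembled in this method are
# 1 seller name, 2 VAT number, 3 timestamp, 4 total with VAT, 5 VAT amount,
# 6 invoice hash, 7 ECDSA signature, 8 public key, and 9 certificate signature (B2C only).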
@api.depends('state', 'edi_document_ids.state')
def _compute_edi_show_cancel_button(self):
"""
Override to hide the EDI Cancellation button at all times for ZATCA Invoices
"""
super()._compute_edi_show_cancel_button()
for move in self.filtered(lambda m: m.is_invoice() and m.country_code == 'SA'):
move.edi_show_cancel_button = False
def _l10n_sa_generate_unsigned_data(self):
"""
Generate UUID and digital signature to be used during both Signing and QR code generation.
The signature must be saved because it changes every time it is generated, and both the signing
step and the QR code generation need to use the exact same signature.
"""
self.ensure_one()
edi_format = self.env.ref('l10n_sa_edi.edi_sa_zatca')
# Build the dict of values to be used for generating the Invoice XML content
# Set Invoice field values required for generating the XML content, hash and signature
self.l10n_sa_uuid = uuid.uuid4()
# We generate the XML content
xml_content = edi_format._l10n_sa_generate_zatca_template(self)
# Once the required values are generated, we hash the invoice, then use it to generate a Signature
invoice_hash_hex = self.env['account.edi.xml.ubl_21.zatca']._l10n_sa_generate_invoice_xml_hash(xml_content).decode()
self.l10n_sa_invoice_signature = edi_format._l10n_sa_get_digital_signature(self.journal_id.company_id,
invoice_hash_hex).decode()
return xml_content
def _l10n_sa_log_results(self, xml_content, response_data=None, error=False):
"""
Save submitted invoice XML hash in case of either Rejection or Acceptance.
"""
self.ensure_one()
self.journal_id.l10n_sa_latest_submission_hash = self.env['account.edi.xml.ubl_21.zatca']._l10n_sa_generate_invoice_xml_hash(
xml_content)
bootstrap_cls, title, content = ("success", _("Invoice Successfully Submitted to ZATCA"),
"" if (not error or not response_data) else response_data)
if error:
bootstrap_cls, title = ("danger", _("Invoice was rejected by ZATCA"))
content = Markup("""
<p class='mb-0'>
%s
</p>
<hr>
<p class='mb-0'>
%s
</p>
""") % (_('The invoice was rejected by ZATCA. Please, check the response below:'), response_data)
if response_data and response_data.get('validationResults', {}).get('warningMessages'):
bootstrap_cls, title = ("warning", _("Invoice was Accepted by ZATCA (with Warnings)"))
content = Markup("""
<p class='mb-0'>
%s
</p>
<hr>
<p class='mb-0'>
%s
</p>
""") % (_('The invoice was accepted by ZATCA, but returned warnings. Please, check the response below:'), "<br/>".join([Markup("<b>%s</b> : %s") % (m['code'], m['message']) for m in response_data['validationResults']['warningMessages']]))
self.message_post(body=Markup("""
<div role='alert' class='alert alert-%s'>
<h4 class='alert-heading'>%s</h4>%s
</div>
""") % (bootstrap_cls, title, content))
def _l10n_sa_is_in_chain(self):
"""
If the invoice was successfully posted and confirmed by the government, then this would return True.
If the invoice timed out, then its edi_document should still be in the 'to_send' state.
"""
zatca_doc_ids = self.edi_document_ids.filtered(lambda d: d.edi_format_id.code == 'sa_zatca')
return len(zatca_doc_ids) > 0 and not any(zatca_doc_ids.filtered(lambda d: d.state == 'to_send'))
avg_line_length: 53.890625 | max_line_length: 10,347

size: 23,534 | ext: py | lang: PYTHON | branch: 15.0 | content:
import json
from hashlib import sha256
from base64 import b64decode, b64encode
from lxml import etree
from datetime import date, datetime
from odoo import models, fields, _, api
from odoo.exceptions import UserError
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.hazmat.primitives.asymmetric.ec import ECDSA
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend
from cryptography.x509 import load_der_x509_certificate
class AccountEdiFormat(models.Model):
_inherit = 'account.edi.format'
"""
Once the journal has been successfully onboarded, we can clear/report invoices through the ZATCA API:
A) STANDARD Invoice:
Make a call to the Clearance API '/invoices/clearance/single'.
This will validate the invoice, sign it and apply a QR code then return the result.
B) SIMPLIFIED Invoice:
Make a call to the Reporting API '/invoices/reporting/single'.
This will validate the invoice then return the result.
The X509 Certificate and password from the PCSID API need to be provided in the request headers.
"""
# ====== Helper Functions =======
def _l10n_sa_get_zatca_datetime(self, timestamp):
return fields.Datetime.context_timestamp(self.with_context(tz='Asia/Riyadh'), timestamp)
def _l10n_sa_xml_node_content(self, root, xpath, namespaces=None):
namespaces = namespaces or self.env['account.edi.xml.ubl_21.zatca']._l10n_sa_get_namespaces()
return etree.tostring(root.xpath(xpath, namespaces=namespaces)[0], with_tail=False,
encoding='utf-8', method='xml')
# ====== Xades Signing =======
@api.model
def _l10n_sa_get_digital_signature(self, company_id, invoice_hash):
"""
Generate an ECDSA SHA256 digital signature for the XML eInvoice
"""
decoded_hash = b64decode(invoice_hash).decode()
private_key = load_pem_private_key(company_id.sudo().l10n_sa_private_key, password=None, backend=default_backend())
signature = private_key.sign(decoded_hash.encode(), ECDSA(hashes.SHA256()))
return b64encode(signature)
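# Illustrative counterpart, not part of the original file (public_key/signature_b64 are
# hypothetical names): the signature returned above can be checked with the matching EC
# public key, e.g.
#     public_key.verify(b64decode(signature_b64), decoded_hash.encode(), ECDSA(hashes.SHA256()))
# which raises cryptography.exceptions.InvalidSignature on mismatch.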
def _l10n_sa_calculate_signed_properties_hash(self, issuer_name, serial_number, signing_time, public_key):
"""
Calculate the SHA256 value of the SignedProperties XML node. The algorithm used by ZATCA expects the indentation
of the nodes to start with 40 spaces, except for the root SignedProperties node.
"""
signed_properties = etree.fromstring(self.env.ref('l10n_sa_edi.export_sa_zatca_ubl_signed_properties')._render({
'issuer_name': issuer_name,
'serial_number': serial_number,
'signing_time': signing_time,
'public_key_hashing': public_key,
}))
etree.indent(signed_properties, space=' ')
signed_properties_split = etree.tostring(signed_properties).decode().split('\n')
signed_properties_final = ""
for index, line in enumerate(signed_properties_split):
if index == 0:
signed_properties_final += line
else:
signed_properties_final += (' ' * 36) + line
if index != len(signed_properties_split) - 1:
signed_properties_final += '\n'
signed_properties_final = etree.tostring(etree.fromstring(signed_properties_final))
return b64encode(sha256(signed_properties_final).hexdigest().encode()).decode()
def _l10n_sa_sign_xml(self, xml_content, certificate_str, signature):
"""
Function that signs XML content of a UBL document with a provided B64 encoded X509 certificate
"""
root = etree.fromstring(xml_content)
etree.indent(root, space=' ')
def _set_content(xpath, content):
node = root.xpath(xpath)[0]
node.text = content
b64_decoded_cert = b64decode(certificate_str)
x509_certificate = load_der_x509_certificate(b64decode(b64_decoded_cert.decode()), default_backend())
issuer_name = ', '.join([s.rfc4514_string() for s in x509_certificate.issuer.rdns[::-1]])
serial_number = str(x509_certificate.serial_number)
signing_time = self._l10n_sa_get_zatca_datetime(datetime.now()).strftime('%Y-%m-%dT%H:%M:%SZ')
public_key_hashing = b64encode(sha256(b64_decoded_cert).hexdigest().encode()).decode()
signed_properties_hash = self._l10n_sa_calculate_signed_properties_hash(issuer_name, serial_number,
signing_time, public_key_hashing)
_set_content("//*[local-name()='X509IssuerName']", issuer_name)
_set_content("//*[local-name()='X509SerialNumber']", serial_number)
_set_content("//*[local-name()='SignedSignatureProperties']/*[local-name()='SigningTime']", signing_time)
_set_content("//*[local-name()='SignedSignatureProperties']//*[local-name()='DigestValue']", public_key_hashing)
prehash_content = etree.tostring(root)
invoice_hash = self.env['account.edi.xml.ubl_21.zatca']._l10n_sa_generate_invoice_xml_hash(prehash_content,
'digest')
_set_content("//*[local-name()='SignatureValue']", signature)
_set_content("//*[local-name()='X509Certificate']", b64_decoded_cert.decode())
_set_content("//*[local-name()='SignatureInformation']//*[local-name()='DigestValue']", invoice_hash)
_set_content("//*[@URI='#xadesSignedProperties']/*[local-name()='DigestValue']", signed_properties_hash)
return etree.tostring(root, with_tail=False)
def _l10n_sa_assert_clearance_status(self, invoice, clearance_data):
"""
Assert Clearance status. To be overridden in case there are any other cases to be accounted for
"""
mode = 'reporting' if invoice._l10n_sa_is_simplified() else 'clearance'
if mode == 'clearance' and clearance_data.get('clearanceStatus', '') != 'CLEARED':
return {'error': _("Invoice could not be cleared: \r\n %s ") % clearance_data, 'blocking_level': 'error'}
elif mode == 'reporting' and clearance_data.get('reportingStatus', '') != 'REPORTED':
return {'error': _("Invoice could not be reported: \r\n %s ") % clearance_data, 'blocking_level': 'error'}
return clearance_data
# ====== UBL Document Rendering & Submission =======
def _l10n_sa_postprocess_zatca_template(self, xml_content):
"""
Post-process xml content generated according to the ZATCA UBL specifications. Specifically, this entails:
- Force the xmlns:ext namespace on the root element (Invoice). This is required because, by
default, the generated UBL file does not contain any ext-namespaced element, so the namespace
would otherwise be stripped as unused.
"""
# Append UBLExtensions to the XML content
ubl_extensions = etree.fromstring(self.env.ref('l10n_sa_edi.export_sa_zatca_ubl_extensions')._render())
root = etree.fromstring(xml_content)
root.insert(0, ubl_extensions)
# Force xmlns:ext namespace on UBl file
ns_map = {'ext': 'urn:oasis:names:specification:ubl:schema:xsd:CommonExtensionComponents-2'}
etree.cleanup_namespaces(root, top_nsmap=ns_map, keep_ns_prefixes=['ext'])
return etree.tostring(root, with_tail=False).decode()
def _l10n_sa_generate_zatca_template(self, invoice):
"""
Render the ZATCA UBL file
"""
xml_content, errors = self.env['account.edi.xml.ubl_21.zatca']._export_invoice(invoice)
if errors:
return {
'error': _("Could not generate Invoice UBL content: %s") % ", \n".join(errors),
'blocking_level': 'error'
}
return self._l10n_sa_postprocess_zatca_template(xml_content)
def _l10n_sa_submit_einvoice(self, invoice, signed_xml, PCSID_data):
"""
Submit a generated Invoice UBL file by making calls to the following APIs:
- A. Clearance API: Submit a standard Invoice to ZATCA for validation, returns signed UBL
- B. Reporting API: Submit a simplified Invoice to ZATCA for validation
"""
clearance_data = invoice.journal_id._l10n_sa_api_clearance(invoice, signed_xml.decode(), PCSID_data)
if clearance_data.get('json_errors'):
errors = [json.loads(j).get('validationResults', {}) for j in clearance_data['json_errors']]
error_msg = ''
is_warning = True
for error in errors:
validation_results = error.get('validationResults', {})
for err in validation_results.get('warningMessages', []):
error_msg += '\n - %s | %s' % (err['code'], err['message'])
for err in validation_results.get('errorMessages', []):
is_warning = False
error_msg += '\n - %s | %s' % (err['code'], err['message'])
return {
'error': error_msg,
'rejected': not is_warning,
'blocking_level': 'warning' if is_warning else 'error'
}
if not clearance_data.get('error'):
return self._l10n_sa_assert_clearance_status(invoice, clearance_data)
return clearance_data
def _l10n_sa_postprocess_einvoice_submission(self, invoice, signed_xml, clearance_data):
"""
Once an invoice has been successfully submitted, it is returned as a Cleared invoice, on which data
from ZATCA was applied. To be overridden to account for other cases, such as Reporting.
"""
if invoice._l10n_sa_is_simplified():
# if invoice is B2C, it is a SIMPLIFIED invoice, and thus it is only reported and returns
# no signed invoice. In this case, we just return the original content
return signed_xml.decode()
return b64decode(clearance_data['clearedInvoice']).decode()
def _l10n_sa_apply_qr_code(self, invoice, xml_content):
"""
Apply QR code on Invoice UBL content
"""
root = etree.fromstring(xml_content)
qr_code = invoice.l10n_sa_qr_code_str
qr_node = root.xpath('//*[local-name()="ID"][text()="QR"]/following-sibling::*/*')[0]
qr_node.text = qr_code
return etree.tostring(root, with_tail=False)
def _l10n_sa_get_signed_xml(self, invoice, unsigned_xml, x509_cert):
"""
Helper method to sign the provided XML, apply the QR code in the case of Simplified invoices (B2C), then
return the signed XML
"""
signed_xml = self._l10n_sa_sign_xml(unsigned_xml, x509_cert, invoice.l10n_sa_invoice_signature)
if invoice._l10n_sa_is_simplified():
return self._l10n_sa_apply_qr_code(invoice, signed_xml)
return signed_xml
def _l10n_sa_export_zatca_invoice(self, invoice, xml_content=None):
"""
Generate a ZATCA compliant UBL file, make API calls to authenticate, sign and include QR Code and
Cryptographic Stamp, then create an attachment with the final contents of the UBL file
"""
self.ensure_one()
# Prepare UBL invoice values and render XML file
unsigned_xml = xml_content or self._l10n_sa_generate_zatca_template(invoice)
# Load PCSID data and X509 certificate
try:
PCSID_data = invoice.journal_id._l10n_sa_api_get_pcsid()
except UserError as e:
return {'error': _("Could not generate PCSID values: \n") + e.args[0], 'blocking_level': 'error'}
x509_cert = PCSID_data['binarySecurityToken']
# Apply Signature/QR code on the generated XML document
try:
signed_xml = self._l10n_sa_get_signed_xml(invoice, unsigned_xml, x509_cert)
except UserError as e:
return {'error': _("Could not generate signed XML values: \n") + e.args[0], 'blocking_level': 'error'}
# Once the XML content has been generated and signed, we submit it to ZATCA
return self._l10n_sa_submit_einvoice(invoice, signed_xml, PCSID_data), signed_xml
def _l10n_sa_check_partner_missing_info(self, partner_id, fields_to_check):
"""
Helper function to check if ZATCA mandated partner fields are missing for a specified partner record
"""
missing = []
for field in fields_to_check:
field_value = partner_id[field[0]]
if not field_value or (len(field) == 3 and not field[2](partner_id, field_value)):
missing.append(field[1])
return missing
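# Added note, not part of the original file: each entry of fields_to_check is either
# ('field_name', 'error message') or ('field_name', 'error message', validator), where
# validator is a callable (partner, value) -> bool; the message is collected when the
# field is empty or the validator returns False, as used by the two checks below.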
def _l10n_sa_check_seller_missing_info(self, invoice):
"""
Helper function to check if ZATCA mandated partner fields are missing for the seller
"""
partner_id = invoice.company_id.partner_id.commercial_partner_id
fields_to_check = [
('l10n_sa_edi_building_number', _('Building Number for the Seller is required on Standard Invoices')),
('street2', _('Neighborhood for the Seller is required on Standard Invoices')),
('l10n_sa_additional_identification_scheme',
_('Additional Identification Scheme is required for the Seller, and must be one of CRN, MOM, MLS, SAG or OTH'),
lambda p, v: v in ('CRN', 'MOM', 'MLS', 'SAG', 'OTH')
),
('vat',
_('VAT is required when Identification Scheme is set to Tax Identification Number'),
lambda p, v: p.l10n_sa_additional_identification_scheme != 'TIN'
),
('state_id', _('State / Country subdivision'))
]
return self._l10n_sa_check_partner_missing_info(partner_id, fields_to_check)
def _l10n_sa_check_buyer_missing_info(self, invoice):
"""
Helper function to check if ZATCA mandated partner fields are missing for the buyer
"""
fields_to_check = []
if any(tax.l10n_sa_exemption_reason_code in ('VATEX-SA-HEA', 'VATEX-SA-EDU') for tax in
invoice.invoice_line_ids.filtered(
lambda line: not line.display_type).tax_ids):
fields_to_check += [
('l10n_sa_additional_identification_scheme',
_('Additional Identification Scheme is required for the Buyer if tax exemption reason is either '
'VATEX-SA-HEA or VATEX-SA-EDU, and its value must be NAT'), lambda p, v: v == 'NAT'),
('l10n_sa_additional_identification_number',
_('Additional Identification Number is required for commercial partners'),
lambda p, v: p.l10n_sa_additional_identification_scheme != 'TIN'
),
]
elif invoice.commercial_partner_id.l10n_sa_additional_identification_scheme == 'TIN':
fields_to_check += [
('vat', _('VAT is required when Identification Scheme is set to Tax Identification Number'))
]
if not invoice._l10n_sa_is_simplified() and invoice.partner_id.country_id.code == 'SA':
# If the invoice is a non-foreign Standard (B2B) invoice, the Building Number and Neighborhood are required
fields_to_check += [
('l10n_sa_edi_building_number', _('Building Number for the Buyer is required on Standard Invoices')),
('street2', _('Neighborhood for the Buyer is required on Standard Invoices')),
]
return self._l10n_sa_check_partner_missing_info(invoice.commercial_partner_id, fields_to_check)
def _l10n_sa_post_zatca_edi(self, invoice): # no batch ensure that there is only one invoice
"""
Post invoice to ZATCA and return a dict of invoices and their success/attachment
"""
# Chain integrity check: chain head must have been REALLY posted, and did not time out
# When a submission times out, we reset the chain index of the invoice to False, so it has to be submitted again
# According to ZATCA, if we end up submitting the same invoice more than once, they will directly reach out
# to the taxpayer for clarifications
chain_head = invoice.journal_id._l10n_sa_get_last_posted_invoice()
if chain_head and chain_head != invoice and not chain_head._l10n_sa_is_in_chain():
return {
'error': f"ZATCA: Cannot post invoice while chain head ({chain_head.name}) has not been posted",
'blocking_level': 'error'
}
if not invoice.l10n_sa_chain_index:
# If the Invoice doesn't have a chain index, it means it either has not been submitted before,
# or it was submitted and rejected. Either way, we need to assign it a new Chain Index and regenerate
# the data that depends on it before submitting (UUID, XML content, signature)
invoice.l10n_sa_chain_index = invoice.journal_id._l10n_sa_edi_get_next_chain_index()
xml_content = invoice._l10n_sa_generate_unsigned_data()
# Generate Invoice name for attachment
attachment_name = self.env['account.edi.xml.ubl_21.zatca']._export_invoice_filename(invoice)
# Generate XML, sign it, then submit it to ZATCA
response_data, submitted_xml = self._l10n_sa_export_zatca_invoice(invoice, xml_content)
# Check for submission errors
if response_data.get('error'):
# If the request was rejected, we save the signed xml content as an attachment
if response_data.get('rejected'):
invoice._l10n_sa_log_results(submitted_xml, response_data, error=True)
# If the request returned an exception (Timeout, ValueError... etc.) it means we're not sure if the
# invoice was successfully cleared/reported, and thus we keep the Index Chain.
# Else, we recalculate the submission Index (ICV), UUID, XML content and Signature
if not response_data.get('excepted'):
invoice.l10n_sa_chain_index = False
return response_data
# Once submission is done with no errors, check submission status
cleared_xml = self._l10n_sa_postprocess_einvoice_submission(invoice, submitted_xml, response_data)
# Save the submitted/returned invoice XML content once the submission has been completed successfully
invoice._l10n_sa_log_results(cleared_xml.encode(), response_data)
return {
'success': True,
'attachment': self.env['ir.attachment'].create({
'name': attachment_name,
'raw': cleared_xml.encode(),
'res_model': 'account.move',
'res_id': invoice.id,
'mimetype': 'application/xml'
})
}
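# Added note, not part of the original file: the method therefore returns one of two
# shapes expected by the EDI framework: {'error': ..., 'blocking_level': ..., ...} when
# submission fails, or {'success': True, 'attachment': <ir.attachment>} once the
# cleared/reported XML has been stored.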
# ====== EDI Format Overrides =======
def _is_required_for_invoice(self, invoice):
"""
Override to add ZATCA edi checks on required invoices
"""
self.ensure_one()
if self.code != 'sa_zatca':
return super()._is_required_for_invoice(invoice)
return invoice.is_sale_document() and invoice.country_code == 'SA'
def _check_move_configuration(self, invoice):
"""
Override to add ZATCA compliance checks on the Invoice
"""
def _set_missing_partner_fields(missing_fields, name):
return _("- Please, set the following fields on the %s: %s") % (name, ', '.join(missing_fields))
journal = invoice.journal_id
company = invoice.company_id
errors = super()._check_move_configuration(invoice)
if self.code != 'sa_zatca' or company.country_id.code != 'SA':
return errors
if invoice.commercial_partner_id == invoice.company_id.partner_id.commercial_partner_id:
errors.append(_("- You cannot post invoices where the Seller is the Buyer"))
if not all(line.tax_ids for line in invoice.invoice_line_ids.filtered(lambda line: not line.display_type)):
errors.append(_("- Invoice lines should have at least one Tax applied."))
if not journal._l10n_sa_ready_to_submit_einvoices():
errors.append(
_("- Finish the Onboarding procees for journal %s by requesting the CSIDs and completing the checks.") % journal.name)
if not company._l10n_sa_check_organization_unit():
errors.append(
_("- The company VAT identification must contain 15 digits, with the first and last digits being '3' as per the BR-KSA-39 and BR-KSA-40 of ZATCA KSA business rule."))
if not company.sudo().l10n_sa_private_key:
errors.append(
_("- No Private Key was generated for company %s. A Private Key is mandatory in order to generate Certificate Signing Requests (CSR).") % company.name)
if not journal.l10n_sa_serial_number:
errors.append(
_("- No Serial Number was assigned for journal %s. A Serial Number is mandatory in order to generate Certificate Signing Requests (CSR).") % journal.name)
supplier_missing_info = self._l10n_sa_check_seller_missing_info(invoice)
customer_missing_info = self._l10n_sa_check_buyer_missing_info(invoice)
if supplier_missing_info:
errors.append(_set_missing_partner_fields(supplier_missing_info, _("Supplier")))
if customer_missing_info:
errors.append(_set_missing_partner_fields(customer_missing_info, _("Customer")))
if invoice.invoice_date > date.today():
errors.append(_("- Please, make sure the invoice date is set to either the same as or before Today."))
if invoice.move_type in ('in_refund', 'out_refund') and not invoice._l10n_sa_check_refund_reason():
errors.append(
_("- Please, make sure both the Reversed Entry and the Reversal Reason are specified when confirming a Credit/Debit note"))
return errors
def _needs_web_services(self):
"""
Override to add a check on edi document format code
"""
self.ensure_one()
return self.code == 'sa_zatca' or super()._needs_web_services()
def _is_compatible_with_journal(self, journal):
"""
Override to add a check on journal type & country code (SA)
"""
self.ensure_one()
if self.code != 'sa_zatca':
return super()._is_compatible_with_journal(journal)
return journal.type == 'sale' and journal.country_code == 'SA'
def _post_invoice_edi(self, invoices):
"""
Override to post ZATCA edi formats
"""
self.ensure_one()
invoice = invoices
if self.code != 'sa_zatca' or invoice.company_id.country_code != 'SA':
return super()._post_invoice_edi(invoices)
if not invoice.journal_id.l10n_sa_compliance_checks_passed:
return {invoice: {'error': _("ZATCA Compliance Checks need to be completed for the current journal "
"before invoices can be submitted to the Authority")}}
return {invoice: self._l10n_sa_post_zatca_edi(invoice)}
avg_line_length: 52.414254 | max_line_length: 23,534

size: 36,898 | ext: py | lang: PYTHON | branch: 15.0 | content:
import json
import requests
from markupsafe import Markup
from lxml import etree
from datetime import datetime
from base64 import b64encode, b64decode
from odoo import models, fields, service, _, api
from odoo.exceptions import UserError
from odoo.modules.module import get_module_resource
from requests.exceptions import HTTPError, RequestException
from cryptography import x509
from cryptography.x509 import ObjectIdentifier, load_der_x509_certificate
from cryptography.x509.oid import NameOID
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.serialization import Encoding, load_pem_private_key
from urllib.parse import urljoin
ZATCA_API_URLS = {
"sandbox": "https://gw-fatoora.zatca.gov.sa/e-invoicing/developer-portal/",
"preprod": "https://gw-fatoora.zatca.gov.sa/e-invoicing/simulation/",
"prod": "https://gw-fatoora.zatca.gov.sa/e-invoicing/core/",
"apis": {
"ccsid": "compliance",
"pcsid": "production/csids",
"compliance": "compliance/invoices",
"reporting": "invoices/reporting/single",
"clearance": "invoices/clearance/single",
}
}
CERT_TEMPLATE_NAME = {
'prod': b'\x0c\x12ZATCA-Code-Signing',
'sandbox': b'\x13\x15PREZATCA-Code-Signing',
'preprod': b'\x13\x15PREZATCA-Code-Signing',
}
# This SANDBOX_AUTH is only used for testing purposes, and is shared to all users of the sandbox environment
SANDBOX_AUTH = {
'binarySecurityToken': "TUlJRDFEQ0NBM21nQXdJQkFnSVRid0FBZTNVQVlWVTM0SS8rNVFBQkFBQjdkVEFLQmdncWhrak9QUVFEQWpCak1SVXdFd1lLQ1pJbWlaUHlMR1FCR1JZRmJHOWpZV3d4RXpBUkJnb0praWFKay9Jc1pBRVpGZ05uYjNZeEZ6QVZCZ29Ka2lhSmsvSXNaQUVaRmdkbGVIUm5ZWHAwTVJ3d0dnWURWUVFERXhOVVUxcEZTVTVXVDBsRFJTMVRkV0pEUVMweE1CNFhEVEl5TURZeE1qRTNOREExTWxvWERUSTBNRFl4TVRFM05EQTFNbG93U1RFTE1Ba0dBMVVFQmhNQ1UwRXhEakFNQmdOVkJBb1RCV0ZuYVd4bE1SWXdGQVlEVlFRTEV3MW9ZWGxoSUhsaFoyaHRiM1Z5TVJJd0VBWURWUVFERXdreE1qY3VNQzR3TGpFd1ZqQVFCZ2NxaGtqT1BRSUJCZ1VyZ1FRQUNnTkNBQVRUQUs5bHJUVmtvOXJrcTZaWWNjOUhEUlpQNGI5UzR6QTRLbTdZWEorc25UVmhMa3pVMEhzbVNYOVVuOGpEaFJUT0hES2FmdDhDL3V1VVk5MzR2dU1ObzRJQ0p6Q0NBaU13Z1lnR0ExVWRFUVNCZ0RCK3BId3dlakViTUJrR0ExVUVCQXdTTVMxb1lYbGhmREl0TWpNMGZETXRNVEV5TVI4d0hRWUtDWkltaVpQeUxHUUJBUXdQTXpBd01EYzFOVGc0TnpBd01EQXpNUTB3Q3dZRFZRUU1EQVF4TVRBd01SRXdEd1lEVlFRYURBaGFZWFJqWVNBeE1qRVlNQllHQTFVRUR3d1BSbTl2WkNCQ2RYTnphVzVsYzNNek1CMEdBMVVkRGdRV0JCU2dtSVdENmJQZmJiS2ttVHdPSlJYdkliSDlIakFmQmdOVkhTTUVHREFXZ0JSMllJejdCcUNzWjFjMW5jK2FyS2NybVRXMUx6Qk9CZ05WSFI4RVJ6QkZNRU9nUWFBL2hqMW9kSFJ3T2k4dmRITjBZM0pzTG5waGRHTmhMbWR2ZGk1ellTOURaWEowUlc1eWIyeHNMMVJUV2tWSlRsWlBTVU5GTFZOMVlrTkJMVEV1WTNKc01JR3RCZ2dyQmdFRkJRY0JBUVNCb0RDQm5UQnVCZ2dyQmdFRkJRY3dBWVppYUhSMGNEb3ZMM1J6ZEdOeWJDNTZZWFJqWVM1bmIzWXVjMkV2UTJWeWRFVnVjbTlzYkM5VVUxcEZhVzUyYjJsalpWTkRRVEV1WlhoMFoyRjZkQzVuYjNZdWJHOWpZV3hmVkZOYVJVbE9WazlKUTBVdFUzVmlRMEV0TVNneEtTNWpjblF3S3dZSUt3WUJCUVVITUFHR0gyaDBkSEE2THk5MGMzUmpjbXd1ZW1GMFkyRXVaMjkyTG5OaEwyOWpjM0F3RGdZRFZSMFBBUUgvQkFRREFnZUFNQjBHQTFVZEpRUVdNQlFHQ0NzR0FRVUZCd01DQmdnckJnRUZCUWNEQXpBbkJna3JCZ0VFQVlJM0ZRb0VHakFZTUFvR0NDc0dBUVVGQndNQ01Bb0dDQ3NHQVFVRkJ3TURNQW9HQ0NxR1NNNDlCQU1DQTBrQU1FWUNJUUNWd0RNY3E2UE8rTWNtc0JYVXovdjFHZGhHcDdycVNhMkF4VEtTdjgzOElBSWhBT0JOREJ0OSszRFNsaWpvVmZ4enJkRGg1MjhXQzM3c21FZG9HV1ZyU3BHMQ==",
'secret': "Xlj15LyMCgSC66ObnEO/qVPfhSbs3kDTjWnGheYhfSs="
}
class AccountJournal(models.Model):
_inherit = 'account.journal'
"""
In order to clear/report an invoice through the ZATCA API, we need to onboard each journal by following
three steps:
STEP 1:
Make a call to the Compliance CSID API '/compliance'.
This will return three things:
- X509 Compliance Cryptographic Stamp Identifier (CCSID/Certificate)
- Password (Secret)
- Compliance Request ID
STEP 2:
Make a call to the Compliance Checks API '/compliance/invoices', by passing the hashed xml content
of the files available in the tests/compliance folder. This will check if the provided
Standard/Simplified Invoices comply with UBL 2.1 standards in line with ZATCA specifications
STEP 3:
Make a call to the Production CSID API '/production/csids' including the Compliance Certificate,
Password and Request ID from STEP 1.
This will return three things:
- X509 Production Certificate
- Password (Secret)
- Production Request ID
"""
l10n_sa_csr = fields.Binary(attachment=True, copy=False, groups="base.group_system",
help="The Certificate Signing Request that is submitted to the Compliance API")
l10n_sa_csr_errors = fields.Html("Onboarding Errors", copy=False)
l10n_sa_compliance_csid_json = fields.Char("CCSID JSON", copy=False, groups="base.group_system",
help="Compliance CSID data received from the Compliance CSID API "
"in dumped json format")
l10n_sa_production_csid_json = fields.Char("PCSID JSON", copy=False, groups="base.group_system",
help="Production CSID data received from the Production CSID API "
"in dumped json format")
l10n_sa_production_csid_validity = fields.Datetime("PCSID Expiration", help="Production CSID expiration date",
compute="_l10n_sa_compute_production_csid_validity", store=True)
l10n_sa_compliance_checks_passed = fields.Boolean("Compliance Checks Done", default=False, copy=False,
help="Specifies if the Compliance Checks have been completed successfully")
l10n_sa_chain_sequence_id = fields.Many2one('ir.sequence', string='ZATCA account.move chain sequence',
readonly=True, copy=False)
l10n_sa_serial_number = fields.Char("Serial Number", copy=False,
help="The serial number of the Taxpayer solution unit. Provided by ZATCA")
l10n_sa_latest_submission_hash = fields.Char("Latest Submission Hash", copy=False,
help="Hash of the latest submitted invoice to be used as the Previous Invoice Hash (KSA-13)")
# ====== Utility Functions =======
def _l10n_sa_ready_to_submit_einvoices(self):
"""
Helper function to know if the required CSIDs have been obtained, and the compliance checks have been
completed
"""
self.ensure_one()
return self.sudo().l10n_sa_production_csid_json
# ====== CSR Generation =======
def _l10n_sa_csr_required_fields(self):
""" Return the list of fields required to generate a valid CSR as per ZATCA requirements """
return ['l10n_sa_private_key', 'vat', 'name', 'city', 'country_id', 'state_id']
def _l10n_sa_get_csr_str(self):
"""
Return a string representation of a ZATCA compliant CSR that will be sent to the Compliance API in order to get back
a signed X509 certificate
"""
self.ensure_one()
def _encode(s):
"""
Some of the information included in the CSR could be in Arabic, and thus needs to be encoded in a
specific format in order to be compliant with the ZATCA CCSID/PCSID APIs
"""
return s.encode().decode('CP1252')
company_id = self.company_id
version_info = service.common.exp_version()
builder = x509.CertificateSigningRequestBuilder()
subject_names = (
# Country Name
(NameOID.COUNTRY_NAME, company_id.country_id.code),
# Organization Unit Name
(NameOID.ORGANIZATIONAL_UNIT_NAME, (company_id.vat or '')[:10]),
# Organization Name
(NameOID.ORGANIZATION_NAME, _encode(company_id.name)),
# Subject Common Name
(NameOID.COMMON_NAME, _encode(company_id.name)),
# Organization Identifier
(ObjectIdentifier('2.5.4.97'), company_id.vat),
# State/Province Name
(NameOID.STATE_OR_PROVINCE_NAME, _encode(company_id.state_id.name)),
# Locality Name
(NameOID.LOCALITY_NAME, _encode(company_id.city)),
)
# The CertificateSigningRequestBuilder instances are immutable, which is why every time we modify one,
# we have to assign it back to itself to keep track of the changes
builder = builder.subject_name(x509.Name([
x509.NameAttribute(n[0], u'%s' % n[1]) for n in subject_names
]))
x509_alt_names_extension = x509.SubjectAlternativeName([
x509.DirectoryName(x509.Name([
# EGS Serial Number. Manufacturer or Solution Provider Name, Model or Version and Serial Number.
# To be written in the following format: "1-... |2-... |3-..."
x509.NameAttribute(ObjectIdentifier('2.5.4.4'), '1-Odoo|2-%s|3-%s' % (
version_info['server_version_info'][0], self.l10n_sa_serial_number)),
# Organisation Identifier (UID)
x509.NameAttribute(NameOID.USER_ID, company_id.vat),
# Invoice Type. 4-digit numerical input using 0 & 1
x509.NameAttribute(NameOID.TITLE, company_id._l10n_sa_get_csr_invoice_type()),
# Location
x509.NameAttribute(ObjectIdentifier('2.5.4.26'), _encode(company_id.street)),
# Industry
x509.NameAttribute(ObjectIdentifier('2.5.4.15'),
_encode(company_id.partner_id.industry_id.name or 'Other')),
]))
])
x509_extensions = (
# Add Certificate template name extension
(x509.UnrecognizedExtension(ObjectIdentifier('1.3.6.1.4.1.311.20.2'),
CERT_TEMPLATE_NAME[company_id.l10n_sa_api_mode]), False),
# Add alternative names extension
(x509_alt_names_extension, False),
)
for ext in x509_extensions:
builder = builder.add_extension(ext[0], critical=ext[1])
private_key = load_pem_private_key(company_id.l10n_sa_private_key, password=None, backend=default_backend())
request = builder.sign(private_key, hashes.SHA256(), default_backend())
return b64encode(request.public_bytes(Encoding.PEM)).decode()
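# Illustrative check, not part of the original file (journal is a hypothetical
# account.journal record): the return value is the base64 of a PEM-encoded CSR, so it
# can be inspected locally with, e.g.
#     csr = x509.load_pem_x509_csr(b64decode(journal._l10n_sa_get_csr_str()), default_backend())
#     csr.subject  # the Name built from subject_names above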
def _l10n_sa_generate_csr(self):
"""
Generate a CSR for the Journal to be used for the Onboarding process and Invoice submissions
"""
self.ensure_one()
if any(not self.company_id[f] for f in self._l10n_sa_csr_required_fields()):
raise UserError(_("Please, make sure all the following fields have been correctly set on the Company: \n")
+ "\n".join(
" - %s" % self.company_id._fields[f].string for f in self._l10n_sa_csr_required_fields() if
not self.company_id[f]))
self._l10n_sa_reset_certificates()
self.l10n_sa_csr = self._l10n_sa_get_csr_str()
# ====== Certificate Methods =======
@api.depends('l10n_sa_production_csid_json')
def _l10n_sa_compute_production_csid_validity(self):
"""
Compute the expiration date of the Production certificate
"""
for journal in self:
journal.l10n_sa_production_csid_validity = False
if journal.l10n_sa_production_csid_json:
journal.l10n_sa_production_csid_validity = self._l10n_sa_get_pcsid_validity(
json.loads(journal.l10n_sa_production_csid_json))
def _l10n_sa_reset_certificates(self):
"""
Reset all certificate values, including CSR and compliance checks
"""
for journal in self.sudo():
journal.l10n_sa_csr = False
journal.l10n_sa_production_csid_json = False
journal.l10n_sa_compliance_csid_json = False
journal.l10n_sa_compliance_checks_passed = False
def _l10n_sa_api_onboard_journal(self, otp):
"""
Perform the onboarding for the journal. The onboarding consists of three steps:
1. Get the Compliance CSID
2. Perform the Compliance Checks
3. Get the Production CSID
"""
self.ensure_one()
try:
# If the company does not have a private key, we generate it.
# The private key is used to generate the CSR but also to sign the invoices
if not self.company_id.l10n_sa_private_key:
self.company_id.l10n_sa_private_key = self.company_id._l10n_sa_generate_private_key()
self._l10n_sa_generate_csr()
# STEP 1: The first step of the process is to get the CCSID
self._l10n_sa_get_compliance_CSID(otp)
# STEP 2: Once we have the CCSID, we perform the compliance checks
self._l10n_sa_run_compliance_checks()
# STEP 3: Once the compliance checks are completed, we request the PCSID
self._l10n_sa_get_production_CSID()
# Once all three steps are completed, we set the errors field to False
self.l10n_sa_csr_errors = False
except (RequestException, HTTPError, UserError) as e:
# In case of an exception returned from ZATCA (not timeout), we will need to regenerate the CSR
# As the same CSR cannot be used twice for the same CCSID request
self._l10n_sa_reset_certificates()
self.l10n_sa_csr_errors = e.args[0] or _("Journal could not be onboarded")
def _l10n_sa_get_compliance_CSID(self, otp):
"""
Request a Compliance Cryptographic Stamp Identifier (CCSID) from ZATCA
"""
CCSID_data = self._l10n_sa_api_get_compliance_CSID(otp)
if CCSID_data.get('error'):
raise UserError(_("Could not obtain Compliance CSID: %s") % CCSID_data['error'])
self.sudo().write({
'l10n_sa_compliance_csid_json': json.dumps(CCSID_data),
'l10n_sa_production_csid_json': False,
'l10n_sa_compliance_checks_passed': False,
})
def _l10n_sa_get_production_CSID(self, OTP=None):
"""
Request a Production Cryptographic Stamp Identifier (PCSID) from ZATCA
"""
self_sudo = self.sudo()
if not self_sudo.l10n_sa_compliance_csid_json:
raise UserError(_("Cannot request a Production CSID before requesting a CCSID first"))
elif not self_sudo.l10n_sa_compliance_checks_passed:
raise UserError(_("Cannot request a Production CSID before completing the Compliance Checks"))
renew = False
zatca_format = self.env.ref('l10n_sa_edi.edi_sa_zatca')
if self_sudo.l10n_sa_production_csid_json:
time_now = zatca_format._l10n_sa_get_zatca_datetime(datetime.now())
if zatca_format._l10n_sa_get_zatca_datetime(self_sudo.l10n_sa_production_csid_validity) < time_now:
renew = True
else:
raise UserError(_("The Production CSID is still valid. You can only renew it once it has expired."))
CCSID_data = json.loads(self_sudo.l10n_sa_compliance_csid_json)
PCSID_data = self_sudo._l10n_sa_request_production_csid(CCSID_data, renew, OTP)
if PCSID_data.get('error'):
raise UserError(_("Could not obtain Production CSID: %s") % PCSID_data['error'])
self_sudo.l10n_sa_production_csid_json = json.dumps(PCSID_data)
# ====== Compliance Checks =======
def _l10n_sa_get_compliance_files(self):
"""
Return the list of files to be used for the compliance checks.
"""
file_names, compliance_files = [
'standard/invoice.xml', 'standard/credit.xml', 'standard/debit.xml',
'simplified/invoice.xml', 'simplified/credit.xml', 'simplified/debit.xml',
], {}
for file in file_names:
fpath = get_module_resource('l10n_sa_edi', 'tests/compliance', file)
with open(fpath, 'rb') as ip:
compliance_files[file] = ip.read().decode()
return compliance_files
def _l10n_sa_run_compliance_checks(self):
"""
Run Compliance Checks once the CCSID has been obtained.
The goal of the Compliance Checks is to make sure our system is able to produce, sign and send Invoices
correctly. For this we use dummy invoice UBL files available under the tests/compliance folder:
Standard Invoice, Standard Credit Note, Standard Debit Note, Simplified Invoice, Simplified Credit Note,
Simplified Debit Note.
We read each one of these files separately, sign them, then process them through the Compliance Checks API.
"""
self.ensure_one()
self_sudo = self.sudo()
if self.country_code != 'SA':
raise UserError(_("Compliance checks can only be run for companies operating from KSA"))
if not self_sudo.l10n_sa_compliance_csid_json:
raise UserError(_("You need to request the CCSID first before you can proceed"))
CCSID_data = json.loads(self_sudo.l10n_sa_compliance_csid_json)
compliance_files = self._l10n_sa_get_compliance_files()
for fname, fval in compliance_files.items():
invoice_hash_hex = self.env['account.edi.xml.ubl_21.zatca']._l10n_sa_generate_invoice_xml_hash(
fval).decode()
digital_signature = self.env.ref('l10n_sa_edi.edi_sa_zatca')._l10n_sa_get_digital_signature(self.company_id, invoice_hash_hex).decode()
prepared_xml = self._l10n_sa_prepare_compliance_xml(fname, fval, CCSID_data['binarySecurityToken'],
digital_signature)
result = self._l10n_sa_api_compliance_checks(prepared_xml.decode(), CCSID_data)
if result.get('error'):
raise UserError(Markup("<p class='mb-0'>%s <b>%s</b></p>") % (_("Could not complete Compliance Checks for the following file:"), fname))
if result['validationResults']['status'] == 'WARNING':
warnings = "".join(Markup("<li><b>%s</b>: %s </li>") % (e['code'], e['message']) for e in result['validationResults']['warningMessages'])
self.l10n_sa_csr_errors = Markup("<br/><br/><ul class='pl-3'><b>%s</b>%s</ul>") % (_("Warnings:"), warnings)
elif result['validationResults']['status'] != 'PASS':
errors = "".join(Markup("<li><b>%s</b>: %s </li>") % (e['code'], e['message']) for e in result['validationResults']['errorMessages'])
raise UserError(Markup("<p class='mb-0'>%s <b>%s</b> %s</p>")
% (_("Could not complete Compliance Checks for the following file:"), fname, Markup("<br/><br/><ul class='pl-3'><b>%s</b>%s</ul>") % (_("Errors:"), errors)))
self.l10n_sa_compliance_checks_passed = True
def _l10n_sa_prepare_compliance_xml(self, xml_name, xml_raw, PCSID, signature):
"""
Prepare XML content to be used for Compliance checks
"""
xml_content = self._l10n_sa_prepare_invoice_xml(xml_raw)
signed_xml = self.env.ref('l10n_sa_edi.edi_sa_zatca')._l10n_sa_sign_xml(xml_content, PCSID, signature)
if xml_name.startswith('simplified'):
qr_code_str = self.env['account.move']._l10n_sa_get_qr_code(self, signed_xml, b64decode(PCSID).decode(),
signature, True)
root = etree.fromstring(signed_xml)
qr_node = root.xpath('//*[local-name()="ID"][text()="QR"]/following-sibling::*/*')[0]
qr_node.text = b64encode(qr_code_str).decode()
return etree.tostring(root, with_tail=False)
return signed_xml
def _l10n_sa_prepare_invoice_xml(self, xml_content):
"""
Prepare the XML content of the test invoices before running the compliance checks
"""
ubl_extensions = etree.fromstring(self.env.ref('l10n_sa_edi.export_sa_zatca_ubl_extensions')._render())
root = etree.fromstring(xml_content.encode())
root.insert(0, ubl_extensions)
ns_map = self.env['account.edi.xml.ubl_21.zatca']._l10n_sa_get_namespaces()
def _get_node(xpath_str):
return root.xpath(xpath_str, namespaces=ns_map)[0]
# Update the Company VAT number in the test invoice
vat_el = _get_node('//cbc:CompanyID')
vat_el.text = self.company_id.vat
# Update the Company Name in the test invoice
name_nodes = ['cac:PartyName/cbc:Name', 'cac:PartyLegalEntity/cbc:RegistrationName', 'cac:Contact/cbc:Name']
for node in name_nodes:
comp_name_el = _get_node('//cac:AccountingSupplierParty/cac:Party/' + node)
comp_name_el.text = self.company_id.display_name
return etree.tostring(root)
# ====== Index Chain & Previous Invoice Calculation =======
def _l10n_sa_edi_get_next_chain_index(self):
self.ensure_one()
if not self.l10n_sa_chain_sequence_id:
self.l10n_sa_chain_sequence_id = self.env['ir.sequence'].create({
'name': f'ZATCA account move sequence for Journal {self.name} (id: {self.id})',
'code': f'l10n_sa_edi.account.move.{self.id}',
'implementation': 'no_gap',
'company_id': self.company_id.id,
})
return self.l10n_sa_chain_sequence_id.next_by_id()
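# Added note, not part of the original file: the 'no_gap' sequence yields strictly
# consecutive indexes per journal, matching the submission index (ICV) semantics
# referred to in _l10n_sa_post_zatca_edi.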
def _l10n_sa_get_last_posted_invoice(self):
"""
Returns the last invoice posted to this journal's chain.
That invoice may have been received by the govt or not (eg. in case of a timeout).
Only upon confirmed reception/refusal of that invoice can another one be posted.
"""
self.ensure_one()
return self.env['account.move'].search(
[
('journal_id', '=', self.id),
('l10n_sa_chain_index', '!=', 0)
],
limit=1, order='l10n_sa_chain_index desc'
)
# ====== API Calls to ZATCA =======
def _l10n_sa_api_get_compliance_CSID(self, otp):
"""
API call to the Compliance CSID API to generate a CCSID certificate, password and compliance request_id
Requires a CSR token and a One Time Password (OTP)
"""
self.ensure_one()
if not otp:
raise UserError(_("Please, set a valid OTP to be used for Onboarding"))
if not self.l10n_sa_csr:
raise UserError(_("Please, generate a CSR before requesting a CCSID"))
request_data = {
'body': json.dumps({'csr': self.l10n_sa_csr.decode()}),
'header': {'OTP': otp}
}
return self._l10n_sa_call_api(request_data, ZATCA_API_URLS['apis']['ccsid'], 'POST')
def _l10n_sa_api_get_production_CSID(self, CCSID_data):
"""
API call to the Production CSID API to generate a PCSID certificate, password and production request_id
Requires a requestID from the Compliance CSID API
"""
request_data = {
'body': json.dumps({'compliance_request_id': str(CCSID_data['requestID'])}),
'header': {'Authorization': self._l10n_sa_authorization_header(CCSID_data)}
}
return self._l10n_sa_call_api(request_data, ZATCA_API_URLS['apis']['pcsid'], 'POST')
def _l10n_sa_api_renew_production_CSID(self, PCSID_data, OTP):
"""
API call to the Production CSID API to renew a PCSID certificate, password and production request_id
Requires an expired Production CSID (PCSID_data)
"""
self.ensure_one()
auth_data = PCSID_data
# For renewal, the sandbox API expects a specific Username/Password, which are set in the SANDBOX_AUTH dict
if self.company_id.l10n_sa_api_mode == 'sandbox':
auth_data = SANDBOX_AUTH
request_data = {
'body': json.dumps({'csr': self.l10n_sa_csr.decode()}),
'header': {
'OTP': OTP,
'Authorization': self._l10n_sa_authorization_header(auth_data)
}
}
return self._l10n_sa_call_api(request_data, ZATCA_API_URLS['apis']['pcsid'], 'PATCH')
def _l10n_sa_api_compliance_checks(self, xml_content, CCSID_data):
"""
API call to the COMPLIANCE CHECKS endpoint to validate a signed test invoice
Requires the CCSID data returned by the Compliance CSID API
"""
invoice_tree = etree.fromstring(xml_content)
# Get the Invoice Hash from the XML document
invoice_hash_node = invoice_tree.xpath('//*[@Id="invoiceSignedData"]/*[local-name()="DigestValue"]')[0]
invoice_hash = invoice_hash_node.text
# Get the Invoice UUID from the XML document
invoice_uuid_node = invoice_tree.xpath('//*[local-name()="UUID"]')[0]
invoice_uuid = invoice_uuid_node.text
request_data = {
'body': json.dumps({
"invoiceHash": invoice_hash,
"uuid": invoice_uuid,
"invoice": b64encode(xml_content.encode()).decode()
}),
'header': {
'Authorization': self._l10n_sa_authorization_header(CCSID_data),
'Clearance-Status': '1'
}
}
return self._l10n_sa_call_api(request_data, ZATCA_API_URLS['apis']['compliance'], 'POST')
def _l10n_sa_get_api_clearance_url(self, invoice):
"""
Return the API to be used for clearance. To be overridden to account for other cases, such as reporting.
"""
return ZATCA_API_URLS['apis']['reporting' if invoice._l10n_sa_is_simplified() else 'clearance']
def _l10n_sa_api_clearance(self, invoice, xml_content, PCSID_data):
"""
API call to the CLEARANCE/REPORTING endpoint to sign an invoice
- If SIMPLIFIED invoice: Reporting
- If STANDARD invoice: Clearance
"""
invoice_tree = etree.fromstring(xml_content)
invoice_hash_node = invoice_tree.xpath('//*[@Id="invoiceSignedData"]/*[local-name()="DigestValue"]')[0]
invoice_hash = invoice_hash_node.text
request_data = {
'body': json.dumps({
"invoiceHash": invoice_hash,
"uuid": invoice.l10n_sa_uuid,
"invoice": b64encode(xml_content.encode()).decode()
}),
'header': {
'Authorization': self._l10n_sa_authorization_header(PCSID_data),
'Clearance-Status': '1'
}
}
url_string = self._l10n_sa_get_api_clearance_url(invoice)
return self._l10n_sa_call_api(request_data, url_string, 'POST')
# ====== Certificate Methods =======
def _l10n_sa_get_pcsid_validity(self, PCSID_data):
"""
Return PCSID expiry date
"""
b64_decoded_pcsid = b64decode(PCSID_data['binarySecurityToken'])
x509_certificate = load_der_x509_certificate(b64decode(b64_decoded_pcsid.decode()), default_backend())
return x509_certificate.not_valid_after
def _l10n_sa_request_production_csid(self, csid_data, renew=False, otp=None):
"""
Generate company Production CSID data
"""
self.ensure_one()
return (
self._l10n_sa_api_renew_production_CSID(csid_data, otp)
if renew
else self._l10n_sa_api_get_production_CSID(csid_data)
)
def _l10n_sa_api_get_pcsid(self):
"""
Get the Production CSID data required to perform ZATCA API calls, raising an error if it is missing or expired.
"""
self.ensure_one()
if not self.l10n_sa_production_csid_json:
raise UserError(_("Please, make a request to obtain the Compliance CSID and Production CSID before sending "
"documents to ZATCA"))
pcsid_validity = self.env.ref('l10n_sa_edi.edi_sa_zatca')._l10n_sa_get_zatca_datetime(self.l10n_sa_production_csid_validity)
time_now = self.env.ref('l10n_sa_edi.edi_sa_zatca')._l10n_sa_get_zatca_datetime(datetime.now())
if pcsid_validity < time_now and self.company_id.l10n_sa_api_mode != 'sandbox':
raise UserError(_("Production certificate has expired, please renew the PCSID before proceeding"))
return json.loads(self.l10n_sa_production_csid_json)
# ====== API Helper Methods =======
def _l10n_sa_call_api(self, request_data, request_url, method):
"""
Helper function to make api calls to the ZATCA API Endpoint
"""
api_url = ZATCA_API_URLS[self.env.company.l10n_sa_api_mode]
request_url = urljoin(api_url, request_url)
try:
request_response = requests.request(method, request_url, data=request_data.get('body'),
headers={
**self._l10n_sa_api_headers(),
**request_data.get('header')
}, timeout=(30, 30))
request_response.raise_for_status()
except (ValueError, HTTPError) as ex:
# In the case of an explicit error from ZATCA, i.e we got a response but the code of the response is not 2xx
return {
'error': _("Server returned an unexpected error: ") + (request_response.text or str(ex)),
'blocking_level': 'error'
}
except RequestException as ex:
# Usually only happens if a Timeout occurs. In this case we're not sure if the invoice was accepted or
# rejected, or if it even made it to ZATCA
return {'error': str(ex), 'blocking_level': 'warning', 'excepted': True}
try:
response_data = request_response.json()
except json.decoder.JSONDecodeError:
return {
'error': _("JSON response from ZATCA could not be decoded"),
'blocking_level': 'error'
}
if not request_response.ok and (response_data.get('errors') or response_data.get('warnings')):
if isinstance(response_data, dict) and response_data.get('errors'):
return {
'error': _("Invoice submission to ZATCA returned errors"),
'json_errors': response_data['errors'],
'blocking_level': 'error',
}
return {
'error': request_response.reason,
'blocking_level': 'error'
}
return response_data
def _l10n_sa_api_headers(self):
"""
Return the base headers to be included in ZATCA API calls
"""
return {
'Content-Type': 'application/json',
'Accept-Language': 'en',
'Accept-Version': 'V2'
}
def _l10n_sa_authorization_header(self, CSID_data):
"""
Compute the Authorization header by combining the CSID and the Secret key, then encode to Base64
"""
auth_data = CSID_data
auth_str = "%s:%s" % (auth_data['binarySecurityToken'], auth_data['secret'])
return 'Basic ' + b64encode(auth_str.encode()).decode()
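# Illustrative example, not part of the original file (token/secret values hypothetical):
# with binarySecurityToken='dG9rZW4=' and secret='c2VjcmV0' the result is
#     'Basic ' + b64encode(b'dG9rZW4=:c2VjcmV0').decode()
# i.e. HTTP Basic auth with the CSID token as username and its secret as password.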
def _l10n_sa_load_edi_demo_data(self):
self.ensure_one()
self.company_id.l10n_sa_private_key = self.company_id._l10n_sa_generate_private_key()
self.write({
'l10n_sa_serial_number': 'SIDI3-CBMPR-L2D8X-KM0KN-X4ISJ',
'l10n_sa_compliance_checks_passed': True,
'l10n_sa_csr': b'LS0tLS1CRUdJTiBDRVJUSUZJQ0FURSBSRVFVRVNULS0tLS0KTUlJQ2NqQ0NBaGNDQVFBd2djRXhDekFKQmdOVkJBWVRBbE5CTVJNd0VRWURWUVFMREFvek1UQXhOelV6T1RjMApNUk13RVFZRFZRUUtEQXBUUVNCRGIyMXdZVzU1TVJNd0VRWURWUVFEREFwVFFTQkRiMjF3WVc1NU1SZ3dGZ1lEClZRUmhEQTh6TVRBeE56VXpPVGMwTURBd01ETXhEekFOQmdOVkJBZ01CbEpwZVdGa2FERklNRVlHQTFVRUJ3dy8KdzVqQ3A4T1o0b0NldzVuaWdLYkRtTUt2dzVuRm9NT1o0b0NndzVqQ3FTRERtTUtudzVuaWdKN0RtZUtBcHNPWgo0b0NndzVuTGhzT1l3ckhEbU1LcE1GWXdFQVlIS29aSXpqMENBUVlGSzRFRUFBb0RRZ0FFN2ZpZWZWQ21HcTlzCmV0OVl4aWdQNzZWUmJxZlh0VWNtTk1VN3FkTlBiSm5NNGh5R1QwanpPcXUrSWNXWW5IelFJYmxJVmsydENPQnQKYjExanY4MGVwcUNCOVRDQjhnWUpLb1pJaHZjTkFRa09NWUhrTUlIaE1DUUdDU3NHQVFRQmdqY1VBZ1FYRXhWUQpVa1ZhUVZSRFFTMURiMlJsTFZOcFoyNXBibWN3Z2JnR0ExVWRFUVNCc0RDQnJhU0JxakNCcHpFME1ESUdBMVVFCkJBd3JNUzFQWkc5dmZESXRNVFY4TXkxVFNVUkpNeTFEUWsxUVVpMU1Na1E0V0MxTFRUQkxUaTFZTkVsVFNqRWYKTUIwR0NnbVNKb21UOGl4a0FRRU1Eek14TURFM05UTTVOelF3TURBd016RU5NQXNHQTFVRURBd0VNVEV3TURFdgpNQzBHQTFVRUdnd21RV3dnUVcxcGNpQk5iMmhoYlcxbFpDQkNhVzRnUVdKa2RXd2dRWHBwZWlCVGRISmxaWFF4CkRqQU1CZ05WQkE4TUJVOTBhR1Z5TUFvR0NDcUdTTTQ5QkFNQ0Ewa0FNRVlDSVFEb3VCeXhZRDRuQ2pUQ2V6TkYKczV6SmlVWW1QZVBRNnFWNDdZemRHeWRla1FJaEFPRjNVTWF4UFZuc29zOTRFMlNkT2JJcTVYYVAvKzlFYWs5TgozMUtWRUkvTQotLS0tLUVORCBDRVJUSUZJQ0FURSBSRVFVRVNULS0tLS0K',
'l10n_sa_compliance_csid_json': """{"requestID": 1234567890123, "dispositionMessage": "ISSUED", "binarySecurityToken": "TUlJQ2xUQ0NBanVnQXdJQkFnSUdBWWgydEhlOU1Bb0dDQ3FHU000OUJBTUNNQlV4RXpBUkJnTlZCQU1NQ21WSmJuWnZhV05wYm1jd0hoY05Nak13TmpBeE1URXlOVEV6V2hjTk1qZ3dOVE14TWpFd01EQXdXakNCd1RFTE1Ba0dBMVVFQmhNQ1UwRXhFekFSQmdOVkJBc01Dak14TURFM05UTTVOelF4RXpBUkJnTlZCQW9NQ2xOQklFTnZiWEJoYm5reEV6QVJCZ05WQkFNTUNsTkJJRU52YlhCaGJua3hHREFXQmdOVkJHRU1Eek14TURFM05UTTVOelF3TURBd016RVBNQTBHQTFVRUNBd0dVbWw1WVdSb01VZ3dSZ1lEVlFRSEREL0RtTUtudzVuaWdKN0RtZUtBcHNPWXdxL0RtY1dndzVuaWdLRERtTUtwSU1PWXdxZkRtZUtBbnNPWjRvQ213NW5pZ0tERG1jdUd3NWpDc2NPWXdxa3dWakFRQmdjcWhrak9QUUlCQmdVcmdRUUFDZ05DQUFUdCtKNTlVS1lhcjJ4NjMxakdLQS92cFZGdXA5ZTFSeVkweFR1cDAwOXNtY3ppSElaUFNQTTZxNzRoeFppY2ZOQWh1VWhXVGEwSTRHMXZYV08velI2bW80SE1NSUhKTUF3R0ExVWRFd0VCL3dRQ01BQXdnYmdHQTFVZEVRU0JzRENCcmFTQnFqQ0JwekUwTURJR0ExVUVCQXdyTVMxUFpHOXZmREl0TVRWOE15MVRTVVJKTXkxRFFrMVFVaTFNTWtRNFdDMUxUVEJMVGkxWU5FbFRTakVmTUIwR0NnbVNKb21UOGl4a0FRRU1Eek14TURFM05UTTVOelF3TURBd016RU5NQXNHQTFVRURBd0VNVEV3TURFdk1DMEdBMVVFR2d3bVFXd2dRVzFwY2lCTmIyaGhiVzFsWkNCQ2FXNGdRV0prZFd3Z1FYcHBlaUJUZEhKbFpYUXhEakFNQmdOVkJBOE1CVTkwYUdWeU1Bb0dDQ3FHU000OUJBTUNBMGdBTUVVQ0lRQ2FBNlNKMXBXWDQ4UUE1V1pZVEQ4VmJpODFwZExSY01iZm1NQStZMmNBWlFJZ0NqbXp6Uzh4TnNDWllvWTFoWGIrN3R2NUpKRDVWeUVMR3hER1lyRHFpa2c9", "secret": "dBwSQ1ykNStUO6XRQAQhuDAWAdg/GgNZYNmiwClAGcQ=", "errors": null}""",
'l10n_sa_production_csid_json': """{"requestID": 30368, "tokenType": "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509v3", "dispositionMessage": "ISSUED", "binarySecurityToken": "TUlJRDJ6Q0NBNENnQXdJQkFnSVRid0FBZHFEbUlocXNqcG01Q3dBQkFBQjJvREFLQmdncWhrak9QUVFEQWpCak1SVXdFd1lLQ1pJbWlaUHlMR1FCR1JZRmJHOWpZV3d4RXpBUkJnb0praWFKay9Jc1pBRVpGZ05uYjNZeEZ6QVZCZ29Ka2lhSmsvSXNaQUVaRmdkbGVIUm5ZWHAwTVJ3d0dnWURWUVFERXhOVVUxcEZTVTVXVDBsRFJTMVRkV0pEUVMweE1CNFhEVEl5TURNeU9ERTFORFl6TWxvWERUSXlNRE16TURFMU5EWXpNbG93VFRFTE1Ba0dBMVVFQmhNQ1UwRXhEakFNQmdOVkJBb1RCVXBoY21seU1Sb3dHQVlEVlFRTEV4RktaV1JrWVdnZ1FuSmhibU5vTVRJek5ERVNNQkFHQTFVRUF4TUpNVEkzTGpBdU1DNHhNRll3RUFZSEtvWkl6ajBDQVFZRks0RUVBQW9EUWdBRUQvd2IybGhCdkJJQzhDbm5adm91bzZPelJ5bXltVTlOV1JoSXlhTWhHUkVCQ0VaQjRFQVZyQnVWMnhYaXhZNHFCWWY5ZGRlcnprVzlEd2RvM0lsSGdxT0NBaW93Z2dJbU1JR0xCZ05WSFJFRWdZTXdnWUNrZmpCOE1Sd3dHZ1lEVlFRRURCTXlNakl5TWpNeU5EUTBNelF6YW1abU5ETXlNUjh3SFFZS0NaSW1pWlB5TEdRQkFRd1BNekV3TVRjMU16azNOREF3TURBek1RMHdDd1lEVlFRTURBUXhNREV4TVJFd0R3WURWUVFhREFoVFlXMXdiR1VnUlRFWk1CY0dBMVVFRHd3UVUyRnRjR3hsSUVKMWMzTnBibVZ6Y3pBZEJnTlZIUTRFRmdRVWhXY3NiYkpoakQ1WldPa3dCSUxDK3dOVmZLWXdId1lEVlIwakJCZ3dGb0FVZG1DTSt3YWdyR2RYTlozUG1xeW5LNWsxdFM4d1RnWURWUjBmQkVjd1JUQkRvRUdnUDRZOWFIUjBjRG92TDNSemRHTnliQzU2WVhSallTNW5iM1l1YzJFdlEyVnlkRVZ1Y205c2JDOVVVMXBGU1U1V1QwbERSUzFUZFdKRFFTMHhMbU55YkRDQnJRWUlLd1lCQlFVSEFRRUVnYUF3Z1owd2JnWUlLd1lCQlFVSE1BR0dZbWgwZEhBNkx5OTBjM1JqY213dWVtRjBZMkV1WjI5MkxuTmhMME5sY25SRmJuSnZiR3d2VkZOYVJXbHVkbTlwWTJWVFEwRXhMbVY0ZEdkaGVuUXVaMjkyTG14dlkyRnNYMVJUV2tWSlRsWlBTVU5GTFZOMVlrTkJMVEVvTVNrdVkzSjBNQ3NHQ0NzR0FRVUZCekFCaGg5b2RIUndPaTh2ZEhOMFkzSnNMbnBoZEdOaExtZHZkaTV6WVM5dlkzTndNQTRHQTFVZER3RUIvd1FFQXdJSGdEQWRCZ05WSFNVRUZqQVVCZ2dyQmdFRkJRY0RBZ1lJS3dZQkJRVUhBd013SndZSkt3WUJCQUdDTnhVS0JCb3dHREFLQmdnckJnRUZCUWNEQWpBS0JnZ3JCZ0VGQlFjREF6QUtCZ2dxaGtqT1BRUURBZ05KQURCR0FpRUF5Tmh5Y1EzYk5sTEZkT1BscVlUNlJWUVRXZ25LMUdoME5IZGNTWTRQZkMwQ0lRQ1NBdGhYdnY3dGV0VUw2OVdqcDhCeG5MTE13ZXJ4WmhCbmV3by9nRjNFSkE9PQ==", "secret": "f9YRhopN/G7x0TECOY6nKSCHLNYlb5riAHSFPICo4qw="}"""
})
| 59.13141 | 36,898 |
4,744 |
py
|
PYTHON
|
15.0
|
import re
from odoo import models, fields
from odoo.exceptions import UserError
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec
class ResCompany(models.Model):
_inherit = "res.company"
def _l10n_sa_generate_private_key(self):
"""
Compute a private key for each company that will be used to generate certificate signing requests (CSR)
in order to receive X509 certificates from the ZATCA APIs and sign EDI documents
        - The key is an elliptic-curve key on the secp256k1 curve, which is what this module uses for
          ZATCA CSRs and signatures.
        - It is serialized as an unencrypted PEM so it can be stored on the company record.
        See https://cryptography.io/en/latest/hazmat/primitives/asymmetric/ec/
"""
private_key = ec.generate_private_key(ec.SECP256K1, default_backend())
return private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption())
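        # Minimal reload sketch (assumption: the PEM bytes returned above are stored as-is on the
        # company record); the key can be loaded back with the same cryptography library, e.g.
        #   private_key = serialization.load_pem_private_key(pem_bytes, password=None)
        # No passphrase is needed since the key is serialized with NoEncryption above.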
l10n_sa_private_key = fields.Binary("ZATCA Private key", attachment=False, groups="base.group_system", copy=False,
help="The private key used to generate the CSR and obtain certificates",)
l10n_sa_api_mode = fields.Selection(
[('sandbox', 'Sandbox'), ('preprod', 'Simulation (Pre-Production)'), ('prod', 'Production')],
help="Specifies which API the system should use", required=True,
default='sandbox', copy=False)
l10n_sa_edi_building_number = fields.Char(compute='_compute_address',
inverse='_l10n_sa_edi_inverse_building_number')
l10n_sa_edi_plot_identification = fields.Char(compute='_compute_address',
inverse='_l10n_sa_edi_inverse_plot_identification')
l10n_sa_additional_identification_scheme = fields.Selection(
related='partner_id.l10n_sa_additional_identification_scheme', readonly=False)
l10n_sa_additional_identification_number = fields.Char(
related='partner_id.l10n_sa_additional_identification_number', readonly=False)
def write(self, vals):
for company in self:
if 'l10n_sa_api_mode' in vals:
if company.l10n_sa_api_mode == 'prod' and vals['l10n_sa_api_mode'] != 'prod':
raise UserError("You cannot change the ZATCA Submission Mode once it has been set to Production")
journals = self.env['account.journal'].search([('company_id', '=', company.id)])
journals._l10n_sa_reset_certificates()
journals.l10n_sa_latest_submission_hash = False
return super().write(vals)
def _get_company_address_field_names(self):
""" Override to add ZATCA specific address fields """
return super()._get_company_address_field_names() + \
['l10n_sa_edi_building_number', 'l10n_sa_edi_plot_identification']
def _l10n_sa_edi_inverse_building_number(self):
for company in self:
company.partner_id.l10n_sa_edi_building_number = company.l10n_sa_edi_building_number
def _l10n_sa_edi_inverse_plot_identification(self):
for company in self:
company.partner_id.l10n_sa_edi_plot_identification = company.l10n_sa_edi_plot_identification
def _l10n_sa_get_csr_invoice_type(self):
"""
Return the Invoice Type flag used in the CSR. 4-digit numerical input using 0 & 1 mapped to “TSCZ” where:
- 0: False/Not supported, 1: True/Supported
- T: Tax Invoice (Standard), S: Simplified Invoice, C & Z will be used in the future and should
always be 0
For example: 1100 would mean the Solution will be generating Standard and Simplified invoices.
We can assume Odoo-powered EGS solutions will always generate both Standard & Simplified invoices
:return:
"""
return '1100'
def _l10n_sa_check_organization_unit(self):
"""
Check company Organization Unit according to ZATCA specifications
Standards:
BR-KSA-39
BR-KSA-40
See https://zatca.gov.sa/ar/RulesRegulations/Taxes/Documents/20210528_ZATCA_Electronic_Invoice_XML_Implementation_Standard_vShared.pdf
"""
self.ensure_one()
if not self.vat:
return False
return len(self.vat) == 15 and bool(re.match(r'^3\d{13}3$', self.vat))
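        # Illustrative values (hypothetical VAT numbers): '310175397400003' passes the check
        # (15 digits, starting and ending with '3'), while '123456789012345' or any 14-digit
        # number would fail it.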
| 50.967742 | 4,740 |
978 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class AccountEdiDocument(models.Model):
_inherit = 'account.edi.document'
def _prepare_jobs(self):
"""
Override to achieve the following:
        If there is a job to process that may already be part of the chain (a posted invoice that timed out),
        move it to the beginning of the list.
"""
jobs = super()._prepare_jobs()
if len(jobs) > 1:
move_first_index = 0
for index, job in enumerate(jobs):
documents = job['documents']
if any(d.edi_format_id.code == 'sa_zatca' and d.state == 'to_send' and d.move_id.l10n_sa_chain_index for d in documents):
move_first_index = index
break
jobs = [jobs[move_first_index]] + jobs[:move_first_index] + jobs[move_first_index + 1:]
return jobs
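        # Illustrative sketch (hypothetical jobs): if the prepared jobs are [A, B, C] and B contains a
        # 'sa_zatca' document in state 'to_send' whose move already has a chain index, the list becomes
        # [B, A, C], so the document that may already be part of the ZATCA chain is processed first.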
| 34.928571 | 978 |
669 |
py
|
PYTHON
|
15.0
|
from odoo import models, fields, api, _
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
l10n_sa_api_mode = fields.Selection(related='company_id.l10n_sa_api_mode', readonly=False)
@api.depends('company_id')
def _compute_company_informations(self):
super()._compute_company_informations()
for record in self:
            if record.company_id.country_code == 'SA':
                record.company_informations += _('\nBuilding Number: %s, Plot Identification: %s \nNeighborhood: %s') % (record.company_id.l10n_sa_edi_building_number, record.company_id.l10n_sa_edi_plot_identification, record.company_id.street2)
| 47.785714 | 669 |
21,829 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
from hashlib import sha256
from base64 import b64encode
from lxml import etree
from odoo import models, fields
from odoo.modules.module import get_module_resource
import re
TAX_EXEMPTION_CODES = ['VATEX-SA-29', 'VATEX-SA-29-7', 'VATEX-SA-30']
TAX_ZERO_RATE_CODES = ['VATEX-SA-32', 'VATEX-SA-33', 'VATEX-SA-34-1', 'VATEX-SA-34-2', 'VATEX-SA-34-3', 'VATEX-SA-34-4',
'VATEX-SA-34-5', 'VATEX-SA-35', 'VATEX-SA-36', 'VATEX-SA-EDU', 'VATEX-SA-HEA']
PAYMENT_MEANS_CODE = {
'bank': 42,
'card': 48,
'cash': 10,
'transfer': 30,
'unknown': 1
}
class AccountEdiXmlUBL21Zatca(models.AbstractModel):
_name = "account.edi.xml.ubl_21.zatca"
_inherit = 'account.edi.xml.ubl_21'
_description = "UBL 2.1 (ZATCA)"
def _l10n_sa_get_namespaces(self):
"""
        Namespaces used in the final UBL declaration, required to canonicalize the finalized XML document of the Invoice
"""
return {
'cac': 'urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2',
'cbc': 'urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2',
'ext': 'urn:oasis:names:specification:ubl:schema:xsd:CommonExtensionComponents-2',
'sig': 'urn:oasis:names:specification:ubl:schema:xsd:CommonSignatureComponents-2',
'sac': 'urn:oasis:names:specification:ubl:schema:xsd:SignatureAggregateComponents-2',
'sbc': 'urn:oasis:names:specification:ubl:schema:xsd:SignatureBasicComponents-2',
'ds': 'http://www.w3.org/2000/09/xmldsig#',
'xades': 'http://uri.etsi.org/01903/v1.3.2#'
}
def _l10n_sa_generate_invoice_xml_sha(self, xml_content):
"""
        Transform and canonicalize the invoice xml content, then hash it with the SHA256 algorithm
        and return the resulting hash object
"""
def _canonicalize_xml(content):
"""
Canonicalize XML content using the c14n method. The specs mention using the c14n11 canonicalization,
which is simply calling etree.tostring and setting the method argument to 'c14n'. There are minor
differences between c14n11 and c14n canonicalization algorithms, but for the purpose of ZATCA signing,
c14n is enough
"""
return etree.tostring(content, method="c14n", exclusive=False, with_comments=False,
inclusive_ns_prefixes=self._l10n_sa_get_namespaces())
def _transform_and_canonicalize_xml(content):
""" Transform XML content to remove certain elements and signatures using an XSL template """
invoice_xsl = etree.parse(get_module_resource('l10n_sa_edi', 'data', 'pre-hash_invoice.xsl'))
transform = etree.XSLT(invoice_xsl)
return _canonicalize_xml(transform(content))
root = etree.fromstring(xml_content)
# Transform & canonicalize the XML content
transformed_xml = _transform_and_canonicalize_xml(root)
# Get the SHA256 hashed value of the XML content
return sha256(transformed_xml)
def _l10n_sa_generate_invoice_xml_hash(self, xml_content, mode='hexdigest'):
"""
Generate the b64 encoded sha256 hash of a given xml string:
- First: Transform the xml content using a pre-hash_invoice.xsl file
- Second: Canonicalize the transformed xml content using the c14n method
- Third: hash the canonicalized content using the sha256 algorithm then encode it into b64 format
"""
xml_sha = self._l10n_sa_generate_invoice_xml_sha(xml_content)
if mode == 'hexdigest':
xml_hash = xml_sha.hexdigest().encode()
elif mode == 'digest':
xml_hash = xml_sha.digest()
return b64encode(xml_hash)
def _l10n_sa_get_previous_invoice_hash(self, invoice):
""" Function that returns the Base 64 encoded SHA256 hash of the previously submitted invoice """
if invoice.company_id.l10n_sa_api_mode == 'sandbox' or not invoice.journal_id.l10n_sa_latest_submission_hash:
# If no invoice, or if using Sandbox, return the b64 encoded SHA256 value of the '0' character
return "NWZlY2ViNjZmZmM4NmYzOGQ5NTI3ODZjNmQ2OTZjNzljMmRiYzIzOWRkNGU5MWI0NjcyOWQ3M2EyN2ZiNTdlOQ=="
return invoice.journal_id.l10n_sa_latest_submission_hash
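        # Note on the fallback value above: it is the base64 encoding of the hex SHA256 digest of the
        # character '0', which can be reproduced with the imports at the top of this file:
        #   b64encode(sha256(b'0').hexdigest().encode())
        #   -> b'NWZlY2ViNjZmZmM4NmYzOGQ5NTI3ODZjNmQ2OTZjNzljMmRiYzIzOWRkNGU5MWI0NjcyOWQ3M2EyN2ZiNTdlOQ=='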
def _get_delivery_vals_list(self, invoice):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
shipping_address = False
if 'partner_shipping_id' in invoice._fields and invoice.partner_shipping_id:
shipping_address = invoice.partner_shipping_id
return [{'actual_delivery_date': invoice.l10n_sa_delivery_date,
'delivery_address_vals': self._get_partner_address_vals(shipping_address) if shipping_address else {},}]
def _get_partner_party_identification_vals_list(self, partner):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
return [{
'id_attrs': {'schemeID': partner.l10n_sa_additional_identification_scheme},
'id': partner.l10n_sa_additional_identification_number if partner.l10n_sa_additional_identification_scheme != 'TIN' else partner.vat
}]
def _l10n_sa_get_payment_means_code(self, invoice):
""" Return payment means code to be used to set the value on the XML file """
return 'unknown'
def _get_invoice_payment_means_vals_list(self, invoice):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
res = super()._get_invoice_payment_means_vals_list(invoice)
res[0]['payment_means_code'] = PAYMENT_MEANS_CODE.get(self._l10n_sa_get_payment_means_code(invoice), PAYMENT_MEANS_CODE['unknown'])
res[0]['payment_means_code_attrs'] = {'listID': 'UN/ECE 4461'}
res[0]['adjustment_reason'] = invoice.ref
return res
def _get_partner_address_vals(self, partner):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
return {
**super()._get_partner_address_vals(partner),
'building_number': partner.l10n_sa_edi_building_number,
'neighborhood': partner.street2,
'plot_identification': partner.l10n_sa_edi_plot_identification,
}
def _export_invoice_filename(self, invoice):
"""
Generate the name of the invoice XML file according to ZATCA business rules:
Seller Vat Number (BT-31), Date (BT-2), Time (KSA-25), Invoice Number (BT-1)
"""
vat = invoice.company_id.partner_id.commercial_partner_id.vat
invoice_number = re.sub("[^a-zA-Z0-9 -]", "-", invoice.name)
invoice_date = fields.Datetime.context_timestamp(self.with_context(tz='Asia/Riyadh'), invoice.l10n_sa_confirmation_datetime)
return '%s_%s_%s.xml' % (vat, invoice_date.strftime('%Y%m%dT%H%M%S'), invoice_number)
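        # Illustrative example (hypothetical values): with VAT '310175397400003', a confirmation datetime
        # rendered as 2023-06-01 14:30:00 (Asia/Riyadh) and invoice name 'INV/2023/0001', the method
        # returns '310175397400003_20230601T143000_INV-2023-0001.xml'.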
def _l10n_sa_get_invoice_transaction_code(self, invoice):
"""
        Returns the transaction code string to be inserted in the UBL file. It follows the format:
- NNPNESB, in compliance with KSA Business Rule KSA-2, where:
- NN (positions 1 and 2) = invoice subtype:
- 01 for tax invoice
- 02 for simplified tax invoice
- E (position 5) = Exports invoice transaction, 0 for false, 1 for true
"""
return '0%s00%s00' % (
'2' if invoice._l10n_sa_is_simplified() else '1',
'1' if invoice.commercial_partner_id.country_id != invoice.company_id.country_id and not invoice._l10n_sa_is_simplified() else '0'
)
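        # Examples derived from the expression above:
        #   standard tax invoice, domestic customer -> '0100000'
        #   standard tax invoice, foreign customer  -> '0100100' (export flag set)
        #   simplified tax invoice                  -> '0200000'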
def _l10n_sa_get_invoice_type(self, invoice):
"""
Returns the invoice type string to be inserted in the UBL file
- 383: Debit Note
- 381: Credit Note
- 388: Invoice
"""
return 383 if invoice.debit_origin_id else 381 if invoice.move_type == 'out_refund' else 388
def _l10n_sa_get_billing_reference_vals(self, invoice):
""" Get the billing reference vals required to render the BillingReference for credit/debit notes """
if self._l10n_sa_get_invoice_type(invoice) != 388:
return {
'id': (invoice.reversed_entry_id.name or invoice.ref) if invoice.move_type == 'out_refund' else invoice.debit_origin_id.name,
'issue_date': None,
}
return {}
def _get_partner_party_tax_scheme_vals_list(self, partner, role):
"""
Override to return an empty list if the partner is a customer and their country is not KSA.
This is according to KSA Business Rule BR-KSA-46 which states that in the case of Export Invoices,
the buyer VAT registration number or buyer group VAT registration number must not exist in the Invoice
"""
if role != 'customer' or partner.country_id.code == 'SA':
return super()._get_partner_party_tax_scheme_vals_list(partner, role)
return []
def _apply_invoice_tax_filter(self, tax_values):
""" Override to filter out withholding tax """
res = not tax_values['tax_id'].l10n_sa_is_retention
# If the move that is being sent is not a down payment invoice, and the sale module is installed
# we need to make sure the line is neither retention, nor a down payment line
if not tax_values['base_line_id'].move_id._is_downpayment():
return not tax_values['tax_id'].l10n_sa_is_retention and not tax_values['base_line_id']._get_downpayment_lines()
return res
def _apply_invoice_line_filter(self, invoice_line):
""" Override to filter out down payment lines """
if not invoice_line.move_id._is_downpayment():
return not invoice_line._get_downpayment_lines()
return True
def _l10n_sa_get_prepaid_amount(self, invoice, vals):
""" Calculate the down-payment amount according to ZATCA rules """
downpayment_lines = False if invoice._is_downpayment() else invoice.line_ids.filtered(lambda l: l._get_downpayment_lines())
if downpayment_lines:
tax_vals = invoice._prepare_edi_tax_details(filter_to_apply=lambda t: not t['tax_id'].l10n_sa_is_retention)
base_amount = abs(sum(tax_vals['invoice_line_tax_details'][l]['base_amount_currency'] for l in downpayment_lines))
tax_amount = abs(sum(tax_vals['invoice_line_tax_details'][l]['tax_amount_currency'] for l in downpayment_lines))
return {
'total_amount': base_amount + tax_amount,
'base_amount': base_amount,
'tax_amount': tax_amount
}
def _l10n_sa_get_monetary_vals(self, invoice, vals):
""" Calculate the invoice monteray amount values, including prepaid amounts (down payment) """
# We use base_amount_currency + tax_amount_currency instead of amount_total because we do not want to include
# withholding tax amounts in our calculations
total_amount = abs(vals['taxes_vals']['base_amount_currency'] + vals['taxes_vals']['tax_amount_currency'])
tax_inclusive_amount = total_amount
tax_exclusive_amount = abs(vals['taxes_vals']['base_amount_currency'])
prepaid_amount = 0
payable_amount = total_amount
# - When we calculate the tax values, we filter out taxes and invoice lines linked to downpayments.
# As such, when we calculate the TaxInclusiveAmount, it already accounts for the tax amount of the downpayment
# Same goes for the TaxExclusiveAmount, and we do not need to add the Tax amount of the downpayment
# - The payable amount does not account for the tax amount of the downpayment, so we add it
downpayment_vals = self._l10n_sa_get_prepaid_amount(invoice, vals)
if downpayment_vals:
# Makes no sense, but according to ZATCA, if there is a downpayment, the TotalInclusiveAmount
# should include the total amount of the invoice (including downpayment amount) PLUS the downpayment
# total amount, AGAIN.
prepaid_amount = tax_inclusive_amount + downpayment_vals['total_amount']
payable_amount = - downpayment_vals['total_amount']
return {
'tax_inclusive_amount': tax_inclusive_amount,
'tax_exclusive_amount': tax_exclusive_amount,
'prepaid_amount': prepaid_amount,
'payable_amount': payable_amount
}
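        # Illustrative figures (hypothetical amounts): if the non-downpayment lines amount to 1000.0 with
        # 150.0 of VAT and a previously invoiced down payment of 100.0 + 15.0 VAT is deducted, then
        # tax_exclusive_amount = 1000.0, tax_inclusive_amount = 1150.0, prepaid_amount = 1150.0 + 115.0
        # = 1265.0 and payable_amount = -115.0, per the rules described above.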
def _get_tax_category_list(self, invoice, taxes):
""" Override to filter out withholding taxes """
non_retention_taxes = taxes.filtered(lambda t: not t.l10n_sa_is_retention)
return super()._get_tax_category_list(invoice, non_retention_taxes)
def _export_invoice_vals(self, invoice):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
vals = super()._export_invoice_vals(invoice)
vals.update({
'main_template': 'account_edi_ubl_cii.ubl_20_Invoice',
'InvoiceType_template': 'l10n_sa_edi.ubl_21_InvoiceType_zatca',
'InvoiceLineType_template': 'l10n_sa_edi.ubl_21_InvoiceLineType_zatca',
'AddressType_template': 'l10n_sa_edi.ubl_21_AddressType_zatca',
'PartyType_template': 'l10n_sa_edi.ubl_21_PartyType_zatca',
'TaxTotalType_template': 'l10n_sa_edi.ubl_21_TaxTotalType_zatca',
'PaymentMeansType_template': 'l10n_sa_edi.ubl_21_PaymentMeansType_zatca',
})
vals['vals'].update({
'profile_id': 'reporting:1.0',
'invoice_type_code_attrs': {'name': self._l10n_sa_get_invoice_transaction_code(invoice)},
'invoice_type_code': self._l10n_sa_get_invoice_type(invoice),
'issue_date': fields.Datetime.context_timestamp(self.with_context(tz='Asia/Riyadh'),
invoice.l10n_sa_confirmation_datetime),
'previous_invoice_hash': self._l10n_sa_get_previous_invoice_hash(invoice),
'billing_reference_vals': self._l10n_sa_get_billing_reference_vals(invoice),
'tax_total_vals': self._l10n_sa_get_additional_tax_total_vals(invoice, vals),
# Due date is not required for ZATCA UBL 2.1
'due_date': None,
})
vals['vals']['legal_monetary_total_vals'].update(self._l10n_sa_get_monetary_vals(invoice, vals))
return vals
def _l10n_sa_get_additional_tax_total_vals(self, invoice, vals):
"""
For ZATCA, an additional TaxTotal element needs to be included in the UBL file
(Only for the Invoice, not the lines)
If the invoice is in a different currency from the one set on the company (SAR), then the additional
TaxAmount element needs to hold the tax amount converted to the company's currency.
Business Rules: BT-110 & BT-111
"""
curr_amount = abs(vals['taxes_vals']['tax_amount_currency'])
if invoice.currency_id != invoice.company_currency_id:
curr_amount = abs(vals['taxes_vals']['tax_amount'])
return vals['vals']['tax_total_vals'] + [{
'currency': invoice.company_currency_id,
'currency_dp': invoice.company_currency_id.decimal_places,
'tax_amount': curr_amount,
}]
def _get_invoice_line_item_vals(self, line, taxes_vals):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
vals = super()._get_invoice_line_item_vals(line, taxes_vals)
vals['sellers_item_identification_vals'] = {'id': line.product_id.code or line.product_id.default_code}
return vals
def _l10n_sa_get_line_prepayment_vals(self, line, taxes_vals):
"""
If an invoice line is linked to a down payment invoice, we need to return the proper values
to be included in the UBL
"""
if not line.move_id._is_downpayment() and line.sale_line_ids and all(sale_line.is_downpayment for sale_line in line.sale_line_ids):
prepayment_move_id = line.sale_line_ids.invoice_lines.move_id.filtered(lambda m: m._is_downpayment())
return {
'prepayment_id': prepayment_move_id.name,
'issue_date': fields.Datetime.context_timestamp(self.with_context(tz='Asia/Riyadh'),
prepayment_move_id.l10n_sa_confirmation_datetime),
'document_type_code': 386
}
return {}
def _get_invoice_line_vals(self, line, taxes_vals):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
def grouping_key_generator(tax_values):
tax = tax_values['tax_id']
tax_category_vals = self._get_tax_category_list(line.move_id, tax)[0]
return {
'tax_category_id': tax_category_vals['id'],
'tax_category_percent': tax_category_vals['percent'],
'_tax_category_vals_': tax_category_vals,
}
if not line.move_id._is_downpayment() and line._get_downpayment_lines():
# When we initially calculate the taxes_vals, we filter out the down payment lines, which means we have no
# values to set in the TaxableAmount and TaxAmount nodes on the InvoiceLine for the down payment.
# This means ZATCA will return a warning message for the BR-KSA-80 rule since it cannot calculate the
            # TaxableAmount and the TaxAmount nodes correctly. To avoid this, we recalculate the taxes_vals just before
# we set the values for the down payment line, and we do not pass any filters to the _prepare_edi_tax_details
# method
line_taxes = line.move_id._prepare_edi_tax_details(grouping_key_generator=grouping_key_generator)
taxes_vals = line_taxes['invoice_line_tax_details'][line]
line_vals = super()._get_invoice_line_vals(line, taxes_vals)
total_amount_sa = abs(taxes_vals['tax_amount_currency'] + taxes_vals['base_amount_currency'])
extension_amount = abs(line_vals['line_extension_amount'])
if not line.move_id._is_downpayment() and line._get_downpayment_lines():
total_amount_sa = extension_amount = 0
line_vals['price_vals']['price_amount'] = 0
line_vals['tax_total_vals'][0]['tax_amount'] = 0
line_vals['prepayment_vals'] = self._l10n_sa_get_line_prepayment_vals(line, taxes_vals)
line_vals['tax_total_vals'][0]['total_amount_sa'] = total_amount_sa
line_vals['invoiced_quantity'] = abs(line_vals['invoiced_quantity'])
line_vals['line_extension_amount'] = extension_amount
return line_vals
def _get_invoice_tax_totals_vals_list(self, invoice, taxes_vals):
"""
Override to include/update values specific to ZATCA's UBL 2.1 specs.
In this case, we make sure the tax amounts are always absolute (no negative values)
"""
res = [{
'currency': invoice.currency_id,
'currency_dp': invoice.currency_id.decimal_places,
'tax_amount': abs(taxes_vals['tax_amount_currency']),
'tax_subtotal_vals': [{
'currency': invoice.currency_id,
'currency_dp': invoice.currency_id.decimal_places,
'taxable_amount': abs(vals['base_amount_currency']),
'tax_amount': abs(vals['tax_amount_currency']),
'percent': vals['_tax_category_vals_']['percent'],
'tax_category_vals': vals['_tax_category_vals_'],
} for vals in taxes_vals['tax_details'].values()],
}]
return res
def _get_tax_unece_codes(self, invoice, tax):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
def _exemption_reason(code, reason):
return {
'tax_category_code': code,
'tax_exemption_reason_code': reason,
'tax_exemption_reason': exemption_codes[reason].split(reason)[1].lstrip(),
}
supplier = invoice.company_id.partner_id.commercial_partner_id
customer = invoice.commercial_partner_id
if supplier.country_id == customer.country_id and supplier.country_id.code == 'SA':
if not tax or tax.amount == 0:
exemption_codes = dict(tax._fields["l10n_sa_exemption_reason_code"]._description_selection(self.env))
if tax.l10n_sa_exemption_reason_code in TAX_EXEMPTION_CODES:
return _exemption_reason('E', tax.l10n_sa_exemption_reason_code)
elif tax.l10n_sa_exemption_reason_code in TAX_ZERO_RATE_CODES:
return _exemption_reason('Z', tax.l10n_sa_exemption_reason_code)
else:
return {
'tax_category_code': 'O',
'tax_exemption_reason_code': 'Not subject to VAT',
'tax_exemption_reason': 'Not subject to VAT',
}
else:
return {
'tax_category_code': 'S',
'tax_exemption_reason_code': None,
'tax_exemption_reason': None,
}
return super()._get_tax_unece_codes(invoice, tax)
def _get_invoice_payment_terms_vals_list(self, invoice):
""" Override to include/update values specific to ZATCA's UBL 2.1 specs """
return []
| 53.898765 | 21,829 |
1,584 |
py
|
PYTHON
|
15.0
|
from odoo import fields, models, api
class ResPartner(models.Model):
_inherit = 'res.partner'
l10n_sa_edi_building_number = fields.Char("Building Number")
l10n_sa_edi_plot_identification = fields.Char("Plot Identification")
l10n_sa_additional_identification_scheme = fields.Selection([
('TIN', 'Tax Identification Number'),
('CRN', 'Commercial Registration Number'),
('MOM', 'Momra License'),
('MLS', 'MLSD License'),
('700', '700 Number'),
('SAG', 'Sagia License'),
('NAT', 'National ID'),
('GCC', 'GCC ID'),
('IQA', 'Iqama Number'),
('PAS', 'Passport ID'),
('OTH', 'Other ID')
], default="OTH", string="Identification Scheme", help="Additional Identification scheme for Seller/Buyer")
l10n_sa_additional_identification_number = fields.Char("Identification Number (SA)",
help="Additional Identification Number for Seller/Buyer")
@api.model
def _commercial_fields(self):
return super()._commercial_fields() + ['l10n_sa_edi_building_number',
'l10n_sa_edi_plot_identification',
'l10n_sa_additional_identification_scheme',
'l10n_sa_additional_identification_number']
def _address_fields(self):
return super()._address_fields() + ['l10n_sa_edi_building_number',
'l10n_sa_edi_plot_identification']
| 44 | 1,584 |
1,643 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2015 WT-IO-IT GmbH (https://www.wt-io-it.at)
# Mag. Wolfgang Taferner <[email protected]>
# List of contributors:
# Mag. Wolfgang Taferner <[email protected]>
# Josse Colpaert <[email protected]>
{
"name": "Austria - Accounting",
"version": "3.0",
"author": "WT-IO-IT GmbH, Wolfgang Taferner",
"website": "https://www.wt-io-it.at",
'category': 'Accounting/Localizations/Account Charts',
'summary': "Austrian Standardized Charts & Tax",
"description": """
Austrian charts of accounts (Einheitskontenrahmen 2010).
==========================================================
* Defines the following chart of account templates:
* Austrian General Chart of accounts 2010
* Defines templates for VAT on sales and purchases
* Defines tax templates
* Defines fiscal positions for Austrian fiscal legislation
* Defines tax reports U1/U30
""",
"depends": [
"account",
"base_iban",
"base_vat",
],
"data": [
'data/res.country.state.csv',
'data/account_account_tag.xml',
'data/account_account_template.xml',
'data/account_chart_template.xml',
'data/account_tax_report_data.xml',
'data/account_tax_group_data.xml',
'data/account_tax_template.xml',
'data/account_fiscal_position_template.xml',
'data/account_chart_template_configure_data.xml',
],
'demo': [
'demo/demo_company.xml',
],
'license': 'LGPL-3',
}
| 32.215686 | 1,643 |
1,114 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Notes',
'version': '1.0',
'category': 'Productivity/Notes',
'description': "",
'website': 'https://www.odoo.com/app/notes',
'summary': 'Organize your work with memos',
'sequence': 260,
'depends': [
'mail',
],
'data': [
'security/note_security.xml',
'security/ir.model.access.csv',
'data/mail_activity_data.xml',
'data/note_data.xml',
'data/res_users_data.xml',
'views/note_views.xml',
],
'demo': [
'data/note_demo.xml',
],
'test': [
],
'installable': True,
'application': True,
'auto_install': False,
'assets': {
'web.assets_backend': [
'note/static/src/scss/note.scss',
'note/static/src/js/systray_activity_menu.js',
],
'web.qunit_suite_tests': [
'note/static/tests/**/*',
],
'web.assets_qweb': [
'note/static/src/xml/**/*',
],
},
'license': 'LGPL-3',
}
| 25.318182 | 1,114 |
626 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.base.tests.common import TransactionCaseWithUserDemo
class TestNote(TransactionCaseWithUserDemo):
def test_bug_lp_1156215(self):
""" ensure any users can create new users """
demo_user = self.user_demo
group_erp = self.env.ref('base.group_erp_manager')
demo_user.write({
'groups_id': [(4, group_erp.id)],
})
# must not fail
demo_user.create({
'name': 'test bug lp:1156215',
'login': 'lp_1156215',
})
| 28.454545 | 626 |
437 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields
class MailActivityType(models.Model):
_inherit = "mail.activity.type"
category = fields.Selection(selection_add=[('reminder', 'Reminder')])
class MailActivity(models.Model):
_inherit = "mail.activity"
note_id = fields.Many2one('note.note', string="Related Note", ondelete='cascade')
| 27.3125 | 437 |
2,824 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, models, modules, _
_logger = logging.getLogger(__name__)
class Users(models.Model):
_name = 'res.users'
_inherit = ['res.users']
@api.model_create_multi
def create(self, vals_list):
users = super().create(vals_list)
user_group_id = self.env['ir.model.data']._xmlid_to_res_id('base.group_user')
        # for new employees, create their own default note stages
users.filtered_domain([('groups_id', 'in', [user_group_id])])._create_note_stages()
return users
@api.model
def _init_data_user_note_stages(self):
emp_group_id = self.env.ref('base.group_user').id
query = """
SELECT res_users.id
FROM res_users
WHERE res_users.active IS TRUE AND EXISTS (
SELECT 1 FROM res_groups_users_rel WHERE res_groups_users_rel.gid = %s AND res_groups_users_rel.uid = res_users.id
) AND NOT EXISTS (
SELECT 1 FROM note_stage stage WHERE stage.user_id = res_users.id
)
GROUP BY id"""
self.env.cr.execute(query, (emp_group_id,))
uids = [res[0] for res in self.env.cr.fetchall()]
self.browse(uids)._create_note_stages()
def _create_note_stages(self):
for num in range(4):
stage = self.env.ref('note.note_stage_%02d' % (num,), raise_if_not_found=False)
if not stage:
break
for user in self:
stage.sudo().copy(default={'user_id': user.id})
else:
_logger.debug("Created note columns for %s", self)
@api.model
def systray_get_activities(self):
""" If user have not scheduled any note, it will not appear in activity menu.
Making note activity always visible with number of notes on label. If there is no notes,
activity menu not visible for note.
"""
activities = super(Users, self).systray_get_activities()
notes_count = self.env['note.note'].search_count([('user_id', '=', self.env.uid)])
if notes_count:
note_index = next((index for (index, a) in enumerate(activities) if a["model"] == "note.note"), None)
note_label = _('Notes')
if note_index is not None:
activities[note_index]['name'] = note_label
else:
activities.append({
'type': 'activity',
'name': note_label,
'model': 'note.note',
'icon': modules.module.get_module_icon(self.env['note.note']._original_module),
'total_count': 0,
'today_count': 0,
'overdue_count': 0,
'planned_count': 0
})
return activities
| 38.684932 | 2,824 |
6,455 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.tools import html2plaintext
from odoo.addons.web_editor.controllers.main import handle_history_divergence
class Stage(models.Model):
_name = "note.stage"
_description = "Note Stage"
_order = 'sequence'
name = fields.Char('Stage Name', translate=True, required=True)
sequence = fields.Integer(help="Used to order the note stages", default=1)
user_id = fields.Many2one('res.users', string='Owner', required=True, ondelete='cascade', default=lambda self: self.env.uid, help="Owner of the note stage")
fold = fields.Boolean('Folded by Default')
class Tag(models.Model):
_name = "note.tag"
_description = "Note Tag"
name = fields.Char('Tag Name', required=True, translate=True)
color = fields.Integer('Color Index')
_sql_constraints = [
('name_uniq', 'unique (name)', "Tag name already exists !"),
]
class Note(models.Model):
_name = 'note.note'
_inherit = ['mail.thread', 'mail.activity.mixin']
_description = "Note"
_order = 'sequence, id desc'
def _get_default_stage_id(self):
return self.env['note.stage'].search([('user_id', '=', self.env.uid)], limit=1)
name = fields.Text(compute='_compute_name', string='Note Summary', store=True)
user_id = fields.Many2one('res.users', string='Owner', default=lambda self: self.env.uid)
memo = fields.Html('Note Content')
sequence = fields.Integer('Sequence', default=0)
stage_id = fields.Many2one('note.stage', compute='_compute_stage_id',
inverse='_inverse_stage_id', string='Stage', default=_get_default_stage_id)
stage_ids = fields.Many2many('note.stage', 'note_stage_rel', 'note_id', 'stage_id',
string='Stages of Users', default=_get_default_stage_id)
open = fields.Boolean(string='Active', default=True)
date_done = fields.Date('Date done')
color = fields.Integer(string='Color Index')
tag_ids = fields.Many2many('note.tag', 'note_tags_rel', 'note_id', 'tag_id', string='Tags')
# modifying property of ``mail.thread`` field
message_partner_ids = fields.Many2many(compute_sudo=True)
@api.depends('memo')
def _compute_name(self):
""" Read the first line of the memo to determine the note name """
for note in self:
text = html2plaintext(note.memo) if note.memo else ''
note.name = text.strip().replace('*', '').split("\n")[0]
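        # Illustrative example (hypothetical memo): a memo of '<p>Buy milk<br/>and eggs</p>' is converted
        # to plain text and only the first line is kept, so the note name becomes 'Buy milk'.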
def _compute_stage_id(self):
first_user_stage = self.env['note.stage'].search([('user_id', '=', self.env.uid)], limit=1)
for note in self:
for stage in note.stage_ids.filtered(lambda stage: stage.user_id == self.env.user):
note.stage_id = stage
# note without user's stage
if not note.stage_id:
note.stage_id = first_user_stage
def _inverse_stage_id(self):
for note in self.filtered('stage_id'):
note.stage_ids = note.stage_id + note.stage_ids.filtered(lambda stage: stage.user_id != self.env.user)
@api.model
def name_create(self, name):
return self.create({'memo': name}).name_get()[0]
@api.model
def read_group(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True):
if groupby and groupby[0] == "stage_id" and (len(groupby) == 1 or lazy):
stages = self.env['note.stage'].search([('user_id', '=', self.env.uid)])
if stages:
# if the user has some stages
result = []
for stage in stages:
# notes by stage for stages user
nb_stage_counts = self.search_count(domain + [('stage_ids', '=', stage.id)])
result.append({
'__context': {'group_by': groupby[1:]},
'__domain': domain + [('stage_ids.id', '=', stage.id)],
'stage_id': (stage.id, stage.name),
'stage_id_count': nb_stage_counts,
'__count': nb_stage_counts,
'__fold': stage.fold,
})
# note without user's stage
nb_notes_ws = self.search_count(domain + [('stage_ids', 'not in', stages.ids)])
if nb_notes_ws:
# add note to the first column if it's the first stage
dom_not_in = ('stage_ids', 'not in', stages.ids)
if result and result[0]['stage_id'][0] == stages[0].id:
dom_in = result[0]['__domain'].pop()
result[0]['__domain'] = domain + ['|', dom_in, dom_not_in]
result[0]['stage_id_count'] += nb_notes_ws
result[0]['__count'] += nb_notes_ws
else:
# add the first stage column
result = [{
'__context': {'group_by': groupby[1:]},
'__domain': domain + [dom_not_in],
'stage_id': (stages[0].id, stages[0].name),
'stage_id_count': nb_notes_ws,
'__count': nb_notes_ws,
                            '__fold': stages[0].fold,
}] + result
else: # if stage_ids is empty, get note without user's stage
nb_notes_ws = self.search_count(domain)
if nb_notes_ws:
result = [{ # notes for unknown stage
'__context': {'group_by': groupby[1:]},
'__domain': domain,
'stage_id': False,
'stage_id_count': nb_notes_ws,
'__count': nb_notes_ws
}]
else:
result = []
return result
return super(Note, self).read_group(domain, fields, groupby, offset=offset, limit=limit, orderby=orderby, lazy=lazy)
def action_close(self):
return self.write({'open': False, 'date_done': fields.date.today()})
def action_open(self):
return self.write({'open': True})
def write(self, vals):
if len(self) == 1:
handle_history_divergence(self, 'memo', vals)
return super(Note, self).write(vals)
| 44.826389 | 6,455 |
802 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http
from odoo.http import request
class NoteController(http.Controller):
@http.route('/note/new', type='json', auth='user')
def note_new_from_systray(self, note, activity_type_id=None, date_deadline=None):
""" Route to create note and their activity directly from the systray """
note = request.env['note.note'].create({'memo': note})
if date_deadline:
note.activity_schedule(
activity_type_id=activity_type_id or request.env['mail.activity.type'].sudo().search([('category', '=', 'reminder')], limit=1).id,
note=note.memo,
date_deadline=date_deadline
)
return note.id
| 40.1 | 802 |
999 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Event Booths',
'category': 'Marketing/Events',
'version': '1.0',
'summary': 'Events, display your booths on your website',
'description': """
Display your booths on your website for the users to register.
""",
'depends': ['website_event', 'event_booth'],
'data': [
'security/ir.model.access.csv',
'security/event_booth_security.xml',
'views/event_type_views.xml',
'views/event_event_views.xml',
'views/event_booth_registration_templates.xml',
'views/event_booth_templates.xml',
],
'demo': [
'data/event_demo.xml',
],
'auto_install': True,
'assets': {
'web.assets_frontend': [
'/website_event_booth/static/src/js/booth_register.js',
'/website_event_booth/static/src/scss/website_event_booth.scss',
]
},
'license': 'LGPL-3',
}
| 31.21875 | 999 |
496 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class EventType(models.Model):
_inherit = 'event.type'
booth_menu = fields.Boolean(
string='Booths on Website', compute='_compute_booth_menu',
readonly=False, store=True)
@api.depends('website_menu')
def _compute_booth_menu(self):
for event_type in self:
event_type.booth_menu = event_type.website_menu
| 29.176471 | 496 |
2,310 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.addons.http_routing.models.ir_http import slug
class Event(models.Model):
_inherit = 'event.event'
exhibition_map = fields.Image(string='Exhibition Map', max_width=1024, max_height=1024)
# frontend menu management
booth_menu = fields.Boolean(
string='Booth Register', compute='_compute_booth_menu',
readonly=False, store=True)
booth_menu_ids = fields.One2many(
'website.event.menu', 'event_id', string='Event Booths Menus',
domain=[('menu_type', '=', 'booth')])
@api.depends('event_type_id', 'website_menu')
def _compute_booth_menu(self):
for event in self:
if event.event_type_id and event.event_type_id != event._origin.event_type_id:
event.booth_menu = event.event_type_id.booth_menu
elif event.website_menu and (event.website_menu != event._origin.website_menu or not event.booth_menu):
event.booth_menu = True
elif not event.website_menu:
event.booth_menu = False
# ------------------------------------------------------------
# WEBSITE MENU MANAGEMENT
# ------------------------------------------------------------
def toggle_booth_menu(self, val):
self.booth_menu = val
def _get_menu_update_fields(self):
return super(Event, self)._get_menu_update_fields() + ['booth_menu']
def _update_website_menus(self, menus_update_by_field=None):
super(Event, self)._update_website_menus(menus_update_by_field=menus_update_by_field)
for event in self:
if event.menu_id and (not menus_update_by_field or event in menus_update_by_field.get('booth_menu')):
event._update_website_menu_entry('booth_menu', 'booth_menu_ids', 'booth')
def _get_menu_type_field_matching(self):
res = super(Event, self)._get_menu_type_field_matching()
res['booth'] = 'booth_menu'
return res
def _get_website_menu_entries(self):
self.ensure_one()
return super(Event, self)._get_website_menu_entries() + [
(_('Get A Booth'), '/event/%s/booth' % slug(self), False, 90, 'booth')
]
| 42 | 2,310 |
323 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class EventMenu(models.Model):
_inherit = "website.event.menu"
menu_type = fields.Selection(
selection_add=[('booth', 'Event Booth Menus')], ondelete={'booth': 'cascade'})
| 29.363636 | 323 |
7,626 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import json
import werkzeug
from werkzeug.exceptions import Forbidden, NotFound
from odoo import exceptions, http, _
from odoo.http import request
from odoo.addons.website_event.controllers.main import WebsiteEventController
class WebsiteEventBoothController(WebsiteEventController):
@http.route('/event/<model("event.event"):event>/booth', type='http', auth='public', website=True, sitemap=True)
def event_booth_main(self, event):
try:
event.check_access_rights('read')
event.check_access_rule('read')
except exceptions.AccessError:
raise Forbidden()
event_sudo = event.sudo()
values = {
'event': event_sudo,
'event_booths': event_sudo.event_booth_ids,
'available_booth_category_ids': event_sudo.event_booth_category_available_ids,
'main_object': event,
}
return request.render('website_event_booth.event_booth_registration', values)
@http.route('/event/<model("event.event"):event>/booth/register',
type='http', auth='public', methods=['POST'], website=True, sitemap=False)
def event_booth_register(self, event, booth_category_id):
event_booth_ids = request.httprequest.form.getlist('event_booth_ids')
return request.redirect(('/event/%s/booth/register_form?' % event.id) + werkzeug.urls.url_encode({
'booth_ids': ','.join(event_booth_ids),
'booth_category_id': int(booth_category_id),
}))
@http.route('/event/<model("event.event"):event>/booth/register_form',
type='http', auth='public', methods=['GET'], website=True, sitemap=False)
def event_booth_contact_form(self, event, booth_ids=None, booth_category_id=None):
if not booth_ids or not booth_category_id:
raise NotFound()
booth_category = request.env['event.booth.category'].sudo().browse(int(booth_category_id))
event_booths = request.env['event.booth'].sudo().browse([int(booth_id) for booth_id in booth_ids.split(',')])
default_contact = {}
if not request.env.user._is_public():
default_contact = {
'name': request.env.user.partner_id.name,
'email': request.env.user.partner_id.email,
'phone': request.env.user.partner_id.phone,
'mobile': request.env.user.partner_id.mobile,
}
else:
visitor = request.env['website.visitor']._get_visitor_from_request()
if visitor.email:
default_contact = {
'name': visitor.name,
'email': visitor.email,
'mobile': visitor.mobile,
}
return request.render(
'website_event_booth.event_booth_registration_details',
{'event': event.sudo(),
'default_contact': default_contact,
'booth_category': booth_category,
'event_booths': event_booths,
}
)
def _get_requested_booths(self, event, event_booth_ids):
booth_ids = json.loads(event_booth_ids)
booths = request.env['event.booth'].sudo().search([
('event_id', '=', event.id),
('state', '=', 'available'),
('id', 'in', booth_ids)
])
if booth_ids != booths.ids:
raise Forbidden(_('Booth registration failed. Please try again.'))
if len(booths.booth_category_id) != 1:
raise Forbidden(_('Booths should belong to the same category.'))
return booths
@http.route('/event/<model("event.event"):event>/booth/confirm',
type='http', auth='public', methods=['POST'], website=True, sitemap=False)
def event_booth_registration_confirm(self, event, booth_category_id, event_booth_ids, **kwargs):
booths = self._get_requested_booths(event, event_booth_ids)
booth_values = self._prepare_booth_registration_values(event, kwargs)
booths.action_confirm(booth_values)
return request.redirect(('/event/%s/booth/success?' % event.id) + werkzeug.urls.url_encode({
'booths': ','.join([str(id) for id in booths.ids]),
}))
# This will be removed soon
@http.route('/event/<model("event.event"):event>/booth/success',
type='http', auth='public', methods=['GET'], website=True, sitemap=False)
def event_booth_registration_complete(self, event, booths):
booth_ids = request.env['event.booth'].sudo().search([
('event_id', '=', event.id),
('state', '=', 'unavailable'),
('id', 'in', [int(id) for id in booths.split(',')]),
])
if len(booth_ids.mapped('partner_id')) > 1:
raise NotFound()
event_sudo = event.sudo()
return request.render(
'website_event_booth.event_booth_registration_complete',
{'event': event,
'event_booths': event_sudo.event_booth_ids,
'main_object': event,
'contact_name': booth_ids[0].contact_name or booth_ids.partner_id.name,
'contact_email': booth_ids[0].contact_email or booth_ids.partner_id.email,
'contact_mobile': booth_ids[0].contact_mobile or booth_ids.partner_id.mobile,
'contact_phone': booth_ids[0].contact_phone or booth_ids.partner_id.phone,
}
)
def _prepare_booth_registration_values(self, event, kwargs):
return self._prepare_booth_registration_partner_values(event, kwargs)
def _prepare_booth_registration_partner_values(self, event, kwargs):
if request.env.user._is_public():
contact_email = kwargs['contact_email']
partner = request.env['res.partner'].sudo().find_or_create(contact_email)
if not partner.name and kwargs.get('contact_name'):
partner.name = kwargs['contact_name']
if not partner.phone and kwargs.get('contact_phone'):
partner.phone = kwargs['contact_phone']
if not partner.mobile and kwargs.get('contact_mobile'):
partner.mobile = kwargs['contact_mobile']
else:
partner = request.env.user.partner_id
return {
'partner_id': partner.id,
'contact_name': kwargs.get('contact_name') or partner.name,
'contact_email': kwargs.get('contact_email') or partner.email,
'contact_mobile': kwargs.get('contact_mobile') or partner.mobile,
'contact_phone': kwargs.get('contact_phone') or partner.phone,
}
@http.route('/event/booth/check_availability', type='json', auth='public', methods=['POST'])
def check_booths_availability(self, event_booth_ids=None):
if not event_booth_ids:
return {}
booths = request.env['event.booth'].sudo().browse(event_booth_ids)
return {
'unavailable_booths': booths.filtered(lambda booth: not booth.is_available).ids
}
@http.route(['/event/booth_category/get_available_booths'], type='json', auth='public')
def get_booth_category_available_booths(self, event_id, booth_category_id):
booth_ids = request.env['event.booth'].sudo().search([
('event_id', '=', int(event_id)),
('booth_category_id', '=', int(booth_category_id)),
('state', '=', 'available')
])
return [
{'id': booth.id, 'name': booth.name}
for booth in booth_ids
]
| 45.664671 | 7,626 |
974 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'HR Org Chart',
'category': 'Hidden',
'version': '1.0',
'description':
"""
Org Chart Widget for HR
=======================
This module extends the employee form with an organizational chart.
(N+1, N+2, direct subordinates)
""",
'depends': ['hr'],
'auto_install': True,
'data': [
'views/hr_views.xml'
],
'assets': {
'web._assets_primary_variables': [
'hr_org_chart/static/src/scss/variables.scss',
],
'web.assets_backend': [
'hr_org_chart/static/src/scss/hr_org_chart.scss',
'hr_org_chart/static/src/js/hr_org_chart.js',
],
'web.qunit_suite_tests': [
'hr_org_chart/static/tests/**/*',
],
'web.assets_qweb': [
'hr_org_chart/static/src/xml/**/*',
],
},
'license': 'LGPL-3',
}
| 26.324324 | 974 |
1,277 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import Form, tagged, TransactionCase
from odoo.exceptions import MissingError
@tagged('post_install', '-at_install')
class TestEmployeeDeletion(TransactionCase):
def test_employee_deletion(self):
# Tests an issue with the form view where the employee could be deleted
employee_a, employee_b = self.env['hr.employee'].create([
{
'name': 'A',
},
{
'name': 'B',
},
])
department_a, department_b = self.env['hr.department'].create([
{
'name': 'DEP A',
'manager_id': employee_a.id,
},
{
'name': 'DEP B',
'manager_id': employee_b.id,
},
])
employee_a.write({
'parent_id': employee_a.id,
'coach_id': employee_a.id,
'department_id': department_a.id,
})
try:
with Form(employee_a) as form:
form.department_id = department_b
except MissingError:
self.fail('The employee should not have been deleted')
| 32.74359 | 1,277 |
696 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class Employee(models.Model):
_inherit = ["hr.employee"]
subordinate_ids = fields.One2many('hr.employee', string='Subordinates', compute='_compute_subordinates', help="Direct and indirect subordinates",
compute_sudo=True)
class HrEmployeePublic(models.Model):
_inherit = ["hr.employee.public"]
subordinate_ids = fields.One2many('hr.employee.public', string='Subordinates', compute='_compute_subordinates', help="Direct and indirect subordinates",
compute_sudo=True)
| 38.666667 | 696 |
1,584 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class HrEmployeeBase(models.AbstractModel):
_inherit = "hr.employee.base"
child_all_count = fields.Integer(
'Indirect Subordinates Count',
compute='_compute_subordinates', recursive=True, store=False,
compute_sudo=True)
def _get_subordinates(self, parents=None):
"""
        Helper function to compute subordinate_ids.
        Get all subordinates (direct and indirect) of an employee.
        An employee can be a manager of their own manager (recursive hierarchy; e.g. the CEO manages everyone but is also
        a member of the R&D department, managed by the CTO, who is in turn managed by the CEO).
        In that case, the manager is not counted as a subordinate if they are in the 'parents' set.
"""
if not parents:
parents = self.env[self._name]
indirect_subordinates = self.env[self._name]
parents |= self
direct_subordinates = self.child_ids - parents
for child in direct_subordinates:
child_subordinate = child._get_subordinates(parents=parents)
indirect_subordinates |= child_subordinate
return indirect_subordinates | direct_subordinates
@api.depends('child_ids', 'child_ids.child_all_count')
def _compute_subordinates(self):
for employee in self:
employee.subordinate_ids = employee._get_subordinates()
employee.child_all_count = len(employee.subordinate_ids)
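        # Illustrative hierarchy (hypothetical employees): if A manages B and B manages C, then
        # A.subordinate_ids contains B and C and A.child_all_count is 2, while B.subordinate_ids
        # contains only C.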
| 40.615385 | 1,584 |
3,651 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http
from odoo.exceptions import AccessError
from odoo.http import request
class HrOrgChartController(http.Controller):
_managers_level = 5 # FP request
def _check_employee(self, employee_id, **kw):
if not employee_id: # to check
return None
employee_id = int(employee_id)
if 'allowed_company_ids' in request.env.context:
cids = request.env.context['allowed_company_ids']
else:
cids = [request.env.company.id]
Employee = request.env['hr.employee.public'].with_context(allowed_company_ids=cids)
# check and raise
if not Employee.check_access_rights('read', raise_exception=False):
return None
try:
Employee.browse(employee_id).check_access_rule('read')
except AccessError:
return None
else:
return Employee.browse(employee_id)
def _prepare_employee_data(self, employee):
job = employee.sudo().job_id
return dict(
id=employee.id,
name=employee.name,
link='/mail/view?model=%s&res_id=%s' % ('hr.employee.public', employee.id,),
job_id=job.id,
job_name=job.name or '',
job_title=employee.job_title or '',
direct_sub_count=len(employee.child_ids - employee),
indirect_sub_count=employee.child_all_count,
)
@http.route('/hr/get_redirect_model', type='json', auth='user')
def get_redirect_model(self):
if request.env['hr.employee'].check_access_rights('read', raise_exception=False):
return 'hr.employee'
return 'hr.employee.public'
@http.route('/hr/get_org_chart', type='json', auth='user')
def get_org_chart(self, employee_id, **kw):
employee = self._check_employee(employee_id, **kw)
if not employee: # to check
return {
'managers': [],
'children': [],
}
# compute employee data for org chart
ancestors, current = request.env['hr.employee.public'].sudo(), employee.sudo()
while current.parent_id and len(ancestors) < self._managers_level+1 and current != current.parent_id:
ancestors += current.parent_id
current = current.parent_id
values = dict(
self=self._prepare_employee_data(employee),
managers=[
self._prepare_employee_data(ancestor)
for idx, ancestor in enumerate(ancestors)
if idx < self._managers_level
],
managers_more=len(ancestors) > self._managers_level,
children=[self._prepare_employee_data(child) for child in employee.child_ids if child != employee],
)
values['managers'].reverse()
return values
@http.route('/hr/get_subordinates', type='json', auth='user')
def get_subordinates(self, employee_id, subordinates_type=None, **kw):
"""
Get employee subordinates.
Possible values for 'subordinates_type':
- 'indirect'
- 'direct'
"""
employee = self._check_employee(employee_id, **kw)
if not employee: # to check
return {}
if subordinates_type == 'direct':
res = (employee.child_ids - employee).ids
elif subordinates_type == 'indirect':
res = (employee.subordinate_ids - employee.child_ids).ids
else:
res = employee.subordinate_ids.ids
return res
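    # Illustrative request (hedged sketch, not in the original code): since the
    # route above is declared with type='json', it is reached through Odoo's
    # JSON-RPC convention, e.g.:
    #   POST /hr/get_subordinates
    #   {"jsonrpc": "2.0", "method": "call",
    #    "params": {"employee_id": 42, "subordinates_type": "direct"}}
    # The ids returned by this method are wrapped in the JSON-RPC 'result' key.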
| 36.51 | 3,651 |
657 |
py
|
PYTHON
|
15.0
|
#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2013-2015 Akretion (http://www.akretion.com)
{
'name': 'France - FEC Export',
'icon': '/l10n_fr/static/description/icon.png',
'category': 'Accounting/Localizations/Reporting',
'summary': "Fichier d'Échange Informatisé (FEC) for France",
'author': "Akretion,Odoo Community Association (OCA)",
'depends': ['l10n_fr', 'account'],
'data': [
'security/ir.model.access.csv',
'security/security.xml',
'wizard/account_fr_fec_view.xml',
],
'auto_install': True,
'license': 'LGPL-3',
}
| 32.75 | 655 |
3,862 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
from datetime import timedelta
from freezegun import freeze_time
from odoo.addons.account.tests.common import AccountTestInvoicingCommon
from odoo.tests import tagged
from odoo import fields, Command
@tagged('post_install_l10n', 'post_install', '-at_install')
class TestAccountFrFec(AccountTestInvoicingCommon):
@classmethod
def setUpClass(cls, chart_template_ref='l10n_fr.l10n_fr_pcg_chart_template'):
super().setUpClass(chart_template_ref=chart_template_ref)
company = cls.company_data['company']
company.vat = 'FR13542107651'
lines_data = [(1437.12, 'Hello\tDarkness'), (1676.64, 'my\rold\nfriend'), (3353.28, '\t\t\r')]
with freeze_time('2021-05-02'):
today = fields.Date.today().strftime('%Y-%m-%d')
cls.wizard = cls.env['account.fr.fec'].create({
'date_from': fields.Date.today() - timedelta(days=1),
'date_to': fields.Date.today(),
'export_type': 'official',
'test_file': True,
})
cls.tax_sale_a = cls.env['account.tax'].create({
'name': "TVA 20,0%",
'amount_type': 'percent',
'type_tax_use': 'sale',
'amount': 20,
'invoice_repartition_line_ids': [
Command.create({
'factor_percent': 100.0,
'repartition_type': 'base',
}),
Command.create({
'repartition_type': 'tax',
'factor_percent': 100.0,
'account_id': cls.env['account.account'].search([('code', '=', "445710")], limit=1).id,
})
]
})
cls.invoice_a = cls.env['account.move'].create({
'move_type': 'out_invoice',
'partner_id': cls.partner_a.id,
'date': today,
'invoice_date': today,
'currency_id': company.currency_id.id,
'invoice_line_ids': [(0, None, {
'name': name,
'product_id': cls.product_a.id,
'quantity': 1,
'tax_ids': [(6, 0, [cls.tax_sale_a.id])],
'price_unit': price_unit,
}) for price_unit, name in lines_data]
})
cls.invoice_a.action_post()
def test_generate_fec_sanitize_pieceref(self):
self.wizard.generate_fec()
expected_content = (
"JournalCode|JournalLib|EcritureNum|EcritureDate|CompteNum|CompteLib|CompAuxNum|CompAuxLib|PieceRef|PieceDate|EcritureLib|Debit|Credit|EcritureLet|DateLet|ValidDate|Montantdevise|Idevise\r\n"
"INV|Customer Invoices|INV/2021/00001|20210502|701100|Ventes de produits finis (ou groupe) A|||-|20210502|Hello Darkness|0,00| 000000000001437,12|||20210502|-000000000001437,12|EUR\r\n"
"INV|Customer Invoices|INV/2021/00001|20210502|701100|Ventes de produits finis (ou groupe) A|||-|20210502|my old friend|0,00| 000000000001676,64|||20210502|-000000000001676,64|EUR\r\n"
"INV|Customer Invoices|INV/2021/00001|20210502|701100|Ventes de produits finis (ou groupe) A|||-|20210502|/|0,00| 000000000003353,28|||20210502|-000000000003353,28|EUR\r\n"
"INV|Customer Invoices|INV/2021/00001|20210502|445710|TVA collectée|||-|20210502|TVA 20,0%|0,00| 000000000001293,41|||20210502|-000000000001293,41|EUR\r\n"
f"INV|Customer Invoices|INV/2021/00001|20210502|411100|Clients - Ventes de biens ou de prestations de services|{self.partner_a.id}|partner_a|-|20210502|INV/2021/00001| 000000000007760,45|0,00|||20210502| 000000000007760,45|EUR"
)
content = base64.b64decode(self.wizard.fec_data).decode()
self.assertEqual(expected_content, content)
| 48.2625 | 3,861 |
19,320 |
py
|
PYTHON
|
15.0
|
#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2013-2015 Akretion (http://www.akretion.com)
import base64
import io
from odoo import api, fields, models, _
from odoo.exceptions import UserError, AccessDenied
from odoo.tools import float_is_zero, pycompat
from odoo.tools.misc import get_lang
from stdnum.fr import siren
class AccountFrFec(models.TransientModel):
_name = 'account.fr.fec'
    _description = 'Fichier Echange Informatise'
date_from = fields.Date(string='Start Date', required=True)
date_to = fields.Date(string='End Date', required=True)
fec_data = fields.Binary('FEC File', readonly=True)
filename = fields.Char(string='Filename', size=256, readonly=True)
test_file = fields.Boolean()
export_type = fields.Selection([
('official', 'Official FEC report (posted entries only)'),
('nonofficial', 'Non-official FEC report (posted and unposted entries)'),
], string='Export Type', required=True, default='official')
@api.onchange('test_file')
def _onchange_export_file(self):
if not self.test_file:
self.export_type = 'official'
def _do_query_unaffected_earnings(self):
''' Compute the sum of ending balances for all accounts that are of a type that does not bring forward the balance in new fiscal years.
This is needed because we have to display only one line for the initial balance of all expense/revenue accounts in the FEC.
'''
sql_query = '''
SELECT
'OUV' AS JournalCode,
'Balance initiale' AS JournalLib,
'OUVERTURE/' || %s AS EcritureNum,
%s AS EcritureDate,
'120/129' AS CompteNum,
'Benefice (perte) reporte(e)' AS CompteLib,
'' AS CompAuxNum,
'' AS CompAuxLib,
'-' AS PieceRef,
%s AS PieceDate,
'/' AS EcritureLib,
replace(CASE WHEN COALESCE(sum(aml.balance), 0) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
replace(CASE WHEN COALESCE(sum(aml.balance), 0) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
'' AS EcritureLet,
'' AS DateLet,
%s AS ValidDate,
'' AS Montantdevise,
'' AS Idevise
FROM
account_move_line aml
LEFT JOIN account_move am ON am.id=aml.move_id
JOIN account_account aa ON aa.id = aml.account_id
LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
WHERE
am.date < %s
AND am.company_id = %s
AND aat.include_initial_balance IS NOT TRUE
'''
# For official report: only use posted entries
if self.export_type == "official":
sql_query += '''
AND am.state = 'posted'
'''
company = self.env.company
formatted_date_from = fields.Date.to_string(self.date_from).replace('-', '')
date_from = self.date_from
formatted_date_year = date_from.year
self._cr.execute(
sql_query, (formatted_date_year, formatted_date_from, formatted_date_from, formatted_date_from, self.date_from, company.id))
listrow = []
row = self._cr.fetchone()
listrow = list(row)
return listrow
def _get_company_legal_data(self, company):
"""
        Dom-Tom regions are excluded from the EU's fiscal territory.
        Those regions do not have a SIREN.
sources:
https://www.service-public.fr/professionnels-entreprises/vosdroits/F23570
http://www.douane.gouv.fr/articles/a11024-tva-dans-les-dom
        * Returns the SIREN if the company is French, or an empty SIREN for Dom-Tom
        * For non-French companies -> returns the complete VAT number
"""
dom_tom_group = self.env.ref('l10n_fr.dom-tom')
is_dom_tom = company.account_fiscal_country_id.code in dom_tom_group.country_ids.mapped('code')
if not company.vat or is_dom_tom:
return ''
elif company.country_id.code == 'FR' and len(company.vat) >= 13 and siren.is_valid(company.vat[4:13]):
return company.vat[4:13]
else:
return company.vat
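    # Illustrative example (not in the original code): a French VAT number is
    # 'FR' + a 2-character key + the 9-digit SIREN, which is why the slice
    # company.vat[4:13] is used above. For a hypothetical VAT 'FR40303265045'
    # the method returns the SIREN '303265045'; a DOM-TOM company yields '',
    # and a non-French company (e.g. VAT 'BE0477472701') gets its full VAT back.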
def generate_fec(self):
self.ensure_one()
if not (self.env.is_admin() or self.env.user.has_group('account.group_account_user')):
raise AccessDenied()
# We choose to implement the flat file instead of the XML
# file for 2 reasons :
# 1) the XSD file impose to have the label on the account.move
# but Odoo has the label on the account.move.line, so that's a
# problem !
# 2) CSV files are easier to read/use for a regular accountant.
# So it will be easier for the accountant to check the file before
# sending it to the fiscal administration
today = fields.Date.today()
if self.date_from > today or self.date_to > today:
            raise UserError(_('You cannot set the start date or the end date in the future.'))
if self.date_from >= self.date_to:
            raise UserError(_('The start date must be earlier than the end date.'))
company = self.env.company
company_legal_data = self._get_company_legal_data(company)
header = [
u'JournalCode', # 0
u'JournalLib', # 1
u'EcritureNum', # 2
u'EcritureDate', # 3
u'CompteNum', # 4
u'CompteLib', # 5
u'CompAuxNum', # 6 We use partner.id
u'CompAuxLib', # 7
u'PieceRef', # 8
u'PieceDate', # 9
u'EcritureLib', # 10
u'Debit', # 11
u'Credit', # 12
u'EcritureLet', # 13
u'DateLet', # 14
u'ValidDate', # 15
u'Montantdevise', # 16
u'Idevise', # 17
]
rows_to_write = [header]
# INITIAL BALANCE
unaffected_earnings_xml_ref = self.env.ref('account.data_unaffected_earnings')
        unaffected_earnings_line = True # used to make sure that we add the unaffected earnings initial balance only once
if unaffected_earnings_xml_ref:
#compute the benefit/loss of last year to add in the initial balance of the current year earnings account
unaffected_earnings_results = self._do_query_unaffected_earnings()
unaffected_earnings_line = False
sql_query = '''
SELECT
'OUV' AS JournalCode,
'Balance initiale' AS JournalLib,
'OUVERTURE/' || %s AS EcritureNum,
%s AS EcritureDate,
MIN(aa.code) AS CompteNum,
replace(replace(MIN(aa.name), '|', '/'), '\t', '') AS CompteLib,
'' AS CompAuxNum,
'' AS CompAuxLib,
'-' AS PieceRef,
%s AS PieceDate,
'/' AS EcritureLib,
replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
'' AS EcritureLet,
'' AS DateLet,
%s AS ValidDate,
'' AS Montantdevise,
'' AS Idevise,
MIN(aa.id) AS CompteID
FROM
account_move_line aml
LEFT JOIN account_move am ON am.id=aml.move_id
JOIN account_account aa ON aa.id = aml.account_id
LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
WHERE
am.date < %s
AND am.company_id = %s
AND aat.include_initial_balance = 't'
'''
# For official report: only use posted entries
if self.export_type == "official":
sql_query += '''
AND am.state = 'posted'
'''
sql_query += '''
GROUP BY aml.account_id, aat.type
HAVING aat.type not in ('receivable', 'payable')
'''
formatted_date_from = fields.Date.to_string(self.date_from).replace('-', '')
date_from = self.date_from
formatted_date_year = date_from.year
currency_digits = 2
self._cr.execute(
sql_query, (formatted_date_year, formatted_date_from, formatted_date_from, formatted_date_from, self.date_from, company.id))
for row in self._cr.fetchall():
listrow = list(row)
account_id = listrow.pop()
if not unaffected_earnings_line:
account = self.env['account.account'].browse(account_id)
if account.user_type_id.id == self.env.ref('account.data_unaffected_earnings').id:
#add the benefit/loss of previous fiscal year to the first unaffected earnings account found.
unaffected_earnings_line = True
current_amount = float(listrow[11].replace(',', '.')) - float(listrow[12].replace(',', '.'))
unaffected_earnings_amount = float(unaffected_earnings_results[11].replace(',', '.')) - float(unaffected_earnings_results[12].replace(',', '.'))
listrow_amount = current_amount + unaffected_earnings_amount
if float_is_zero(listrow_amount, precision_digits=currency_digits):
continue
if listrow_amount > 0:
listrow[11] = str(listrow_amount).replace('.', ',')
listrow[12] = '0,00'
else:
listrow[11] = '0,00'
listrow[12] = str(-listrow_amount).replace('.', ',')
rows_to_write.append(listrow)
#if the unaffected earnings account wasn't in the selection yet: add it manually
if (not unaffected_earnings_line
and unaffected_earnings_results
and (unaffected_earnings_results[11] != '0,00'
or unaffected_earnings_results[12] != '0,00')):
#search an unaffected earnings account
unaffected_earnings_account = self.env['account.account'].search([('user_type_id', '=', self.env.ref('account.data_unaffected_earnings').id),
('company_id', '=', company.id)], limit=1)
if unaffected_earnings_account:
unaffected_earnings_results[4] = unaffected_earnings_account.code
unaffected_earnings_results[5] = unaffected_earnings_account.name
rows_to_write.append(unaffected_earnings_results)
# INITIAL BALANCE - receivable/payable
sql_query = '''
SELECT
'OUV' AS JournalCode,
'Balance initiale' AS JournalLib,
'OUVERTURE/' || %s AS EcritureNum,
%s AS EcritureDate,
MIN(aa.code) AS CompteNum,
replace(MIN(aa.name), '|', '/') AS CompteLib,
CASE WHEN MIN(aat.type) IN ('receivable', 'payable')
THEN
CASE WHEN rp.ref IS null OR rp.ref = ''
THEN rp.id::text
ELSE replace(rp.ref, '|', '/')
END
ELSE ''
END
AS CompAuxNum,
CASE WHEN aat.type IN ('receivable', 'payable')
THEN COALESCE(replace(rp.name, '|', '/'), '')
ELSE ''
END AS CompAuxLib,
'-' AS PieceRef,
%s AS PieceDate,
'/' AS EcritureLib,
replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
'' AS EcritureLet,
'' AS DateLet,
%s AS ValidDate,
'' AS Montantdevise,
'' AS Idevise,
MIN(aa.id) AS CompteID
FROM
account_move_line aml
LEFT JOIN account_move am ON am.id=aml.move_id
LEFT JOIN res_partner rp ON rp.id=aml.partner_id
JOIN account_account aa ON aa.id = aml.account_id
LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
WHERE
am.date < %s
AND am.company_id = %s
AND aat.include_initial_balance = 't'
'''
# For official report: only use posted entries
if self.export_type == "official":
sql_query += '''
AND am.state = 'posted'
'''
sql_query += '''
GROUP BY aml.account_id, aat.type, rp.ref, rp.id
HAVING aat.type in ('receivable', 'payable')
'''
self._cr.execute(
sql_query, (formatted_date_year, formatted_date_from, formatted_date_from, formatted_date_from, self.date_from, company.id))
for row in self._cr.fetchall():
listrow = list(row)
account_id = listrow.pop()
rows_to_write.append(listrow)
# LINES
sql_query = '''
SELECT
REGEXP_REPLACE(replace(aj.code, '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS JournalCode,
REGEXP_REPLACE(replace(COALESCE(aj__name.value, aj.name), '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS JournalLib,
REGEXP_REPLACE(replace(am.name, '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS EcritureNum,
TO_CHAR(am.date, 'YYYYMMDD') AS EcritureDate,
aa.code AS CompteNum,
REGEXP_REPLACE(replace(aa.name, '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS CompteLib,
CASE WHEN aat.type IN ('receivable', 'payable')
THEN
CASE WHEN rp.ref IS null OR rp.ref = ''
THEN rp.id::text
ELSE replace(rp.ref, '|', '/')
END
ELSE ''
END
AS CompAuxNum,
CASE WHEN aat.type IN ('receivable', 'payable')
THEN COALESCE(REGEXP_REPLACE(replace(rp.name, '|', '/'), '[\\t\\r\\n]', ' ', 'g'), '')
ELSE ''
END AS CompAuxLib,
CASE WHEN am.ref IS null OR am.ref = ''
THEN '-'
ELSE REGEXP_REPLACE(replace(am.ref, '|', '/'), '[\\t\\r\\n]', ' ', 'g')
END
AS PieceRef,
TO_CHAR(COALESCE(am.invoice_date, am.date), 'YYYYMMDD') AS PieceDate,
CASE WHEN aml.name IS NULL OR aml.name = '' THEN '/'
WHEN aml.name SIMILAR TO '[\\t|\\s|\\n]*' THEN '/'
ELSE REGEXP_REPLACE(replace(aml.name, '|', '/'), '[\\t\\n\\r]', ' ', 'g') END AS EcritureLib,
replace(CASE WHEN aml.debit = 0 THEN '0,00' ELSE to_char(aml.debit, '000000000000000D99') END, '.', ',') AS Debit,
replace(CASE WHEN aml.credit = 0 THEN '0,00' ELSE to_char(aml.credit, '000000000000000D99') END, '.', ',') AS Credit,
CASE WHEN rec.name IS NULL THEN '' ELSE rec.name END AS EcritureLet,
CASE WHEN aml.full_reconcile_id IS NULL THEN '' ELSE TO_CHAR(rec.create_date, 'YYYYMMDD') END AS DateLet,
TO_CHAR(am.date, 'YYYYMMDD') AS ValidDate,
CASE
WHEN aml.amount_currency IS NULL OR aml.amount_currency = 0 THEN ''
ELSE replace(to_char(aml.amount_currency, '000000000000000D99'), '.', ',')
END AS Montantdevise,
CASE WHEN aml.currency_id IS NULL THEN '' ELSE rc.name END AS Idevise
FROM
account_move_line aml
LEFT JOIN account_move am ON am.id=aml.move_id
LEFT JOIN res_partner rp ON rp.id=aml.partner_id
JOIN account_journal aj ON aj.id = am.journal_id
LEFT JOIN ir_translation aj__name ON aj__name.res_id = aj.id
AND aj__name.type = 'model'
AND aj__name.name = 'account.journal,name'
AND aj__name.lang = %s
AND aj__name.value != ''
JOIN account_account aa ON aa.id = aml.account_id
LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
LEFT JOIN res_currency rc ON rc.id = aml.currency_id
LEFT JOIN account_full_reconcile rec ON rec.id = aml.full_reconcile_id
WHERE
am.date >= %s
AND am.date <= %s
AND am.company_id = %s
'''
# For official report: only use posted entries
if self.export_type == "official":
sql_query += '''
AND am.state = 'posted'
'''
sql_query += '''
ORDER BY
am.date,
am.name,
aml.id
'''
lang = self.env.user.lang or get_lang(self.env).code
self._cr.execute(
sql_query, (lang, self.date_from, self.date_to, company.id))
for row in self._cr.fetchall():
rows_to_write.append(list(row))
fecvalue = self._csv_write_rows(rows_to_write)
end_date = fields.Date.to_string(self.date_to).replace('-', '')
suffix = ''
if self.export_type == "nonofficial":
suffix = '-NONOFFICIAL'
self.write({
'fec_data': base64.encodebytes(fecvalue),
            # Filename = <siren>FECYYYYMMDD where YYYYMMDD is the closing date
'filename': '%sFEC%s%s.csv' % (company_legal_data, end_date, suffix),
})
# Set fiscal year lock date to the end date (not in test)
fiscalyear_lock_date = self.env.company.fiscalyear_lock_date
if not self.test_file and (not fiscalyear_lock_date or fiscalyear_lock_date < self.date_to):
self.env.company.write({'fiscalyear_lock_date': self.date_to})
return {
'name': 'FEC',
'type': 'ir.actions.act_url',
'url': "web/content/?model=account.fr.fec&id=" + str(self.id) + "&filename_field=filename&field=fec_data&download=true&filename=" + self.filename,
'target': 'self',
}
def _csv_write_rows(self, rows, lineterminator=u'\r\n'):
"""
Write FEC rows into a file
It seems that Bercy's bureaucracy is not too happy about the
empty new line at the End Of File.
@param {list(list)} rows: the list of rows. Each row is a list of strings
@param {unicode string} [optional] lineterminator: effective line terminator
Has nothing to do with the csv writer parameter
The last line written won't be terminated with it
@return the value of the file
"""
fecfile = io.BytesIO()
writer = pycompat.csv_writer(fecfile, delimiter='|', lineterminator='')
rows_length = len(rows)
for i, row in enumerate(rows):
if not i == rows_length - 1:
row[-1] += lineterminator
writer.writerow(row)
fecvalue = fecfile.getvalue()
fecfile.close()
return fecvalue
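    # Illustrative usage (hedged sketch, not in the original code):
    #   self._csv_write_rows([['JournalCode', 'JournalLib'],
    #                         ['INV', 'Customer Invoices']])
    #   -> b'JournalCode|JournalLib\r\nINV|Customer Invoices'
    # i.e. fields are pipe-separated and, as required above, the very last row
    # carries no trailing line terminator.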
| 44.825986 | 19,320 |
2,258 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2009 Renato Lima - Akretion
{
'name': 'Brazilian - Accounting',
'category': 'Accounting/Localizations/Account Charts',
'description': """
Base module for the Brazilian localization
==========================================
This module consists of:
- Generic Brazilian chart of accounts
- Brazilian taxes such as:
- IPI
- ICMS
- PIS
- COFINS
- ISS
- IR
- IRPJ
- CSLL
The field tax_discount has also been added to the account.tax.template and
account.tax objects to allow the proper computation of some Brazilian VATs
such as ICMS. The chart of accounts creation wizard has been extended to
propagate those new fields properly.
It's important to note, however, that this module lacks many implementations needed
to use Odoo properly in Brazil. Those implementations (such as the electronic
fiscal invoicing, which is already operational) are brought by more than 15
additional modules of the Brazilian Launchpad localization project
https://launchpad.net/openerp.pt-br-localiz and their dependencies in the
extra addons branch. Those modules aim at not breaking with the remarkable
Odoo modularity; this is why they are numerous but small. One of the
reasons for maintaining those modules apart is that the Brazilian localization
leaders need commit-rights agility to complete the localization as companies
fund the remaining legal requirements (such as the fiscal ledgers,
accounting SPED, fiscal SPED and PAF ECF that were still missing as of September
2011). Those modules are also strictly licensed under AGPL V3 and today don't
come with any additional paid permission for online use of 'private modules'.
""",
'author': 'Akretion, Odoo Brasil',
'website': 'http://openerpbrasil.org',
'depends': ['account'],
'data': [
'data/l10n_br_chart_data.xml',
'data/account.account.template.csv',
'data/account_tax_group_data.xml',
'data/account_tax_report_data.xml',
'data/account_tax_template_data.xml',
'views/account_view.xml',
],
'demo': [
'demo/demo_company.xml',
],
'license': 'LGPL-3',
}
| 37.016393 | 2,258 |
1,360 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class AccountTaxTemplate(models.Model):
""" Add fields used to define some brazilian taxes """
_inherit = 'account.tax.template'
    tax_discount = fields.Boolean(string='Discount this Tax in Price',
                                  help="Mark it for (ICMS, PIS, etc.).")
    base_reduction = fields.Float(string='Reduction', digits=0, required=True,
                                  help="A decimal percentage in % between 0-1.", default=0)
    amount_mva = fields.Float(string='MVA Percent', digits=0, required=True,
                                  help="A decimal percentage in % between 0-1.", default=0)
class AccountTax(models.Model):
""" Add fields used to define some brazilian taxes """
_inherit = 'account.tax'
    tax_discount = fields.Boolean(string='Discount this Tax in Price',
                                  help="Mark it for (ICMS, PIS, etc.).")
    base_reduction = fields.Float(string='Reduction', digits=0, required=True,
                                  help="A decimal percentage in % between 0-1.", default=0)
    amount_mva = fields.Float(string='MVA Percent', digits=0, required=True,
                                  help="A decimal percentage in % between 0-1.", default=0)
| 48.571429 | 1,360 |
2,456 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Denmark - Accounting',
'version': '1.0',
'author': 'Odoo House ApS, VK DATA ApS',
'website': 'http://odoodanmark.dk',
'category': 'Accounting/Localizations/Account Charts',
'description': """
Localization Module for Denmark
===============================
This is the module to manage the **accounting chart for Denmark**. It covers one-man businesses as well as I/S, IVS, ApS and A/S.
**Modulet opsætter:**
- **Dansk kontoplan**
- Dansk moms
- 25% moms
    - Restaurationsmoms 6,25%
- Omvendt betalingspligt
- Konteringsgrupper
- EU (Virksomhed)
- EU (Privat)
- 3.lande
- Finansrapporter
    - Resultatopgørelse
- Balance
- Momsafregning
- Afregning
- Rubrik A, B og C
- **Anglo-Saxon regnskabsmetode**
.
Produkt setup:
==============
**Vare**
**Salgsmoms:** Salgmoms 25%
**Salgskonto:** 1010 Salg af vare, m/moms
**Købsmoms:** Købsmoms 25%
**Købskonto:** 2010 Direkte omkostninger vare, m/moms
.
**Ydelse**
**Salgsmoms:** Salgmoms 25%, ydelser
**Salgskonto:** 1011 Salg af ydelser, m/moms
**Købsmoms:** Købsmoms 25%, ydelser
**Købskonto:** 2011 Direkte omkostninger ydelser, m/moms
.
**Vare med omvendt betalingspligt**
**Salgsmoms:** Salg omvendt betalingspligt
**Salgskonto:** 1012 Salg af vare, u/moms
**Købsmoms:** Køb omvendt betalingspligt
**Købskonto:** 2012 Direkte omkostninger vare, u/moms
.
**Restauration**
**Købsmoms:** Restaurationsmoms 6,25%, købsmoms
**Købskonto:** 4010 Restaurationsbesøg
.
""",
'depends': ['account', 'base_iban', 'base_vat'],
'data': [
'data/account_account_tags.xml',
'data/l10n_dk_chart_template_data.xml',
'data/account.account.template.csv',
'data/l10n_dk_chart_template_post_data.xml',
'data/account_tax_report_data.xml',
'data/account_tax_template_data.xml',
'data/account_fiscal_position_template.xml',
'data/account_fiscal_position_tax_template.xml',
'data/account_fiscal_position_account_template.xml',
'data/account_chart_template_configuration_data.xml',
'data/menuitem_data.xml'
],
'demo': [
'demo/demo_company.xml',
],
'license': 'LGPL-3',
}
| 22.394495 | 2,441 |
769 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class AccountJournal(models.Model):
_inherit = 'account.journal'
@api.model
def _prepare_liquidity_account_vals(self, company, code, vals):
# OVERRIDE
account_vals = super()._prepare_liquidity_account_vals(company, code, vals)
if company.account_fiscal_country_id.code == 'DK':
# Ensure the newly liquidity accounts have the right account tag in order to be part
# of the Danish financial reports.
account_vals.setdefault('tag_ids', [])
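            # (4, id) is the ORM "link" command: it adds the liquidity tag to
            # tag_ids without removing any tag already present in the values.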
account_vals['tag_ids'].append((4, self.env.ref('l10n_dk.account_tag_liquidity').id))
return account_vals
| 36.619048 | 769 |
683 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class AccountChartTemplate(models.Model):
_inherit = 'account.chart.template'
@api.model
def _prepare_transfer_account_for_direct_creation(self, name, company):
res = super(AccountChartTemplate, self)._prepare_transfer_account_for_direct_creation(name, company)
if company.account_fiscal_country_id.code == 'DK':
account_tag_liquidity = self.env.ref('l10n_dk.account_tag_liquidity')
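            # (6, 0, ids) is the ORM "replace" command: the transfer account's
            # tags are set to exactly the liquidity tag found above.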
res['tag_ids'] = [(6, 0, account_tag_liquidity.ids)]
res['name'] = 'Bank i transfer'
return res
| 40.176471 | 683 |
1,705 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Thanks to AEOdoo and the Spanish community
# Specially among others Ignacio Ibeas, Pedro Baeza and Landoo
{
'name': "Spain - SII EDI Suministro de Libros",
'version': '1.0',
'category': 'Accounting/Localizations/EDI',
'description': """
This module sends the tax information (mostly VAT) of
vendor bills and customer invoices to the SII. The procedure is called
"Procedimiento G417 - IVA. Llevanza de libros registro". It is
required for every company with a turnover above 6M€, and other companies can
already make use of it voluntarily. The invoices are automatically
sent after validation.
How the information is sent to the SII depends on the
configuration set on the taxes. The taxes
that come with the chart template (l10n_es) are automatically
configured with the right type. It is possible, however,
that extra taxes need to be created for certain exempt/"no sujeta" reasons.
You need to configure your certificate and the tax agency.
""",
'depends': [
'l10n_es',
'account_edi',
],
'data': [
'data/account_tax_data.xml',
'data/account_edi_data.xml',
'data/res_partner_data.xml',
'security/ir.model.access.csv',
'views/account_tax_views.xml',
'views/l10n_es_edi_certificate_views.xml',
'views/res_config_settings_views.xml',
],
'external_dependencies': {
'python': ['pyOpenSSL'],
},
'post_init_hook': '_l10n_es_edi_post_init',
'license': 'LGPL-3',
}
| 36.234043 | 1,703 |
3,277 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
from datetime import datetime
from odoo.tests import tagged
from odoo import fields
from .common import TestEsEdiCommon
@tagged('external_l10n', 'post_install', '-at_install', '-standard', 'external')
class TestEdiWebServices(TestEsEdiCommon):
@classmethod
def setUpClass(cls, chart_template_ref='l10n_es.account_chart_template_full', edi_format_ref='l10n_es_edi_sii.edi_es_sii'):
super().setUpClass(chart_template_ref=chart_template_ref, edi_format_ref=edi_format_ref)
        # Invoice names are tracked by the web services, so this constant tries to get a new unique invoice name at
        # each execution.
cls.today = datetime.now()
cls.time_name = cls.today.strftime('%H%M%S')
cls.out_invoice = cls.env['account.move'].create({
'name': f'INV{cls.time_name}',
'move_type': 'out_invoice',
'partner_id': cls.partner_a.id,
'invoice_line_ids': [(0, 0, {
'product_id': cls.product_a.id,
'price_unit': 1000.0,
'quantity': 5,
'discount': 20.0,
'tax_ids': [(6, 0, cls._get_tax_by_xml_id('s_iva21b').ids)],
})],
})
cls.out_invoice.action_post()
cls.in_invoice = cls.env['account.move'].create({
'name': f'BILL{cls.time_name}',
'ref': f'REFBILL{cls.time_name}',
'move_type': 'in_invoice',
'partner_id': cls.partner_a.id,
'invoice_date': fields.Date.to_string(cls.today.date()),
'invoice_line_ids': [(0, 0, {
'product_id': cls.product_a.id,
'price_unit': 1000.0,
'quantity': 5,
'discount': 20.0,
'tax_ids': [(6, 0, cls._get_tax_by_xml_id('p_iva10_bc').ids)],
})],
})
cls.in_invoice.action_post()
cls.moves = cls.out_invoice + cls.in_invoice
def test_edi_aeat(self):
self.env.company.l10n_es_edi_tax_agency = 'aeat'
self.moves.action_process_edi_web_services(with_commit=False)
generated_files = self._process_documents_web_services(self.moves, {'es_sii'})
self.assertTrue(generated_files)
self.assertRecordValues(self.out_invoice, [{'edi_state': 'sent'}])
self.assertRecordValues(self.in_invoice, [{'edi_state': 'sent'}])
def test_edi_gipuzkoa(self):
self.env.company.l10n_es_edi_tax_agency = 'gipuzkoa'
self.moves.action_process_edi_web_services(with_commit=False)
generated_files = self._process_documents_web_services(self.moves, {'es_sii'})
self.assertTrue(generated_files)
self.assertRecordValues(self.out_invoice, [{'edi_state': 'sent'}])
self.assertRecordValues(self.in_invoice, [{'edi_state': 'sent'}])
def test_edi_bizkaia(self):
self.env.company.l10n_es_edi_tax_agency = 'bizkaia'
self.moves.action_process_edi_web_services(with_commit=False)
generated_files = self._process_documents_web_services(self.moves, {'es_sii'})
self.assertTrue(generated_files)
self.assertRecordValues(self.out_invoice, [{'edi_state': 'sent'}])
self.assertRecordValues(self.in_invoice, [{'edi_state': 'sent'}])
| 42.012821 | 3,277 |
49,679 |
py
|
PYTHON
|
15.0
|
# coding: utf-8
from .common import TestEsEdiCommon
import json
from freezegun import freeze_time
from unittest.mock import patch
from odoo.tests import tagged
def mocked_l10n_es_edi_call_web_service_sign(edi_format, invoices, info_list):
return {inv: {'success': True} for inv in invoices}
@tagged('post_install_l10n', 'post_install', '-at_install')
class TestEdiXmls(TestEsEdiCommon):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.certificate.write({
'date_start': '2019-01-01 01:00:00',
'date_end': '2021-01-01 01:00:00',
})
def test_010_out_invoice_s_iva10b_s_iva21s(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
partner_id=self.partner_a.id,
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva10b').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva21s').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'INV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'F1',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 21.0,
'BaseImponible': 200.0,
'CuotaRepercutida': 42.0,
},
],
},
},
},
},
'Entrega': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 10.0,
'BaseImponible': 100.0,
'CuotaRepercutida': 10.0,
},
],
},
},
},
},
},
},
'ImporteTotal': 352.0,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_020_out_invoice_s_iva10b_s_iva0_ns(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
partner_id=self.partner_b.id,
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva10b').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_ns').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'INV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'F1',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseFactura': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 10.0,
'BaseImponible': 100.0,
'CuotaRepercutida': 10.0
},
],
},
},
},
},
},
'ImporteTotal': 110.0,
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
},
})
def test_030_out_invoice_s_iva10b_s_req014_s_iva21s_s_req52(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
partner_id=self.partner_a.id,
invoice_line_ids=[
{
'price_unit': 100.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('s_iva10b') + self._get_tax_by_xml_id('s_req014')).ids)],
},
{
'price_unit': 200.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('s_iva21s') + self._get_tax_by_xml_id('s_req52')).ids)],
},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'INV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'F1',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 21.0,
'BaseImponible': 200.0,
'CuotaRepercutida': 42.0,
'CuotaRecargoEquivalencia': 10.4,
'TipoRecargoEquivalencia': 5.2
}
]
}
}
}
},
'Entrega': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 10.0,
'BaseImponible': 100.0,
'CuotaRepercutida': 10.0,
'CuotaRecargoEquivalencia': 1.4,
'TipoRecargoEquivalencia': 1.4
}
]
}
}
}
}
}
},
'ImporteTotal': 363.8,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_040_out_refund_s_iva10b_s_iva10b_s_iva21s(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='out_refund',
partner_id=self.partner_a.id,
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva10b').ids)]},
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva10b').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva21s').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'RINV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'R1',
'TipoRectificativa': 'I',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 21.0,
'BaseImponible': -200.0,
'CuotaRepercutida': -42.0
}
]
}
}
}
},
'Entrega': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 10.0,
'BaseImponible': -200.0,
'CuotaRepercutida': -20.0
}
]
}
}
}
}
}
},
'ImporteTotal': -462.0,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_050_out_invoice_s_iva0_sp_i_s_iva0_ic(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
partner_id=self.partner_a.id,
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_sp_i').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_ic').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'INV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'F1',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'NoSujeta': {
'ImportePorArticulos7_14_Otros': 100.0
},
},
'Entrega': {
'Sujeta': {
'Exenta': {
'DetalleExenta': [
{
'BaseImponible': 200.0,
'CausaExencion': 'E5',
},
],
},
},
},
},
},
'ImporteTotal': 300.0,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_060_out_refund_s_iva0_sp_i_s_iva0_ic(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='out_refund',
partner_id=self.partner_a.id,
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_sp_i').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_ic').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'RINV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'R1',
'TipoRectificativa': 'I',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'NoSujeta': {
'ImportePorArticulos7_14_Otros': -100.0
},
},
'Entrega': {
'Sujeta': {
'Exenta': {
'DetalleExenta': [
{
'BaseImponible': -200.0,
'CausaExencion': 'E5',
},
],
},
},
},
},
},
'ImporteTotal': -300.0,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_070_out_invoice_s_iva_e_s_iva0_e(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
partner_id=self.partner_a.id,
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva_e').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_e').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'INV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'F1',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'NoSujeta': {
'ImportePorArticulos7_14_Otros': 100.0,
},
},
'Entrega': {
'Sujeta': {
'Exenta': {
'DetalleExenta': [
{
'BaseImponible': 200.0,
'CausaExencion': 'E2',
},
],
},
},
},
},
},
'ImporteTotal': 300.0,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_080_out_refund_s_iva0_sp_i_s_iva0_ic(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='out_refund',
partner_id=self.partner_a.id,
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_sp_i').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_ic').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'RINV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'R1',
'TipoRectificativa': 'I',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'NoSujeta': {
'ImportePorArticulos7_14_Otros': -100.0,
},
},
'Entrega': {
'Sujeta': {
'Exenta': {
'DetalleExenta': [
{
'BaseImponible': -200.0,
'CausaExencion': 'E5',
},
],
},
},
},
},
},
'ImporteTotal': -300.0,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_085_out_refund_s_iva0_sp_i_s_iva0_ic_multi_currency(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='out_refund',
partner_id=self.partner_a.id,
currency_id=self.currency_data['currency'].id,
invoice_line_ids=[
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_sp_i').ids)]},
{'price_unit': 400.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('s_iva0_ic').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'RINV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'R1',
'TipoRectificativa': 'I',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseTipoOperacion': {
'PrestacionServicios': {
'NoSujeta': {
'ImportePorArticulos7_14_Otros': -100.0,
},
},
'Entrega': {
'Sujeta': {
'Exenta': {
'DetalleExenta': [
{
'BaseImponible': -200.0,
'CausaExencion': 'E5',
},
],
},
},
},
},
},
'ImporteTotal': -300.0,
'Contraparte': {
'IDOtro': {'ID': 'BE0477472701', 'IDType': '02'},
'NombreRazon': 'partner_a',
},
},
})
def test_090_in_invoice_p_iva10_bc_p_irpf19_p_iva21_sc_p_irpf19(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_invoice',
ref='sup0001',
partner_id=self.partner_b.id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[
{
'price_unit': 100.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('p_iva10_bc') + self._get_tax_by_xml_id('p_irpf19')).ids)],
},
{
'price_unit': 200.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('p_iva21_sc') + self._get_tax_by_xml_id('p_irpf19')).ids)],
},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'}
},
'FacturaRecibida': {
'TipoFactura': 'F1',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': 352.0,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'DesgloseIVA': {
'DetalleIVA': [
{'BaseImponible': 100.0, 'CuotaSoportada': 10.0, 'TipoImpositivo': 10.0},
{'BaseImponible': 200.0, 'CuotaSoportada': 42.0, 'TipoImpositivo': 21.0}
]
}
},
'CuotaDeducible': 52.0
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'}
})
def test_100_in_refund_p_iva10_bc(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_refund',
ref='sup0001',
partner_id=self.partner_b.id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('p_iva10_bc').ids)]}],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'},
},
'FacturaRecibida': {
'TipoFactura': 'R4',
'TipoRectificativa': 'I',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': -110.0,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'DesgloseIVA': {
'DetalleIVA': [
{'BaseImponible': -100.0, 'CuotaSoportada': -10.0, 'TipoImpositivo': 10.0},
],
},
},
'CuotaDeducible': -10.0,
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'},
})
def test_110_in_invoice_p_iva10_bc_p_req014_p_iva21_sc_p_req52(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_invoice',
ref='sup0001',
partner_id=self.partner_b.id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[
{
'price_unit': 100.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('p_iva10_bc') + self._get_tax_by_xml_id('p_req014')).ids)],
},
{
'price_unit': 200.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('p_iva21_sc') + self._get_tax_by_xml_id('p_req52')).ids)],
},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'},
},
'FacturaRecibida': {
'TipoFactura': 'F1',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': 363.8,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'DesgloseIVA': {
'DetalleIVA': [
{
'BaseImponible': 100.0,
'CuotaSoportada': 10.0,
'TipoImpositivo': 10.0,
'CuotaRecargoEquivalencia': 1.4,
'TipoRecargoEquivalencia': 1.4,
},
{
'BaseImponible': 200.0,
'CuotaSoportada': 42.0,
'TipoImpositivo': 21.0,
'CuotaRecargoEquivalencia': 10.4,
'TipoRecargoEquivalencia': 5.2,
},
],
},
},
'CuotaDeducible': 52.0,
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'},
})
def test_120_in_invoice_p_iva21_sp_ex(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_invoice',
ref='sup0001',
partner_id=self.partner_b.id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('p_iva21_sp_ex').ids)]}],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'},
},
'FacturaRecibida': {
'TipoFactura': 'F1',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': 121.0,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'InversionSujetoPasivo': {
'DetalleIVA': [{
'BaseImponible': 100.0,
'CuotaSoportada': 21.0,
'TipoImpositivo': 21.0,
}],
},
},
'CuotaDeducible': 21.0,
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'},
})
def test_130_in_invoice_p_iva0_ns_p_iva10_bc(self):
# TODO make it work
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_invoice',
ref='sup0001',
partner_id=self.partner_b.id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[
{'price_unit': 100.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('p_iva0_ns').ids)]},
{'price_unit': 200.0, 'tax_ids': [(6, 0, self._get_tax_by_xml_id('p_iva10_bc').ids)]},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'},
},
'FacturaRecibida': {
'TipoFactura': 'F1',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': 320.0,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'DesgloseIVA': {
'DetalleIVA': [
{'BaseImponible': 100.0},
{'BaseImponible': 200.0, 'TipoImpositivo': 10.0, 'CuotaSoportada': 20.0},
],
},
},
'CuotaDeducible': 20.0,
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'},
})
def test_140_out_invoice_s_iva10b_s_irpf1(self):
# TODO: debug
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
partner_id=self.partner_b.id,
invoice_line_ids=[
{
'price_unit': 100.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('s_iva10b') + self._get_tax_by_xml_id('s_irpf1')).ids)],
},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'IDEmisorFactura': {'NIF': '59962470K'},
'NumSerieFacturaEmisor': 'INV/2019/00001',
'FechaExpedicionFacturaEmisor': '01-01-2019',
},
'PeriodoLiquidacion': {'Ejercicio': '2019', 'Periodo': '01'},
'FacturaExpedida': {
'TipoFactura': 'F1',
'ClaveRegimenEspecialOTrascendencia': '01',
'DescripcionOperacion': 'manual',
'TipoDesglose': {
'DesgloseFactura': {
'Sujeta': {
'NoExenta': {
'TipoNoExenta': 'S1',
'DesgloseIVA': {
'DetalleIVA': [
{
'TipoImpositivo': 10.0,
'BaseImponible': 100.0,
'CuotaRepercutida': 10.0,
},
],
},
},
},
},
},
'ImporteTotal': 110.0,
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
},
})
def test_150_in_invoice_p_iva10_bc_p_irpf1(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_invoice',
ref='sup0001',
partner_id=self.partner_b.id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[
{
'price_unit': 100.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('p_iva10_bc') + self._get_tax_by_xml_id('p_irpf1')).ids)],
},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'},
},
'FacturaRecibida': {
'TipoFactura': 'F1',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': 110.0,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'DesgloseIVA': {
'DetalleIVA': [
{
'BaseImponible': 100.0,
'CuotaSoportada': 10.0,
'TipoImpositivo': 10.0,
},
],
},
},
'CuotaDeducible': 10.0,
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'},
})
def test_160_in_refund_p_iva10_bc_p_irpf1(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_refund',
ref='sup0001',
partner_id=self.partner_b.id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[
{
'price_unit': 100.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('p_iva10_bc') + self._get_tax_by_xml_id('p_irpf1')).ids)],
},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'},
},
'FacturaRecibida': {
'TipoFactura': 'R4',
'TipoRectificativa': 'I',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': -110.0,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'DesgloseIVA': {
'DetalleIVA': [
{
'BaseImponible': -100.0,
'CuotaSoportada': -10.0,
'TipoImpositivo': 10.0,
},
],
},
},
'CuotaDeducible': -10.0,
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'},
})
def test_165_in_refund_p_iva10_bc_p_irpf1_multi_currency(self):
with freeze_time(self.frozen_today), \
patch('odoo.addons.l10n_es_edi_sii.models.account_edi_format.AccountEdiFormat._l10n_es_edi_call_web_service_sign',
new=mocked_l10n_es_edi_call_web_service_sign):
invoice = self.create_invoice(
move_type='in_refund',
ref='sup0001',
partner_id=self.partner_b.id,
currency_id=self.currency_data['currency'].id,
l10n_es_registration_date='2019-01-02',
invoice_line_ids=[
{
'price_unit': 200.0,
'tax_ids': [(6, 0, (self._get_tax_by_xml_id('p_iva10_bc') + self._get_tax_by_xml_id('p_irpf1')).ids)],
},
],
)
invoice.action_post()
generated_files = self._process_documents_web_services(invoice, {'es_sii'})
self.assertTrue(generated_files)
json_file = json.loads(generated_files[0].decode())[0]
self.assertEqual(json_file, {
'IDFactura': {
'FechaExpedicionFacturaEmisor': '01-01-2019',
'NumSerieFacturaEmisor': 'sup0001',
'IDEmisorFactura': {'NIF': 'F35999705'},
},
'FacturaRecibida': {
'TipoFactura': 'R4',
'TipoRectificativa': 'I',
'Contraparte': {'NombreRazon': 'partner_b', 'NIF': 'F35999705'},
'DescripcionOperacion': 'manual',
'ClaveRegimenEspecialOTrascendencia': '01',
'ImporteTotal': -110.0,
'FechaRegContable': '02-01-2019',
'DesgloseFactura': {
'DesgloseIVA': {
'DetalleIVA': [
{
'BaseImponible': -100.0,
'CuotaSoportada': -10.0,
'TipoImpositivo': 10.0,
},
],
},
},
'CuotaDeducible': -10.0,
},
'PeriodoLiquidacion': {'Periodo': '01', 'Ejercicio': '2019'},
})
| 46.955577 | 49,679 |
2,764 |
py
|
PYTHON
|
15.0
|
# coding: utf-8
import base64
from pytz import timezone
from datetime import datetime
from odoo.tests import tagged
from odoo.tools import misc, float_compare
from odoo.addons.account_edi.tests.common import AccountEdiTestCommon
@tagged('post_install_l10n', 'post_install', '-at_install')
class TestEsEdiCommon(AccountEdiTestCommon):
@classmethod
def setUpClass(cls, chart_template_ref='l10n_es.account_chart_template_full', edi_format_ref='l10n_es_edi_sii.edi_es_sii'):
super().setUpClass(chart_template_ref=chart_template_ref, edi_format_ref=edi_format_ref)
cls.frozen_today = datetime(year=2019, month=1, day=1, hour=0, minute=0, second=0, tzinfo=timezone('utc'))
# Allow to see the full result of AssertionError.
cls.maxDiff = None
# ==== Config ====
cls.certificate = cls.env['l10n_es_edi.certificate'].create({
'content': base64.encodebytes(
misc.file_open("l10n_es_edi_sii/demo/certificates/sello_entidad_act.p12", 'rb').read()),
'password': 'IZDesa2021',
})
cls.company_data['company'].write({
'country_id': cls.env.ref('base.es').id,
'state_id': cls.env.ref('base.state_es_z').id,
'l10n_es_edi_certificate_id': cls.certificate.id,
'vat': 'ES59962470K',
'l10n_es_edi_test_env': True,
'l10n_es_edi_tax_agency': 'bizkaia',
})
# ==== Business ====
cls.partner_a.write({
'vat': 'BE0477472701',
'country_id': cls.env.ref('base.be').id,
})
cls.partner_b.write({
'vat': 'ESF35999705',
})
cls.product_t = cls.env["product.product"].create(
{"name": "Test product"})
cls.partner_t = cls.env["res.partner"].create({"name": "Test partner", "vat": "ESF35999705"})
@classmethod
def _get_tax_by_xml_id(cls, trailing_xml_id):
""" Helper to retrieve a tax easily.
:param trailing_xml_id: The trailing tax's xml id.
:return: An account.tax record
"""
return cls.env.ref(f'l10n_es.{cls.env.company.id}_account_tax_template_{trailing_xml_id}')
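    # For illustration: with the Spanish chart of accounts loaded for the test
    # company, a call such as
    #     cls._get_tax_by_xml_id('p_iva10_bc')
    # resolves the xml id 'l10n_es.<company_id>_account_tax_template_p_iva10_bc'
    # and returns the matching account.tax record.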
@classmethod
def create_invoice(cls, **kwargs):
return cls.env['account.move'].with_context(edi_test_mode=True).create({
'move_type': 'out_invoice',
'partner_id': cls.partner_a.id,
'invoice_date': '2019-01-01',
'date': '2019-01-01',
**kwargs,
'invoice_line_ids': [(0, 0, {
'product_id': cls.product_a.id,
'price_unit': 1000.0,
**line_vals,
}) for line_vals in kwargs.get('invoice_line_ids', [])],
})
| 35.896104 | 2,764 |
1,975 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class SIIAccountTaxMixin(models.AbstractModel):
_name = 'l10n_es.sii.account.tax.mixin'
_description = 'SII Fields'
l10n_es_exempt_reason = fields.Selection(
selection=[
('E1', 'Art. 20'),
('E2', 'Art. 21'),
('E3', 'Art. 22'),
('E4', 'Art. 23 y 24'),
('E5', 'Art. 25'),
('E6', 'Otros'),
],
string="Exempt Reason (Spain)",
)
l10n_es_type = fields.Selection(
selection=[
('exento', 'Exento'),
('sujeto', 'Sujeto'),
('sujeto_agricultura', 'Sujeto Agricultura'),
('sujeto_isp', 'Sujeto ISP'),
('no_sujeto', 'No Sujeto'),
('no_sujeto_loc', 'No Sujeto por reglas de Localization'),
('no_deducible', 'No Deducible'),
('retencion', 'Retencion'),
('recargo', 'Recargo de Equivalencia'),
('ignore', 'Ignore even the base amount'),
],
string="Tax Type (Spain)", default='sujeto'
)
l10n_es_bien_inversion = fields.Boolean('Bien de Inversion', default=False)
class AccountTax(models.Model):
_inherit = ['account.tax', 'l10n_es.sii.account.tax.mixin']
_name = 'account.tax'
class AccountTaxTemplate(models.Model):
_inherit = ['account.tax.template', 'l10n_es.sii.account.tax.mixin']
_name = 'account.tax.template'
def _get_tax_vals(self, company, tax_template_to_tax):
# OVERRIDE
# Copy values from 'account.tax.template' to vals will be used to create a new 'account.tax'.
vals = super()._get_tax_vals(company, tax_template_to_tax)
vals['l10n_es_exempt_reason'] = self.l10n_es_exempt_reason
vals['l10n_es_type'] = self.l10n_es_type
vals['l10n_es_bien_inversion'] = self.l10n_es_bien_inversion
return vals
| 35.909091 | 1,975 |
1,456 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class AccountMove(models.Model):
_inherit = 'account.move'
l10n_es_edi_is_required = fields.Boolean(
string="Is the Spanish EDI needed",
compute='_compute_l10n_es_edi_is_required'
)
l10n_es_edi_csv = fields.Char(string="CSV return code", copy=False)
l10n_es_registration_date = fields.Date(
string="Registration Date", copy=False,
help="Technical field to keep the date the invoice was sent the first time as the date the invoice was "
"registered into the system.",
)
# -------------------------------------------------------------------------
# COMPUTE METHODS
# -------------------------------------------------------------------------
@api.depends('move_type', 'company_id')
def _compute_l10n_es_edi_is_required(self):
for move in self:
move.l10n_es_edi_is_required = move.is_invoice() \
and move.country_code == 'ES' \
and move.company_id.l10n_es_edi_tax_agency
@api.depends('l10n_es_edi_is_required')
def _compute_edi_show_cancel_button(self):
super()._compute_edi_show_cancel_button()
for move in self.filtered('l10n_es_edi_is_required'):
move.edi_show_cancel_button = False
| 40.444444 | 1,456 |
33,473 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from collections import defaultdict
from urllib3.util.ssl_ import create_urllib3_context, DEFAULT_CIPHERS
from urllib3.contrib.pyopenssl import inject_into_urllib3
from OpenSSL.crypto import load_certificate, load_privatekey, FILETYPE_PEM
from zeep.transports import Transport
from odoo import fields
from odoo.exceptions import UserError
from odoo.tools import html_escape
import math
import json
import requests
import zeep
from odoo import models, _
# Custom patches to perform the WSDL requests.
EUSKADI_CIPHERS = f"{DEFAULT_CIPHERS}:!DH"
class PatchedHTTPAdapter(requests.adapters.HTTPAdapter):
""" An adapter to block DH ciphers which may not work for the tax agencies called"""
def init_poolmanager(self, *args, **kwargs):
# OVERRIDE
inject_into_urllib3()
kwargs['ssl_context'] = create_urllib3_context(ciphers=EUSKADI_CIPHERS)
return super().init_poolmanager(*args, **kwargs)
def cert_verify(self, conn, url, verify, cert):
# OVERRIDE
# The last parameter is only used by the super method to check if the file exists.
# In our case, cert is an odoo record 'l10n_es_edi.certificate' so not a path to a file.
# By putting 'None' as last parameter, we ensure the check about TLS configuration is
# still made without checking temporary files exist.
super().cert_verify(conn, url, verify, None)
conn.cert_file = cert
conn.key_file = None
def get_connection(self, url, proxies=None):
# OVERRIDE
# Patch the OpenSSLContext to decode the certificate in-memory.
conn = super().get_connection(url, proxies=proxies)
context = conn.conn_kw['ssl_context']
def patched_load_cert_chain(l10n_es_odoo_certificate, keyfile=None, password=None):
cert_file, key_file, dummy = l10n_es_odoo_certificate.sudo()._decode_certificate()
cert_obj = load_certificate(FILETYPE_PEM, cert_file)
pkey_obj = load_privatekey(FILETYPE_PEM, key_file)
context._ctx.use_certificate(cert_obj)
context._ctx.use_privatekey(pkey_obj)
context.load_cert_chain = patched_load_cert_chain
return conn
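# For illustration, a typical way to use this adapter (as done further down in
# _l10n_es_edi_call_web_service_sign): mount it on a requests session and set the
# certificate record as the session certificate.
#
#     session = requests.Session()
#     session.cert = company.l10n_es_edi_certificate_id  # an l10n_es_edi.certificate record
#     session.mount('https://', PatchedHTTPAdapter())
#
# cert_verify() then receives the record instead of a file path, and get_connection()
# loads the decoded PEM certificate and key in memory.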
class AccountEdiFormat(models.Model):
_inherit = 'account.edi.format'
# -------------------------------------------------------------------------
# ES EDI
# -------------------------------------------------------------------------
def _l10n_es_edi_get_invoices_tax_details_info(self, invoice, filter_invl_to_apply=None):
def grouping_key_generator(tax_values):
tax = tax_values['tax_id']
return {
'applied_tax_amount': tax.amount,
'l10n_es_type': tax.l10n_es_type,
'l10n_es_exempt_reason': tax.l10n_es_exempt_reason if tax.l10n_es_type == 'exento' else False,
'l10n_es_bien_inversion': tax.l10n_es_bien_inversion,
}
def filter_to_apply(tax_values):
# For intra-community, we do not take into account the negative repartition line
return tax_values['tax_repartition_line_id'].factor_percent > 0.0
def full_filter_invl_to_apply(invoice_line):
if 'ignore' in invoice_line.tax_ids.flatten_taxes_hierarchy().mapped('l10n_es_type'):
return False
return filter_invl_to_apply(invoice_line) if filter_invl_to_apply else True
tax_details = invoice._prepare_edi_tax_details(
grouping_key_generator=grouping_key_generator,
filter_invl_to_apply=full_filter_invl_to_apply,
filter_to_apply=filter_to_apply,
)
sign = -1 if invoice.is_sale_document() else 1
tax_details_info = defaultdict(dict)
        # Detect which tax is the main tax for each 'recargo'. Since only a single combination of tax + recargo
        # is allowed on the same invoice, this can be deduced globally.
recargo_tax_details = {} # Mapping between main tax and recargo tax details
invoice_lines = invoice.invoice_line_ids.filtered(lambda x: not x.display_type)
if filter_invl_to_apply:
invoice_lines = invoice_lines.filtered(filter_invl_to_apply)
for line in invoice_lines:
taxes = line.tax_ids.flatten_taxes_hierarchy()
recargo_tax = [t for t in taxes if t.l10n_es_type == 'recargo']
if recargo_tax and taxes:
recargo_main_tax = taxes.filtered(lambda x: x.l10n_es_type in ('sujeto', 'sujeto_isp'))[:1]
if not recargo_tax_details.get(recargo_main_tax):
recargo_tax_details[recargo_main_tax] = [
x for x in tax_details['tax_details'].values()
if x['group_tax_details'][0]['tax_id'] == recargo_tax[0]
][0]
tax_amount_deductible = 0.0
tax_amount_retention = 0.0
base_amount_not_subject = 0.0
base_amount_not_subject_loc = 0.0
tax_subject_info_list = []
tax_subject_isp_info_list = []
for tax_values in tax_details['tax_details'].values():
if invoice.is_sale_document():
# Customer invoices
if tax_values['l10n_es_type'] in ('sujeto', 'sujeto_isp'):
tax_amount_deductible += tax_values['tax_amount']
base_amount = sign * tax_values['base_amount']
tax_info = {
'TipoImpositivo': tax_values['applied_tax_amount'],
'BaseImponible': round(base_amount, 2),
'CuotaRepercutida': round(math.copysign(tax_values['tax_amount'], base_amount), 2),
}
recargo = recargo_tax_details.get(tax_values['group_tax_details'][0]['tax_id'])
if recargo:
tax_info['CuotaRecargoEquivalencia'] = round(sign * recargo['tax_amount'], 2)
tax_info['TipoRecargoEquivalencia'] = recargo['applied_tax_amount']
if tax_values['l10n_es_type'] == 'sujeto':
tax_subject_info_list.append(tax_info)
else:
tax_subject_isp_info_list.append(tax_info)
elif tax_values['l10n_es_type'] == 'exento':
tax_details_info['Sujeta'].setdefault('Exenta', {'DetalleExenta': []})
tax_details_info['Sujeta']['Exenta']['DetalleExenta'].append({
'BaseImponible': round(sign * tax_values['base_amount'], 2),
'CausaExencion': tax_values['l10n_es_exempt_reason'],
})
elif tax_values['l10n_es_type'] == 'retencion':
tax_amount_retention += tax_values['tax_amount']
elif tax_values['l10n_es_type'] == 'no_sujeto':
base_amount_not_subject += tax_values['base_amount']
elif tax_values['l10n_es_type'] == 'no_sujeto_loc':
base_amount_not_subject_loc += tax_values['base_amount']
elif tax_values['l10n_es_type'] == 'ignore':
continue
if tax_subject_isp_info_list and not tax_subject_info_list:
tax_details_info['Sujeta']['NoExenta'] = {'TipoNoExenta': 'S2'}
elif not tax_subject_isp_info_list and tax_subject_info_list:
tax_details_info['Sujeta']['NoExenta'] = {'TipoNoExenta': 'S1'}
elif tax_subject_isp_info_list and tax_subject_info_list:
tax_details_info['Sujeta']['NoExenta'] = {'TipoNoExenta': 'S3'}
if tax_subject_info_list:
tax_details_info['Sujeta']['NoExenta'].setdefault('DesgloseIVA', {})
tax_details_info['Sujeta']['NoExenta']['DesgloseIVA'].setdefault('DetalleIVA', [])
tax_details_info['Sujeta']['NoExenta']['DesgloseIVA']['DetalleIVA'] += tax_subject_info_list
if tax_subject_isp_info_list:
tax_details_info['Sujeta']['NoExenta'].setdefault('DesgloseIVA', {})
tax_details_info['Sujeta']['NoExenta']['DesgloseIVA'].setdefault('DetalleIVA', [])
tax_details_info['Sujeta']['NoExenta']['DesgloseIVA']['DetalleIVA'] += tax_subject_isp_info_list
else:
# Vendor bills
if tax_values['l10n_es_type'] in ('sujeto', 'sujeto_isp', 'no_sujeto', 'no_sujeto_loc'):
tax_amount_deductible += tax_values['tax_amount']
elif tax_values['l10n_es_type'] == 'retencion':
tax_amount_retention += tax_values['tax_amount']
elif tax_values['l10n_es_type'] == 'no_sujeto':
base_amount_not_subject += tax_values['base_amount']
elif tax_values['l10n_es_type'] == 'no_sujeto_loc':
base_amount_not_subject_loc += tax_values['base_amount']
elif tax_values['l10n_es_type'] == 'ignore':
continue
if tax_values['l10n_es_type'] not in ['retencion', 'recargo']: # = in sujeto/sujeto_isp/no_deducible
base_amount = sign * tax_values['base_amount']
tax_details_info.setdefault('DetalleIVA', [])
tax_info = {
'BaseImponible': round(base_amount, 2),
}
if tax_values['applied_tax_amount'] > 0.0:
tax_info.update({
'TipoImpositivo': tax_values['applied_tax_amount'],
'CuotaSoportada': round(math.copysign(tax_values['tax_amount'], base_amount), 2),
})
if tax_values['l10n_es_bien_inversion']:
tax_info['BienInversion'] = 'S'
recargo = recargo_tax_details.get(tax_values['group_tax_details'][0]['tax_id'])
if recargo:
tax_info['CuotaRecargoEquivalencia'] = round(sign * recargo['tax_amount'], 2)
tax_info['TipoRecargoEquivalencia'] = recargo['applied_tax_amount']
tax_details_info['DetalleIVA'].append(tax_info)
if not invoice.company_id.currency_id.is_zero(base_amount_not_subject) and invoice.is_sale_document():
tax_details_info['NoSujeta']['ImportePorArticulos7_14_Otros'] = round(sign * base_amount_not_subject, 2)
if not invoice.company_id.currency_id.is_zero(base_amount_not_subject_loc) and invoice.is_sale_document():
tax_details_info['NoSujeta']['ImporteTAIReglasLocalizacion'] = round(sign * base_amount_not_subject_loc, 2)
return {
'tax_details_info': tax_details_info,
'tax_details': tax_details,
'tax_amount_deductible': tax_amount_deductible,
'tax_amount_retention': tax_amount_retention,
'base_amount_not_subject': base_amount_not_subject,
}
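    # For illustration, for a simple domestic customer invoice with a single 10%
    # 'sujeto' tax on a 100.0 base, the returned 'tax_details_info' is expected to
    # look like (see the expected SII payloads in the test suite for fuller examples):
    #
    #     {'Sujeta': {'NoExenta': {'TipoNoExenta': 'S1',
    #                              'DesgloseIVA': {'DetalleIVA': [{'TipoImpositivo': 10.0,
    #                                                              'BaseImponible': 100.0,
    #                                                              'CuotaRepercutida': 10.0}]}}}}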
def _l10n_es_edi_get_partner_info(self, partner):
eu_country_codes = set(self.env.ref('base.europe').country_ids.mapped('code'))
partner_info = {}
IDOtro_ID = partner.vat or 'NO_DISPONIBLE'
if (not partner.country_id or partner.country_id.code == 'ES') and partner.vat:
# ES partner with VAT.
partner_info['NIF'] = partner.vat[2:] if partner.vat.startswith('ES') else partner.vat
if self.env.context.get('error_1117'):
partner_info['IDOtro'] = {'IDType': '07', 'ID': IDOtro_ID}
elif partner.country_id.code in eu_country_codes and partner.vat:
# European partner.
partner_info['IDOtro'] = {'IDType': '02', 'ID': IDOtro_ID}
else:
partner_info['IDOtro'] = {'ID': IDOtro_ID}
if partner.vat:
partner_info['IDOtro']['IDType'] = '04'
else:
partner_info['IDOtro']['IDType'] = '06'
if partner.country_id:
partner_info['IDOtro']['CodigoPais'] = partner.country_id.code
return partner_info
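    # For illustration, the shapes returned for a few typical partners (the VAT
    # numbers shown are examples only):
    #
    #     Spanish partner, vat='ESF35999705'  -> {'NIF': 'F35999705'}
    #     Belgian partner, vat='BE0477472701' -> {'IDOtro': {'IDType': '02', 'ID': 'BE0477472701'}}
    #     Non-EU partner without VAT          -> {'IDOtro': {'ID': 'NO_DISPONIBLE', 'IDType': '06', 'CodigoPais': <country code>}}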
def _l10n_es_edi_get_invoices_info(self, invoices):
eu_country_codes = set(self.env.ref('base.europe').country_ids.mapped('code'))
simplified_partner = self.env.ref("l10n_es_edi_sii.partner_simplified")
info_list = []
for invoice in invoices:
com_partner = invoice.commercial_partner_id
is_simplified = invoice.partner_id == simplified_partner
info = {
'PeriodoLiquidacion': {
'Ejercicio': str(invoice.date.year),
'Periodo': str(invoice.date.month).zfill(2),
},
'IDFactura': {
'FechaExpedicionFacturaEmisor': invoice.invoice_date.strftime('%d-%m-%Y'),
},
}
if invoice.is_sale_document():
invoice_node = info['FacturaExpedida'] = {}
else:
invoice_node = info['FacturaRecibida'] = {}
# === Partner ===
partner_info = self._l10n_es_edi_get_partner_info(com_partner)
# === Invoice ===
invoice_node['DescripcionOperacion'] = invoice.invoice_origin or 'manual'
if invoice.is_sale_document():
info['IDFactura']['IDEmisorFactura'] = {'NIF': invoice.company_id.vat[2:]}
info['IDFactura']['NumSerieFacturaEmisor'] = invoice.name[:60]
if not is_simplified:
invoice_node['Contraparte'] = {
**partner_info,
'NombreRazon': com_partner.name[:120],
}
if not com_partner.country_id or com_partner.country_id.code in eu_country_codes:
invoice_node['ClaveRegimenEspecialOTrascendencia'] = '01'
else:
invoice_node['ClaveRegimenEspecialOTrascendencia'] = '02'
else:
info['IDFactura']['IDEmisorFactura'] = partner_info
info['IDFactura']['NumSerieFacturaEmisor'] = invoice.ref[:60]
if not is_simplified:
invoice_node['Contraparte'] = {
**partner_info,
'NombreRazon': com_partner.name[:120],
}
if invoice.l10n_es_registration_date:
invoice_node['FechaRegContable'] = invoice.l10n_es_registration_date.strftime('%d-%m-%Y')
else:
invoice_node['FechaRegContable'] = fields.Date.context_today(self).strftime('%d-%m-%Y')
country_code = com_partner.country_id.code
if not country_code or country_code == 'ES' or country_code not in eu_country_codes:
invoice_node['ClaveRegimenEspecialOTrascendencia'] = '01'
else:
invoice_node['ClaveRegimenEspecialOTrascendencia'] = '09' # For Intra-Com
if invoice.move_type == 'out_invoice':
invoice_node['TipoFactura'] = 'F2' if is_simplified else 'F1'
elif invoice.move_type == 'out_refund':
invoice_node['TipoFactura'] = 'R5' if is_simplified else 'R1'
invoice_node['TipoRectificativa'] = 'I'
elif invoice.move_type == 'in_invoice':
invoice_node['TipoFactura'] = 'F1'
elif invoice.move_type == 'in_refund':
invoice_node['TipoFactura'] = 'R4'
invoice_node['TipoRectificativa'] = 'I'
# === Taxes ===
sign = -1 if invoice.is_sale_document() else 1
if invoice.is_sale_document():
# Customer invoices
if com_partner.country_id.code in ('ES', False) and not (com_partner.vat or '').startswith("ESN"):
tax_details_info_vals = self._l10n_es_edi_get_invoices_tax_details_info(invoice)
invoice_node['TipoDesglose'] = {'DesgloseFactura': tax_details_info_vals['tax_details_info']}
invoice_node['ImporteTotal'] = round(sign * (
tax_details_info_vals['tax_details']['base_amount']
+ tax_details_info_vals['tax_details']['tax_amount']
- tax_details_info_vals['tax_amount_retention']
), 2)
else:
tax_details_info_service_vals = self._l10n_es_edi_get_invoices_tax_details_info(
invoice,
filter_invl_to_apply=lambda x: any(t.tax_scope == 'service' for t in x.tax_ids)
)
tax_details_info_consu_vals = self._l10n_es_edi_get_invoices_tax_details_info(
invoice,
filter_invl_to_apply=lambda x: any(t.tax_scope == 'consu' for t in x.tax_ids)
)
if tax_details_info_service_vals['tax_details_info']:
invoice_node.setdefault('TipoDesglose', {})
invoice_node['TipoDesglose'].setdefault('DesgloseTipoOperacion', {})
invoice_node['TipoDesglose']['DesgloseTipoOperacion']['PrestacionServicios'] = tax_details_info_service_vals['tax_details_info']
if tax_details_info_consu_vals['tax_details_info']:
invoice_node.setdefault('TipoDesglose', {})
invoice_node['TipoDesglose'].setdefault('DesgloseTipoOperacion', {})
invoice_node['TipoDesglose']['DesgloseTipoOperacion']['Entrega'] = tax_details_info_consu_vals['tax_details_info']
if not invoice_node.get('TipoDesglose'):
raise UserError(_(
"In case of a foreign customer, you need to configure the tax scope on taxes:\n%s",
"\n".join(invoice.line_ids.tax_ids.mapped('name'))
))
invoice_node['ImporteTotal'] = round(sign * (
tax_details_info_service_vals['tax_details']['base_amount']
+ tax_details_info_service_vals['tax_details']['tax_amount']
- tax_details_info_service_vals['tax_amount_retention']
+ tax_details_info_consu_vals['tax_details']['base_amount']
+ tax_details_info_consu_vals['tax_details']['tax_amount']
- tax_details_info_consu_vals['tax_amount_retention']
), 2)
else:
# Vendor bills
tax_details_info_isp_vals = self._l10n_es_edi_get_invoices_tax_details_info(
invoice,
filter_invl_to_apply=lambda x: any(t for t in x.tax_ids if t.l10n_es_type == 'sujeto_isp'),
)
tax_details_info_other_vals = self._l10n_es_edi_get_invoices_tax_details_info(
invoice,
filter_invl_to_apply=lambda x: not any(t for t in x.tax_ids if t.l10n_es_type == 'sujeto_isp'),
)
invoice_node['DesgloseFactura'] = {}
if tax_details_info_isp_vals['tax_details_info']:
invoice_node['DesgloseFactura']['InversionSujetoPasivo'] = tax_details_info_isp_vals['tax_details_info']
if tax_details_info_other_vals['tax_details_info']:
invoice_node['DesgloseFactura']['DesgloseIVA'] = tax_details_info_other_vals['tax_details_info']
invoice_node['ImporteTotal'] = round(sign * (
tax_details_info_isp_vals['tax_details']['base_amount']
+ tax_details_info_isp_vals['tax_details']['tax_amount']
- tax_details_info_isp_vals['tax_amount_retention']
+ tax_details_info_other_vals['tax_details']['base_amount']
+ tax_details_info_other_vals['tax_details']['tax_amount']
- tax_details_info_other_vals['tax_amount_retention']
), 2)
invoice_node['CuotaDeducible'] = round(sign * (
tax_details_info_isp_vals['tax_amount_deductible']
+ tax_details_info_other_vals['tax_amount_deductible']
), 2)
info_list.append(info)
return info_list
def _l10n_es_edi_web_service_aeat_vals(self, invoices):
if invoices[0].is_sale_document():
return {
'url': 'https://www2.agenciatributaria.gob.es/static_files/common/internet/dep/aplicaciones/es/aeat/ssii_1_1/fact/ws/SuministroFactEmitidas.wsdl',
'test_url': 'https://prewww1.aeat.es/wlpl/SSII-FACT/ws/fe/SiiFactFEV1SOAP',
}
else:
return {
'url': 'https://www2.agenciatributaria.gob.es/static_files/common/internet/dep/aplicaciones/es/aeat/ssii_1_1/fact/ws/SuministroFactRecibidas.wsdl',
'test_url': 'https://prewww1.aeat.es/wlpl/SSII-FACT/ws/fr/SiiFactFRV1SOAP',
}
def _l10n_es_edi_web_service_bizkaia_vals(self, invoices):
if invoices[0].is_sale_document():
return {
'url': 'https://www.bizkaia.eus/ogasuna/sii/documentos/SuministroFactEmitidas.wsdl',
'test_url': 'https://pruapps.bizkaia.eus/SSII-FACT/ws/fe/SiiFactFEV1SOAP',
}
else:
return {
'url': 'https://www.bizkaia.eus/ogasuna/sii/documentos/SuministroFactRecibidas.wsdl',
'test_url': 'https://pruapps.bizkaia.eus/SSII-FACT/ws/fr/SiiFactFRV1SOAP',
}
def _l10n_es_edi_web_service_gipuzkoa_vals(self, invoices):
if invoices[0].is_sale_document():
return {
'url': 'https://egoitza.gipuzkoa.eus/ogasuna/sii/ficheros/v1.1/SuministroFactEmitidas.wsdl',
'test_url': 'https://sii-prep.egoitza.gipuzkoa.eus/JBS/HACI/SSII-FACT/ws/fe/SiiFactFEV1SOAP',
}
else:
return {
'url': 'https://egoitza.gipuzkoa.eus/ogasuna/sii/ficheros/v1.1/SuministroFactRecibidas.wsdl',
'test_url': 'https://sii-prep.egoitza.gipuzkoa.eus/JBS/HACI/SSII-FACT/ws/fr/SiiFactFRV1SOAP',
}
def _l10n_es_edi_call_web_service_sign(self, invoices, info_list):
company = invoices.company_id
# All are sharing the same value, see '_get_batch_key'.
csv_number = invoices.mapped('l10n_es_edi_csv')[0]
# Set registration date
invoices.filtered(lambda inv: not inv.l10n_es_registration_date).write({
'l10n_es_registration_date': fields.Date.context_today(self),
})
# === Call the web service ===
# Get connection data.
l10n_es_edi_tax_agency = company.mapped('l10n_es_edi_tax_agency')[0]
connection_vals = getattr(self, f'_l10n_es_edi_web_service_{l10n_es_edi_tax_agency}_vals')(invoices)
header = {
'IDVersionSii': '1.1',
'Titular': {
'NombreRazon': company.name[:120],
'NIF': company.vat[2:],
},
'TipoComunicacion': 'A1' if csv_number else 'A0',
}
session = requests.Session()
session.cert = company.l10n_es_edi_certificate_id
session.mount('https://', PatchedHTTPAdapter())
transport = Transport(operation_timeout=60, timeout=60, session=session)
client = zeep.Client(connection_vals['url'], transport=transport)
if invoices[0].is_sale_document():
service_name = 'SuministroFactEmitidas'
else:
service_name = 'SuministroFactRecibidas'
if company.l10n_es_edi_test_env and not connection_vals.get('test_url'):
service_name += 'Pruebas'
# Establish the connection.
serv = client.bind('siiService', service_name)
if company.l10n_es_edi_test_env and connection_vals.get('test_url'):
serv._binding_options['address'] = connection_vals['test_url']
msg = ''
try:
if invoices[0].is_sale_document():
res = serv.SuministroLRFacturasEmitidas(header, info_list)
else:
res = serv.SuministroLRFacturasRecibidas(header, info_list)
except requests.exceptions.SSLError as error:
msg = _("The SSL certificate could not be validated.")
except zeep.exceptions.Error as error:
msg = _("Networking error:\n%s") % error
except Exception as error:
msg = str(error)
finally:
if msg:
return {inv: {
'error': msg,
'blocking_level': 'warning',
} for inv in invoices}
# Process response.
if not res or not res.RespuestaLinea:
return {inv: {
'error': _("The web service is not responding"),
'blocking_level': 'warning',
} for inv in invoices}
resp_state = res["EstadoEnvio"]
l10n_es_edi_csv = res['CSV']
if resp_state == 'Correcto':
invoices.write({'l10n_es_edi_csv': l10n_es_edi_csv})
return {inv: {'success': True} for inv in invoices}
results = {}
for respl in res.RespuestaLinea:
invoice_number = respl.IDFactura.NumSerieFacturaEmisor
# Retrieve the corresponding invoice.
# Note: ref can be the same for different partners but there is no enough information on the response
# to match the partner.
# Note: Invoices are batched per move_type.
if invoices[0].is_sale_document():
inv = invoices.filtered(lambda x: x.name[:60] == invoice_number)
else:
# 'ref' can be the same for different partners.
candidates = invoices.filtered(lambda x: x.ref[:60] == invoice_number)
if len(candidates) >= 1:
respl_partner_info = respl.IDFactura.IDEmisorFactura
inv = None
for candidate in candidates:
partner_info = self._l10n_es_edi_get_partner_info(candidate.commercial_partner_id)
if partner_info.get('NIF') and partner_info['NIF'] == respl_partner_info.NIF:
inv = candidate
break
if partner_info.get('IDOtro') and all(getattr(respl_partner_info.IDOtro, k) == v
for k, v in partner_info['IDOtro'].items()):
inv = candidate
break
if not inv:
                        # This case shouldn't happen and means there is something wrong in this code. However, we can't
                        # raise anything since the document has already been approved by the government. The only
                        # consequence is a poorly logged message in the chatter, so it is not a big deal.
inv = candidates[0]
else:
inv = candidates
resp_line_state = respl.EstadoRegistro
if resp_line_state in ('Correcto', 'AceptadoConErrores'):
inv.l10n_es_edi_csv = l10n_es_edi_csv
results[inv] = {'success': True}
if resp_line_state == 'AceptadoConErrores':
inv.message_post(body=_("This was accepted with errors: ") + html_escape(respl.DescripcionErrorRegistro))
elif respl.RegistroDuplicado:
results[inv] = {'success': True}
inv.message_post(body=_("We saw that this invoice was sent correctly before, but we did not treat "
"the response. Make sure it is not because of a wrong configuration."))
elif respl.CodigoErrorRegistro == 1117 and not self.env.context.get('error_1117'):
return self.with_context(error_1117=True)._post_invoice_edi(invoices)
else:
results[inv] = {
'error': _("[%s] %s", respl.CodigoErrorRegistro, respl.DescripcionErrorRegistro),
'blocking_level': 'error',
}
return results
# -------------------------------------------------------------------------
# EDI OVERRIDDEN METHODS
# -------------------------------------------------------------------------
def _get_invoice_edi_content(self, move):
if self.code != 'es_sii':
return super()._get_invoice_edi_content(move)
return json.dumps(self._l10n_es_edi_get_invoices_info(move)).encode()
def _is_required_for_invoice(self, invoice):
# OVERRIDE
if self.code != 'es_sii':
return super()._is_required_for_invoice(invoice)
return invoice.l10n_es_edi_is_required
def _needs_web_services(self):
# OVERRIDE
return self.code == 'es_sii' or super()._needs_web_services()
def _support_batching(self, move=None, state=None, company=None):
# OVERRIDE
if self.code != 'es_sii':
return super()._support_batching(move=move, state=state, company=company)
return state == 'to_send' and move.is_invoice()
def _get_batch_key(self, move, state):
# OVERRIDE
if self.code != 'es_sii':
return super()._get_batch_key(move, state)
return move.move_type, move.l10n_es_edi_csv
def _check_move_configuration(self, move):
# OVERRIDE
res = super()._check_move_configuration(move)
if self.code != 'es_sii':
return res
if not move.company_id.vat:
res.append(_("VAT number is missing on company %s", move.company_id.display_name))
for line in move.invoice_line_ids.filtered(lambda line: not line.display_type):
taxes = line.tax_ids.flatten_taxes_hierarchy()
recargo_count = taxes.mapped('l10n_es_type').count('recargo')
retention_count = taxes.mapped('l10n_es_type').count('retencion')
sujeto_count = taxes.mapped('l10n_es_type').count('sujeto')
no_sujeto_count = taxes.mapped('l10n_es_type').count('no_sujeto')
no_sujeto_loc_count = taxes.mapped('l10n_es_type').count('no_sujeto_loc')
if retention_count > 1:
res.append(_("Line %s should only have one retention tax.", line.display_name))
if recargo_count > 1:
res.append(_("Line %s should only have one recargo tax.", line.display_name))
if sujeto_count > 1:
res.append(_("Line %s should only have one sujeto tax.", line.display_name))
if no_sujeto_count > 1:
res.append(_("Line %s should only have one no sujeto tax.", line.display_name))
if no_sujeto_loc_count > 1:
res.append(_("Line %s should only have one no sujeto (localizations) tax.", line.display_name))
if sujeto_count + no_sujeto_loc_count + no_sujeto_count > 1:
res.append(_("Line %s should only have one main tax.", line.display_name))
if move.move_type in ('in_invoice', 'in_refund'):
if not move.ref:
res.append(_("You should put a vendor reference on this vendor bill. "))
return res
def _is_compatible_with_journal(self, journal):
# OVERRIDE
if self.code != 'es_sii':
return super()._is_compatible_with_journal(journal)
return journal.country_code == 'ES'
def _post_invoice_edi(self, invoices):
# OVERRIDE
if self.code != 'es_sii':
return super()._post_invoice_edi(invoices)
# Ensure a certificate is available.
certificate = invoices.company_id.l10n_es_edi_certificate_id
if not certificate:
return {inv: {
'error': _("Please configure the certificate for SII."),
'blocking_level': 'error',
} for inv in invoices}
# Ensure a tax agency is available.
l10n_es_edi_tax_agency = invoices.company_id.mapped('l10n_es_edi_tax_agency')[0]
if not l10n_es_edi_tax_agency:
return {inv: {
'error': _("Please specify a tax agency on your company for SII."),
'blocking_level': 'error',
} for inv in invoices}
# Generate the JSON.
info_list = self._l10n_es_edi_get_invoices_info(invoices)
# Call the web service.
res = self._l10n_es_edi_call_web_service_sign(invoices, info_list)
for inv in invoices:
if res.get(inv, {}).get('success'):
attachment = self.env['ir.attachment'].create({
'type': 'binary',
'name': 'jsondump.json',
'raw': json.dumps(info_list),
'mimetype': 'application/json',
'res_model': inv._name,
'res_id': inv.id,
})
res[inv]['attachment'] = attachment
return res
| 48.231988 | 33,473 |
3,675 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from base64 import b64decode
from pytz import timezone
from datetime import datetime
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat, pkcs12
from odoo import _, api, fields, models, tools
from odoo.exceptions import ValidationError
class Certificate(models.Model):
_name = 'l10n_es_edi.certificate'
_description = 'Personal Digital Certificate'
_order = 'date_start desc, id desc'
_rec_name = 'date_start'
content = fields.Binary(string="File", required=True, help="PFX Certificate")
password = fields.Char(help="Passphrase for the PFX certificate", groups="base.group_system")
date_start = fields.Datetime(readonly=True, help="The date on which the certificate starts to be valid")
date_end = fields.Datetime(readonly=True, help="The date on which the certificate expires")
company_id = fields.Many2one(comodel_name='res.company', required=True, default=lambda self: self.env.company)
# -------------------------------------------------------------------------
# HELPERS
# -------------------------------------------------------------------------
@api.model
def _get_es_current_datetime(self):
"""Get the current datetime with the Peruvian timezone. """
return datetime.now(timezone('Europe/Madrid'))
@tools.ormcache('self.content', 'self.password')
def _decode_certificate(self):
"""Return the content (DER encoded) and the certificate decrypted based in the point 3.1 from the RS 097-2012
http://www.vauxoo.com/r/manualdeautorizacion#page=21
"""
self.ensure_one()
if not self.password:
return None, None, None
private_key, certificate, dummy = pkcs12.load_key_and_certificates(
b64decode(self.content),
self.password.encode(),
backend=default_backend(),
)
pem_certificate = certificate.public_bytes(Encoding.PEM)
pem_private_key = private_key.private_bytes(
Encoding.PEM,
format=PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=NoEncryption(),
)
return pem_certificate, pem_private_key, certificate
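    # For illustration, a typical use of this helper (the variable names are examples):
    #
    #     pem_cert, pem_key, cert_obj = certificate._decode_certificate()
    #
    # 'pem_cert' and 'pem_key' are PEM-encoded bytes ready to be handed to OpenSSL,
    # 'cert_obj' is the cryptography x509 certificate object (used in create() below
    # to read the validity dates); all three are None when no password is set.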
# -------------------------------------------------------------------------
# LOW-LEVEL METHODS
# -------------------------------------------------------------------------
@api.model
def create(self, vals):
record = super().create(vals)
spain_tz = timezone('Europe/Madrid')
spain_dt = self._get_es_current_datetime()
try:
pem_certificate, pem_private_key, certificate = record._decode_certificate()
cert_date_start = spain_tz.localize(certificate.not_valid_before)
cert_date_end = spain_tz.localize(certificate.not_valid_after)
except Exception:
raise ValidationError(_(
"There has been a problem with the certificate, some usual problems can be:\n"
"- The password given or the certificate are not valid.\n"
"- The certificate content is invalid."
))
# Assign extracted values from the certificate
record.write({
'date_start': fields.Datetime.to_string(cert_date_start),
'date_end': fields.Datetime.to_string(cert_date_end),
})
if spain_dt > cert_date_end:
raise ValidationError(_("The certificate is expired since %s", record.date_end))
return record
| 42.241379 | 3,675 |
1,527 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResCompany(models.Model):
_inherit = 'res.company'
l10n_es_edi_certificate_id = fields.Many2one(
string="Certificate (ES)",
store=True,
readonly=False,
comodel_name='l10n_es_edi.certificate',
compute="_compute_l10n_es_edi_certificate",
)
l10n_es_edi_certificate_ids = fields.One2many(
comodel_name='l10n_es_edi.certificate',
inverse_name='company_id',
)
l10n_es_edi_tax_agency = fields.Selection(
string="Tax Agency for SII",
selection=[
('aeat', "Agencia Tributaria española"),
('gipuzkoa', "Hacienda Foral de Gipuzkoa"),
('bizkaia', "Hacienda Foral de Bizkaia"),
],
default=False,
)
l10n_es_edi_test_env = fields.Boolean(
string="Test Mode",
help="Use the test environment",
default=True,
)
@api.depends('country_id', 'l10n_es_edi_certificate_ids')
def _compute_l10n_es_edi_certificate(self):
for company in self:
if company.country_code == 'ES':
company.l10n_es_edi_certificate_id = self.env['l10n_es_edi.certificate'].search(
[('company_id', '=', company.id)],
order='date_end desc',
limit=1,
)
else:
company.l10n_es_edi_certificate_id = False
| 33.911111 | 1,526 |
469 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
from odoo import fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
l10n_es_edi_certificate_ids = fields.One2many(related='company_id.l10n_es_edi_certificate_ids', readonly=False)
l10n_es_edi_tax_agency = fields.Selection(related='company_id.l10n_es_edi_tax_agency', readonly=False)
l10n_es_edi_test_env = fields.Boolean(related='company_id.l10n_es_edi_test_env', readonly=False)
| 42.636364 | 469 |
715 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Skills Certification',
'category': 'Hidden',
'version': '1.0',
'summary': 'Add certification to resumé of your employees',
'description':
"""
Certification and Skills for HR
===============================
This module adds certifications to the resumé of your employees.
""",
'depends': ['hr_skills', 'survey'],
'data': [
'views/hr_templates.xml',
'data/hr_resume_data.xml',
],
'auto_install': True,
'assets': {
'web.assets_qweb': [
'hr_skills_survey/static/src/xml/**/*',
],
},
'license': 'LGPL-3',
}
| 25.464286 | 713 |
1,838 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
from odoo.tools import html2plaintext
class SurveyUserInput(models.Model):
_inherit = 'survey.user_input'
def _mark_done(self):
""" Will add certification to employee's resumé if
- The survey is a certification
- The user is linked to an employee
            - The user passed the test """
super(SurveyUserInput, self)._mark_done()
certification_user_inputs = self.filtered(lambda user_input: user_input.survey_id.certification and user_input.scoring_success)
partner_has_completed = {user_input.partner_id.id: user_input.survey_id for user_input in certification_user_inputs}
employees = self.env['hr.employee'].sudo().search([('user_id.partner_id', 'in', certification_user_inputs.mapped('partner_id').ids)])
for employee in employees:
line_type = self.env.ref('hr_skills_survey.resume_type_certification', raise_if_not_found=False)
survey = partner_has_completed.get(employee.user_id.partner_id.id)
self.env['hr.resume.line'].create({
'employee_id': employee.id,
'name': survey.title,
'date_start': fields.Date.today(),
'date_end': fields.Date.today(),
'description': html2plaintext(survey.description),
'line_type_id': line_type and line_type.id,
'display_type': 'certification',
'survey_id': survey.id
})
class ResumeLine(models.Model):
_inherit = 'hr.resume.line'
display_type = fields.Selection(selection_add=[('certification', 'Certification')])
survey_id = fields.Many2one('survey.survey', string='Certification', readonly=True)
| 44.804878 | 1,837 |
903 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
{
'name': "delivery_mondialrelay",
'summary': """ Let's choose a Point Relais® as shipping address """,
'description': """
This module allows your customer to choose a Point Relais® and use it as shipping address.
This module doesn't implement the WebService; it only integrates the widget.
The pre-configured delivery price is an example: you need to adapt the pricing rules.
""",
'category': 'Inventory/Delivery',
'version': '0.1',
'depends': ['delivery'],
'data': [
'data/data.xml',
'views/views.xml',
'wizard/choose_delivery_carrier_views.xml',
],
'assets': {
'web.assets_backend': [
'delivery_mondialrelay/static/src/js/mondialrelay.js',
'delivery_mondialrelay/static/src/scss/mondialrelay.scss',
],
},
'license': 'LGPL-3',
}
| 33.37037 | 901 |
2,746 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api, _
from odoo.tools.json import scriptsafe as json_safe
from odoo.exceptions import ValidationError
class ChooseDeliveryCarrier(models.TransientModel):
_inherit = 'choose.delivery.carrier'
shipping_zip = fields.Char(related='order_id.partner_shipping_id.zip')
shipping_country_code = fields.Char(related='order_id.partner_shipping_id.country_id.code')
is_mondialrelay = fields.Boolean(compute='_compute_is_mondialrelay')
mondialrelay_last_selected = fields.Char(string="Last Relay Selected")
mondialrelay_last_selected_id = fields.Char(compute='_compute_mr_last_selected_id')
mondialrelay_brand = fields.Char(related='carrier_id.mondialrelay_brand')
mondialrelay_colLivMod = fields.Char(related='carrier_id.mondialrelay_packagetype')
mondialrelay_allowed_countries = fields.Char(compute='_compute_mr_allowed_countries')
@api.depends('carrier_id')
def _compute_is_mondialrelay(self):
self.ensure_one()
self.is_mondialrelay = self.carrier_id.product_id.default_code == "MR"
@api.depends('carrier_id', 'order_id.partner_shipping_id')
def _compute_mr_last_selected_id(self):
self.ensure_one()
if self.order_id.partner_shipping_id.is_mondialrelay:
self.mondialrelay_last_selected_id = '%s-%s' % (
self.shipping_country_code,
self.order_id.partner_shipping_id.ref.lstrip('MR#'),
)
else:
self.mondialrelay_last_selected_id = ''
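    # For illustration: for a French shipping address whose partner was created with
    # ref 'MR#12345' (a hypothetical relay id), the computed value is 'FR-12345';
    # it stays empty when the shipping address is not a Point Relais®.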
@api.depends('carrier_id')
def _compute_mr_allowed_countries(self):
self.ensure_one()
self.mondialrelay_allowed_countries = ','.join(self.carrier_id.country_ids.mapped('code')).upper() or ''
def button_confirm(self):
if self.carrier_id.is_mondialrelay:
if not self.mondialrelay_last_selected:
raise ValidationError(_('Please, choose a Parcel Point'))
data = json_safe.loads(self.mondialrelay_last_selected)
partner_shipping = self.order_id.partner_id._mondialrelay_search_or_create({
'id': data['id'],
'name': data['name'],
'street': data['street'],
'street2': data['street2'],
'zip': data['zip'],
'city': data['city'],
'country_code': data['country'][:2].lower(),
})
if partner_shipping != self.order_id.partner_shipping_id:
self.order_id.partner_shipping_id = partner_shipping
self.order_id.onchange_partner_shipping_id()
return super().button_confirm()
| 45.016393 | 2,746 |
675 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, _
from odoo.exceptions import UserError
class SaleOrderMondialRelay(models.Model):
_inherit = 'sale.order'
def action_confirm(self):
unmatch = self.filtered(lambda so: so.carrier_id.is_mondialrelay != so.partner_shipping_id.is_mondialrelay)
if unmatch:
            error = _('Mondial Relay mismatch between delivery method and shipping address.')
if len(self) > 1:
error += ' (%s)' % ','.join(unmatch.mapped('name'))
raise UserError(error)
return super().action_confirm()
| 37.5 | 675 |
1,244 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api
class DeliveryCarrierMondialRelay(models.Model):
_inherit = 'delivery.carrier'
is_mondialrelay = fields.Boolean(compute='_compute_is_mondialrelay')
mondialrelay_brand = fields.Char(string='Brand Code', default='BDTEST ')
mondialrelay_packagetype = fields.Char(default="24R", groups="base.group_system") # Advanced
@api.depends('product_id.default_code')
def _compute_is_mondialrelay(self):
for c in self:
c.is_mondialrelay = c.product_id.default_code == "MR"
def fixed_get_tracking_link(self, picking):
return self.base_on_rule_get_tracking_link(picking)
def base_on_rule_get_tracking_link(self, picking):
if self.is_mondialrelay:
return 'https://www.mondialrelay.com/public/permanent/tracking.aspx?ens=%(brand)s&exp=%(track)s&language=%(lang)s' % {
'brand': picking.carrier_id.mondialrelay_brand,
'track': picking.carrier_tracking_ref,
'lang': (picking.partner_id.lang or 'fr').split('_')[0],
}
return super().base_on_rule_get_tracking_link(picking)
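    # For illustration, with the default 'BDTEST ' brand, a hypothetical tracking
    # reference '12345678' and a French customer, the generated link would be:
    #     https://www.mondialrelay.com/public/permanent/tracking.aspx?ens=BDTEST &exp=12345678&language=fr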
| 42.896552 | 1,244 |
1,501 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api
class ResPartnerMondialRelay(models.Model):
_inherit = 'res.partner'
is_mondialrelay = fields.Boolean(compute='_compute_is_mondialrelay')
@api.depends('ref')
def _compute_is_mondialrelay(self):
for p in self:
p.is_mondialrelay = p.ref and p.ref.startswith('MR#')
@api.model
def _mondialrelay_search_or_create(self, data):
ref = 'MR#%s' % data['id']
partner = self.search([
('id', 'child_of', self.commercial_partner_id.ids),
('ref', '=', ref),
            # fast check that the address is still the same
('street', '=', data['street']),
('zip', '=', data['zip']),
])
if not partner:
partner = self.create({
'ref': ref,
'name': data['name'],
'street': data['street'],
'street2': data['street2'],
'zip': data['zip'],
'city': data['city'],
'country_id': self.env.ref('base.%s' % data['country_code']).id,
'type': 'delivery',
'parent_id': self.id,
})
return partner
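    # For illustration, the 'data' dictionary is expected to look like the following
    # (values are hypothetical), as built by the delivery carrier wizard:
    #
    #     {'id': '12345', 'name': 'SUPER U', 'street': '1 RUE DE LA PAIX', 'street2': '',
    #      'zip': '75002', 'city': 'PARIS', 'country_code': 'fr'}
    #
    # which creates (or reuses) a 'delivery'-type child partner with ref 'MR#12345'.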
def _avatar_get_placeholder_path(self):
if self.is_mondialrelay:
return "delivery_mondialrelay/static/src/img/truck_mr.png"
return super()._avatar_get_placeholder_path()
| 34.113636 | 1,501 |
584 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Indian - UPI',
'version': '1.0',
'description': """
Invoice with UPI QR code
=========================
This module adds a QR code to the invoice report for UPI payment, allowing payment via any UPI app.
To print the UPI QR code, set the UPI id on the company and tick "QR Codes" in the configuration.
""",
'category': 'Accounting/Localizations',
'depends': ['l10n_in'],
'data': ['views/res_company_views.xml'],
'license': 'LGPL-3',
'auto_install': True,
}
| 32.444444 | 584 |
925 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
from odoo import models
from odoo.tools.image import image_data_uri
class AccountMove(models.Model):
_inherit = "account.move"
def generate_qr_code(self):
self.ensure_one()
if self.company_id.country_code == 'IN' and self.is_sale_document(include_receipts=True):
payment_url = 'upi://pay?pa=%s&pn=%s&am=%s&tr=%s&tn=%s' % (
self.company_id.l10n_in_upi_id,
self.company_id.name,
self.amount_residual,
self.payment_reference or self.name,
("Payment for %s" % self.name))
barcode = self.env['ir.actions.report'].barcode(barcode_type="QR", value=payment_url, width=120, height=120)
return image_data_uri(base64.b64encode(barcode))
return super().generate_qr_code()
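    # For illustration: for a company whose UPI id is 'mycompany@upi' (hypothetical)
    # and an invoice INV/2022/00001 with 118.0 left to pay, the URL encoded in the
    # QR code would be:
    #     upi://pay?pa=mycompany@upi&pn=My Company&am=118.0&tr=INV/2022/00001&tn=Payment for INV/2022/00001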
| 40.217391 | 925 |
243 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class Company(models.Model):
_inherit = 'res.company'
l10n_in_upi_id = fields.Char(string="UPI Id")
| 24.3 | 243 |
1,013 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Sales Expense',
'version': '1.0',
'category': 'Sales/Sales',
'summary': 'Quotation, Sales Orders, Delivery & Invoicing Control',
'description': """
Reinvoice Employee Expense
==========================
Create some products for which you can re-invoice the costs.
This module allows reinvoicing employee expenses by setting the SO directly on the expense.
""",
'depends': ['sale_management', 'hr_expense'],
'data': [
'views/product_view.xml',
'views/hr_expense_views.xml',
'views/sale_order_views.xml',
],
'demo': ['data/sale_expense_demo.xml'],
'test': [],
'installable': True,
'auto_install': True,
'assets': {
'web.assets_backend': [
'sale_expense/static/src/**/*',
],
'web.qunit_suite_tests': [
'sale_expense/static/tests/**/*',
],
},
'license': 'LGPL-3',
}
| 28.942857 | 1,013 |
5,103 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.hr_expense.tests.common import TestExpenseCommon
from odoo.addons.sale.tests.common import TestSaleCommon
from odoo.tests import tagged
@tagged('post_install', '-at_install')
class TestSaleExpense(TestExpenseCommon, TestSaleCommon):
def test_sale_expense(self):
""" Test the behaviour of sales orders when managing expenses """
# create a so with a product invoiced on delivery
so = self.env['sale.order'].create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': self.company_data['product_delivery_no'].name,
'product_id': self.company_data['product_delivery_no'].id,
'product_uom_qty': 2,
'product_uom': self.company_data['product_delivery_no'].uom_id.id,
'price_unit': self.company_data['product_delivery_no'].list_price,
})],
'pricelist_id': self.env.ref('product.list0').id,
})
so._compute_tax_id()
so.action_confirm()
so._create_analytic_account() # normally created at so confirmation when you use the right products
init_price = so.amount_total
# create some expense and validate it (expense at cost)
# Submit to Manager
sheet = self.env['hr.expense.sheet'].create({
'name': 'Expense for John Smith',
'employee_id': self.expense_employee.id,
'journal_id': self.company_data['default_journal_purchase'].id,
})
exp = self.env['hr.expense'].create({
'name': 'Air Travel',
'product_id': self.company_data['product_delivery_cost'].id,
'analytic_account_id': so.analytic_account_id.id,
'unit_amount': 621.54,
'employee_id': self.expense_employee.id,
'sheet_id': sheet.id,
'sale_order_id': so.id,
})
# Approve
sheet.approve_expense_sheets()
# Create Expense Entries
sheet.action_sheet_move_create()
# expense should now be in sales order
self.assertIn(self.company_data['product_delivery_cost'], so.mapped('order_line.product_id'), 'Sale Expense: expense product should be in so')
sol = so.order_line.filtered(lambda sol: sol.product_id.id == self.company_data['product_delivery_cost'].id)
self.assertEqual((sol.price_unit, sol.qty_delivered), (621.54, 1.0), 'Sale Expense: error when invoicing an expense at cost')
self.assertEqual(so.amount_total, init_price + sol.price_unit, 'Sale Expense: price of so should be updated after adding expense')
# create some expense and validate it (expense at sale price)
init_price = so.amount_total
prod_exp_2 = self.env['product.product'].create({
'name': 'Car Travel',
'expense_policy': 'sales_price',
'type': 'service',
'can_be_expensed': True,
'invoice_policy': 'delivery',
'list_price': 0.50,
'uom_id': self.env.ref('uom.product_uom_km').id,
'uom_po_id': self.env.ref('uom.product_uom_km').id,
})
# Submit to Manager
sheet = self.env['hr.expense.sheet'].create({
'name': 'Expense for John Smith',
'employee_id': self.expense_employee.id,
'journal_id': self.company_data['default_journal_purchase'].id,
})
exp = self.env['hr.expense'].create({
'name': 'Car Travel',
'product_id': prod_exp_2.id,
'analytic_account_id': so.analytic_account_id.id,
'product_uom_id': self.env.ref('uom.product_uom_km').id,
'unit_amount': 0.15,
'quantity': 100,
'employee_id': self.expense_employee.id,
'sheet_id': sheet.id,
'sale_order_id': so.id,
})
# Approve
sheet.approve_expense_sheets()
# Create Expense Entries
sheet.action_sheet_move_create()
# expense should now be in sales order
self.assertIn(prod_exp_2, so.mapped('order_line.product_id'), 'Sale Expense: expense product should be in so')
sol = so.order_line.filtered(lambda sol: sol.product_id.id == prod_exp_2.id)
self.assertEqual((sol.price_unit, sol.qty_delivered), (prod_exp_2.list_price, 100.0), 'Sale Expense: error when invoicing an expense at cost')
self.assertEqual(so.amount_untaxed, init_price + (prod_exp_2.list_price * 100.0), 'Sale Expense: price of so should be updated after adding expense')
# self.assertTrue(so.invoice_status, 'no', 'Sale Expense: expenses should not impact the invoice_status of the so')
# both expenses should be invoiced
inv = so._create_invoices()
self.assertEqual(inv.amount_untaxed, 621.54 + (prod_exp_2.list_price * 100.0), 'Sale Expense: invoicing of expense is wrong')
| 50.029412 | 5,103 |
5,057 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.hr_expense.tests.common import TestExpenseCommon
from odoo.addons.sale.tests.common import TestSaleCommon
from odoo.tests import tagged
@tagged('-at_install', 'post_install')
class TestReInvoice(TestExpenseCommon, TestSaleCommon):
def test_expenses_reinvoice(self):
(self.company_data['product_order_sales_price'] + self.company_data['product_delivery_sales_price']).write({
'can_be_expensed': True,
})
# create SO line and confirm SO (with only one line)
sale_order = self.env['sale.order'].with_context(mail_notrack=True, mail_create_nolog=True).create({
'partner_id': self.partner_a.id,
'partner_invoice_id': self.partner_a.id,
'partner_shipping_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': self.company_data['product_order_sales_price'].name,
'product_id': self.company_data['product_order_sales_price'].id,
'product_uom_qty': 2.0,
'price_unit': 1000.0,
})],
})
sale_order.action_confirm()
expense_sheet = self.env['hr.expense.sheet'].create({
'name': 'First Expense for employee',
'employee_id': self.expense_employee.id,
'journal_id': self.company_data['default_journal_purchase'].id,
'accounting_date': '2017-01-01',
'expense_line_ids': [
(0, 0, {
'name': 'expense_1',
'date': '2016-01-01',
'product_id': self.company_data['product_order_sales_price'].id,
'unit_amount': self.company_data['product_order_sales_price'].list_price,
'analytic_account_id': self.analytic_account_1.id,
'employee_id': self.expense_employee.id,
'sale_order_id': sale_order.id,
}),
(0, 0, {
'name': 'expense_2',
'date': '2016-01-01',
'product_id': self.company_data['product_delivery_sales_price'].id,
'unit_amount': self.company_data['product_delivery_sales_price'].list_price,
'analytic_account_id': self.analytic_account_1.id,
'employee_id': self.expense_employee.id,
'sale_order_id': sale_order.id,
}),
(0, 0, {
'name': 'expense_3',
'date': '2016-01-01',
'product_id': self.company_data['product_order_sales_price'].id,
'unit_amount': self.company_data['product_order_sales_price'].list_price,
'analytic_account_id': self.analytic_account_1.id,
'employee_id': self.expense_employee.id,
'sale_order_id': sale_order.id,
}),
(0, 0, {
'name': 'expense_4',
'date': '2016-01-01',
'product_id': self.company_data['product_delivery_sales_price'].id,
'unit_amount': self.company_data['product_delivery_sales_price'].list_price,
'analytic_account_id': self.analytic_account_1.id,
'employee_id': self.expense_employee.id,
'sale_order_id': sale_order.id,
}),
(0, 0, {
'name': 'expense_5',
'date': '2016-01-01',
'product_id': self.company_data['product_delivery_sales_price'].id,
'unit_amount': self.company_data['product_delivery_sales_price'].list_price,
'analytic_account_id': self.analytic_account_1.id,
'employee_id': self.expense_employee.id,
'sale_order_id': sale_order.id,
}),
],
})
expense_sheet.approve_expense_sheets()
expense_sheet.action_sheet_move_create()
self.assertRecordValues(sale_order.order_line, [
# Original SO line:
{
'qty_delivered': 0.0,
'product_uom_qty': 2.0,
'is_expense': False,
},
# Expense lines:
{
'qty_delivered': 1.0,
'product_uom_qty': 1.0,
'is_expense': True,
},
{
'qty_delivered': 3.0,
'product_uom_qty': 1.0,
'is_expense': True,
},
{
'qty_delivered': 1.0,
'product_uom_qty': 1.0,
'is_expense': True,
},
])
self.assertRecordValues(sale_order.order_line[1:], [
{'qty_delivered_method': 'analytic'},
{'qty_delivered_method': 'analytic'},
{'qty_delivered_method': 'analytic'},
])
| 43.222222 | 5,057 |
1,875 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
def _sale_can_be_reinvoice(self):
""" determine if the generated analytic line should be reinvoiced or not.
For Expense flow, if the product has a 'reinvoice policy' and a Sales Order is set on the expense, then we will reinvoice the AAL
"""
self.ensure_one()
if self.expense_id: # expense flow is different from vendor bill reinvoice flow
return self.expense_id.product_id.expense_policy in ['sales_price', 'cost'] and self.expense_id.sale_order_id
return super(AccountMoveLine, self)._sale_can_be_reinvoice()
def _sale_determine_order(self):
""" For move lines created from expense, we override the normal behavior.
Note: if no SO but an AA is given on the expense, we will determine anyway the SO from the AA, using the same
mecanism as in Vendor Bills.
"""
mapping_from_invoice = super(AccountMoveLine, self)._sale_determine_order()
mapping_from_expense = {}
for move_line in self.filtered(lambda move_line: move_line.expense_id):
mapping_from_expense[move_line.id] = move_line.expense_id.sale_order_id or None
mapping_from_invoice.update(mapping_from_expense)
return mapping_from_invoice
def _sale_prepare_sale_line_values(self, order, price):
# Add expense quantity to sales order line and update the sales order price because it will be charged to the customer in the end.
self.ensure_one()
res = super()._sale_prepare_sale_line_values(order, price)
if self.expense_id:
res.update({'product_uom_qty': self.expense_id.quantity})
return res
| 48.076923 | 1,875 |
2,503 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class Expense(models.Model):
_inherit = "hr.expense"
sale_order_id = fields.Many2one('sale.order', compute='_compute_sale_order_id', store=True, string='Customer to Reinvoice', readonly=False, tracking=True,
states={'done': [('readonly', True)], 'refused': [('readonly', True)]},
        # NOTE: only confirmed SOs can be selected, but this domain is activated through the name search with the `sale_expense_all_order`
        # context key. So, this domain is not the one actually applied.
domain="[('state', '=', 'sale'), ('company_id', '=', company_id)]",
help="If the category has an expense policy, it will be reinvoiced on this sales order")
can_be_reinvoiced = fields.Boolean("Can be reinvoiced", compute='_compute_can_be_reinvoiced')
analytic_account_id = fields.Many2one(compute='_compute_analytic_account_id', store=True, readonly=False)
@api.depends('product_id.expense_policy')
def _compute_can_be_reinvoiced(self):
for expense in self:
expense.can_be_reinvoiced = expense.product_id.expense_policy in ['sales_price', 'cost']
@api.depends('can_be_reinvoiced')
def _compute_sale_order_id(self):
for expense in self.filtered(lambda e: not e.can_be_reinvoiced):
expense.sale_order_id = False
@api.depends('sale_order_id')
def _compute_analytic_account_id(self):
for expense in self.filtered('sale_order_id'):
expense.analytic_account_id = expense.sale_order_id.sudo().analytic_account_id # `sudo` required for normal employee without sale access rights
def action_move_create(self):
""" When posting expense, if the AA is given, we will track cost in that
If a SO is set, this means we want to reinvoice the expense. But to do so, we
need the analytic entries to be generated, so a AA is required to reinvoice. So,
we ensure the AA if a SO is given.
"""
for expense in self.filtered(lambda expense: expense.sale_order_id and not expense.analytic_account_id):
if not expense.sale_order_id.analytic_account_id:
expense.sale_order_id._create_analytic_account()
expense.write({
'analytic_account_id': expense.sale_order_id.analytic_account_id.id
})
return super(Expense, self).action_move_create()
| 54.413043 | 2,503 |
1,771 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
from odoo import SUPERUSER_ID
from odoo.osv import expression
class SaleOrder(models.Model):
_inherit = 'sale.order'
expense_ids = fields.One2many('hr.expense', 'sale_order_id', string='Expenses', domain=[('state', '=', 'done')], readonly=True, copy=False)
expense_count = fields.Integer("# of Expenses", compute='_compute_expense_count', compute_sudo=True)
@api.model
def _name_search(self, name='', args=None, operator='ilike', limit=100, name_get_uid=None):
""" For expense, we want to show all sales order but only their name_get (no ir.rule applied), this is the only way to do it. """
if self._context.get('sale_expense_all_order') and self.user_has_groups('sales_team.group_sale_salesman') and not self.user_has_groups('sales_team.group_sale_salesman_all_leads'):
domain = expression.AND([args or [], ['&', ('state', '=', 'sale'), ('company_id', 'in', self.env.companies.ids)]])
return super(SaleOrder, self.sudo())._name_search(name=name, args=domain, operator=operator, limit=limit, name_get_uid=SUPERUSER_ID)
return super(SaleOrder, self)._name_search(name=name, args=args, operator=operator, limit=limit, name_get_uid=name_get_uid)
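    # Illustrative sketch (hypothetical call, not taken from this file): the expense form is
    # expected to reach the sudo branch above by passing the dedicated context key, e.g.
    #   self.env['sale.order'].with_context(sale_expense_all_order=True).name_search('S00042')
    # which returns the name_get of confirmed orders of the allowed companies, even those the
    # basic salesman cannot read directly.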
@api.depends('expense_ids')
def _compute_expense_count(self):
expense_data = self.env['hr.expense'].read_group([('sale_order_id', 'in', self.ids)], ['sale_order_id'], ['sale_order_id'])
mapped_data = dict([(item['sale_order_id'][0], item['sale_order_id_count']) for item in expense_data])
for sale_order in self:
sale_order.expense_count = mapped_data.get(sale_order.id, 0)
| 63.25 | 1,771 |
814 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class ProductTemplate(models.Model):
_inherit = 'product.template'
@api.depends('can_be_expensed')
def _compute_visible_expense_policy(self):
expense_products = self.filtered(lambda p: p.can_be_expensed)
for product_template in self - expense_products:
product_template.visible_expense_policy = False
super(ProductTemplate, expense_products)._compute_visible_expense_policy()
visibility = self.user_has_groups('hr_expense.group_hr_expense_user')
for product_template in expense_products:
if not product_template.visible_expense_policy:
product_template.visible_expense_policy = visibility
| 40.7 | 814 |
720 |
py
|
PYTHON
|
15.0
|
#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Work Entries',
'category': 'Human Resources/Employees',
'sequence': 39,
'summary': 'Manage work entries',
'description': "",
'installable': True,
'depends': [
'hr',
],
'data': [
'security/hr_work_entry_security.xml',
'security/ir.model.access.csv',
'data/hr_work_entry_data.xml',
'views/hr_work_entry_views.xml',
'views/hr_employee_views.xml',
'views/resource_calendar_views.xml',
],
'assets': {
'web.assets_qweb': [
'hr_work_entry/static/**/*',
],
},
'license': 'LGPL-3',
}
| 25.714286 | 720 |
1,069 |
py
|
PYTHON
|
15.0
|
# -*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields
class ResourceCalendarAttendance(models.Model):
_inherit = 'resource.calendar.attendance'
def _default_work_entry_type_id(self):
return self.env.ref('hr_work_entry.work_entry_type_attendance', raise_if_not_found=False)
work_entry_type_id = fields.Many2one(
'hr.work.entry.type', 'Work Entry Type', default=_default_work_entry_type_id,
groups="hr.group_hr_user")
def _copy_attendance_vals(self):
res = super()._copy_attendance_vals()
res['work_entry_type_id'] = self.work_entry_type_id.id
return res
class ResourceCalendarLeave(models.Model):
_inherit = 'resource.calendar.leaves'
work_entry_type_id = fields.Many2one(
'hr.work.entry.type', 'Work Entry Type',
groups="hr.group_hr_user")
def _copy_leave_vals(self):
res = super()._copy_leave_vals()
res['work_entry_type_id'] = self.work_entry_type_id.id
return res
| 32.393939 | 1,069 |
590 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, _
class HrEmployee(models.Model):
_inherit = 'hr.employee'
def action_open_work_entries(self):
self.ensure_one()
return {
'type': 'ir.actions.act_window',
'name': _('%s work entries', self.display_name),
'view_mode': 'calendar,tree,form',
'res_model': 'hr.work.entry',
'context': {'default_employee_id': self.id},
'domain': [('employee_id', '=', self.id)],
}
| 32.777778 | 590 |
11,095 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from contextlib import contextmanager
from dateutil.relativedelta import relativedelta
import itertools
from psycopg2 import OperationalError
from odoo import api, fields, models, tools, _
class HrWorkEntry(models.Model):
_name = 'hr.work.entry'
_description = 'HR Work Entry'
_order = 'conflict desc,state,date_start'
name = fields.Char(required=True, compute='_compute_name', store=True, readonly=False)
active = fields.Boolean(default=True)
employee_id = fields.Many2one('hr.employee', required=True, domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]", index=True)
date_start = fields.Datetime(required=True, string='From')
date_stop = fields.Datetime(compute='_compute_date_stop', store=True, readonly=False, string='To')
duration = fields.Float(compute='_compute_duration', store=True, string="Period")
work_entry_type_id = fields.Many2one('hr.work.entry.type', index=True)
color = fields.Integer(related='work_entry_type_id.color', readonly=True)
state = fields.Selection([
('draft', 'Draft'),
('validated', 'Validated'),
('conflict', 'Conflict'),
('cancelled', 'Cancelled')
], default='draft')
company_id = fields.Many2one('res.company', string='Company', readonly=True, required=True,
default=lambda self: self.env.company)
conflict = fields.Boolean('Conflicts', compute='_compute_conflict', store=True) # Used to show conflicting work entries first
department_id = fields.Many2one('hr.department', related='employee_id.department_id', store=True)
# There is no way for _error_checking() to detect conflicts in work
# entries that have been introduced in concurrent transactions, because of the transaction
# isolation.
    # So if 2 transactions create work entries in parallel, it is possible to create a conflict
    # that will not be visible to either transaction. There is no way to detect conflicts
# between different records in a safe manner unless a SQL constraint is used, e.g. via
# an EXCLUSION constraint [1]. This (obscure) type of constraint allows comparing 2 rows
# using special operator classes and it also supports partial WHERE clauses. Similarly to
# CHECK constraints, it's backed by an index.
# 1: https://www.postgresql.org/docs/9.6/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
_sql_constraints = [
('_work_entry_has_end', 'check (date_stop IS NOT NULL)', 'Work entry must end. Please define an end date or a duration.'),
('_work_entry_start_before_end', 'check (date_stop > date_start)', 'Starting time should be before end time.'),
(
'_work_entries_no_validated_conflict',
"""
EXCLUDE USING GIST (
tsrange(date_start, date_stop, '()') WITH &&,
int4range(employee_id, employee_id, '[]') WITH =
)
WHERE (state = 'validated' AND active = TRUE)
""",
'Validated work entries cannot overlap'
),
]
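    # Illustrative sketch (not executed by this module as-is): the exclusion constraint above
    # roughly corresponds to the following standalone DDL; employee_id is wrapped in an
    # int4range so that both clauses can be served by a single GIST index:
    #   ALTER TABLE hr_work_entry ADD CONSTRAINT work_entries_no_validated_conflict
    #   EXCLUDE USING GIST (
    #       tsrange(date_start, date_stop, '()') WITH &&,
    #       int4range(employee_id, employee_id, '[]') WITH =
    #   )
    #   WHERE (state = 'validated' AND active = TRUE);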
def init(self):
tools.create_index(self._cr, "hr_work_entry_date_start_date_stop_index", self._table, ["date_start", "date_stop"])
@api.depends('work_entry_type_id', 'employee_id')
def _compute_name(self):
for work_entry in self:
if not work_entry.employee_id:
work_entry.name = _('Undefined')
else:
work_entry.name = "%s: %s" % (work_entry.work_entry_type_id.name or _('Undefined Type'), work_entry.employee_id.name)
@api.depends('state')
def _compute_conflict(self):
for rec in self:
rec.conflict = rec.state == 'conflict'
@api.depends('date_stop', 'date_start')
def _compute_duration(self):
for work_entry in self:
work_entry.duration = work_entry._get_duration(work_entry.date_start, work_entry.date_stop)
@api.depends('date_start', 'duration')
def _compute_date_stop(self):
for work_entry in self.filtered(lambda w: w.date_start and w.duration):
work_entry.date_stop = work_entry.date_start + relativedelta(hours=work_entry.duration)
def _get_duration(self, date_start, date_stop):
if not date_start or not date_stop:
return 0
dt = date_stop - date_start
return dt.days * 24 + dt.seconds / 3600 # Number of hours
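    # Example (sketch): date_start = 2021-01-01 08:00 and date_stop = 2021-01-01 16:30
    # give dt.days == 0 and dt.seconds == 30600, i.e. a duration of 8.5 hours.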
def action_validate(self):
"""
Try to validate work entries.
        If some errors are found, `state` is set to 'conflict' on the conflicting work entries
        and the validation fails.
        :return: True if the validation succeeded
"""
work_entries = self.filtered(lambda work_entry: work_entry.state != 'validated')
if not work_entries._check_if_error():
work_entries.write({'state': 'validated'})
return True
return False
def _check_if_error(self):
if not self:
return False
undefined_type = self.filtered(lambda b: not b.work_entry_type_id)
undefined_type.write({'state': 'conflict'})
conflict = self._mark_conflicting_work_entries(min(self.mapped('date_start')), max(self.mapped('date_stop')))
return undefined_type or conflict
def _mark_conflicting_work_entries(self, start, stop):
"""
Set `state` to `conflict` for overlapping work entries
between two dates.
If `self.ids` is truthy then check conflicts with the corresponding work entries.
Return True if overlapping work entries were detected.
"""
        # Use the PostgreSQL range type `tsrange`, which is a range of timestamps.
        # It supports the overlap operator (&&), which is useful to detect overlaps.
        # Use '()' to exclude the lower and upper bounds of the range.
        # Filter on date_start and date_stop (both indexed) in the WHERE clause to
        # limit the resulting set size and speed up the query.
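        # Illustrative example (made-up values): two ranges overlap when && returns true:
        #   SELECT tsrange('2020-01-06 08:00', '2020-01-06 10:00', '()')
        #       && tsrange('2020-01-06 09:00', '2020-01-06 11:00', '()');  -- => true
        # With '()' bounds, back-to-back entries (08:00-10:00 then 10:00-12:00) do not overlap.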
self.flush(['date_start', 'date_stop', 'employee_id', 'active'])
query = """
SELECT b1.id,
b2.id
FROM hr_work_entry b1
JOIN hr_work_entry b2
ON b1.employee_id = b2.employee_id
AND b1.id <> b2.id
WHERE b1.date_start <= %(stop)s
AND b1.date_stop >= %(start)s
AND b1.active = TRUE
AND b2.active = TRUE
AND tsrange(b1.date_start, b1.date_stop, '()') && tsrange(b2.date_start, b2.date_stop, '()')
AND {}
""".format("b2.id IN %(ids)s" if self.ids else "b2.date_start <= %(stop)s AND b2.date_stop >= %(start)s")
self.env.cr.execute(query, {"stop": stop, "start": start, "ids": tuple(self.ids)})
conflicts = set(itertools.chain.from_iterable(self.env.cr.fetchall()))
self.browse(conflicts).write({
'state': 'conflict',
})
return bool(conflicts)
@api.model_create_multi
def create(self, vals_list):
work_entries = super().create(vals_list)
work_entries._check_if_error()
return work_entries
def write(self, vals):
skip_check = not bool({'date_start', 'date_stop', 'employee_id', 'work_entry_type_id', 'active'} & vals.keys())
if 'state' in vals:
if vals['state'] == 'draft':
vals['active'] = True
elif vals['state'] == 'cancelled':
vals['active'] = False
skip_check &= all(self.mapped(lambda w: w.state != 'conflict'))
if 'active' in vals:
vals['state'] = 'draft' if vals['active'] else 'cancelled'
with self._error_checking(skip=skip_check):
return super(HrWorkEntry, self).write(vals)
def unlink(self):
with self._error_checking():
return super().unlink()
def _reset_conflicting_state(self):
self.filtered(lambda w: w.state == 'conflict').write({'state': 'draft'})
@contextmanager
def _error_checking(self, start=None, stop=None, skip=False):
"""
        Context manager used for conflict checking.
        When exiting the context manager, conflicts are checked
        for all work entries within a date range. By default, the start and end dates are
        computed from `self` (min and max respectively) but they can be overridden by passing
        other values as parameters.
        :param start: datetime overriding the default start date
        :param stop: datetime overriding the default stop date
        :param skip: if True, no error checking is done
"""
try:
skip = skip or self.env.context.get('hr_work_entry_no_check', False)
start = start or min(self.mapped('date_start'), default=False)
stop = stop or max(self.mapped('date_stop'), default=False)
if not skip and start and stop:
work_entries = self.sudo().with_context(hr_work_entry_no_check=True).search([
('date_start', '<', stop),
('date_stop', '>', start),
('state', 'not in', ('validated', 'cancelled')),
])
work_entries._reset_conflicting_state()
yield
except OperationalError:
# the cursor is dead, do not attempt to use it or we will shadow the root exception
# with a "psycopg2.InternalError: current transaction is aborted, ..."
skip = True
raise
finally:
if not skip and start and stop:
# New work entries are handled in the create method,
# no need to reload work entries.
work_entries.exists()._check_if_error()
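    # Illustrative usage sketch (mirrors the write()/unlink() overrides above), where `new_stop`
    # is a placeholder datetime: wrap a batch modification so conflicting entries are reset and
    # re-checked once on exit:
    #   with work_entries._error_checking():
    #       work_entries.write({'date_stop': new_stop})
    # Passing skip=True (or the `hr_work_entry_no_check` context key) bypasses the check.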
class HrWorkEntryType(models.Model):
_name = 'hr.work.entry.type'
_description = 'HR Work Entry Type'
name = fields.Char(required=True, translate=True)
    code = fields.Char(required=True, help="Careful, the code is used in many references; changing it could lead to unwanted changes.")
color = fields.Integer(default=0)
sequence = fields.Integer(default=25)
active = fields.Boolean(
'Active', default=True,
help="If the active field is set to false, it will allow you to hide the work entry type without removing it.")
_sql_constraints = [
('unique_work_entry_code', 'UNIQUE(code)', 'The same code cannot be associated to multiple work entry types.'),
]
class Contacts(models.Model):
""" Personnal calendar filter """
_name = 'hr.user.work.entry.employee'
_description = 'Work Entries Employees'
user_id = fields.Many2one('res.users', 'Me', required=True, default=lambda self: self.env.user)
employee_id = fields.Many2one('hr.employee', 'Employee', required=True)
active = fields.Boolean('Active', default=True)
_sql_constraints = [
('user_id_employee_id_unique', 'UNIQUE(user_id,employee_id)', 'You cannot have the same employee twice.')
]
| 45.847107 | 11,095 |
548 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Mass mailing on track speakers',
'category': 'Hidden',
'version': '1.0',
'description':
"""
Mass mail event track speakers
==============================
Bridge module adding UX requirements to ease mass mailing of event track speakers.
""",
'depends': ['website_event_track', 'mass_mailing'],
'data': [
'views/event_views.xml'
],
'auto_install': True,
'license': 'LGPL-3',
}
| 26.095238 | 548 |
750 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class Event(models.Model):
_inherit = "event.event"
def action_mass_mailing_track_speakers(self):
mass_mailing_action = dict(
name='Mass Mail Attendees',
type='ir.actions.act_window',
res_model='mailing.mailing',
view_mode='form',
target='current',
context=dict(
default_mailing_model_id=self.env.ref('website_event_track.model_event_track').id,
default_mailing_domain=repr([('event_id', 'in', self.ids), ('stage_id.is_cancel', '!=', True)]),
),
)
return mass_mailing_action
| 34.090909 | 750 |
320 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class EventTrack(models.Model):
_inherit = 'event.track'
_mailing_enabled = True
def _mailing_get_default_domain(self, mailing):
return [('stage_id.is_cancel', '=', False)]
| 26.666667 | 320 |
534 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': "Calendar - SMS",
'version': "1.1",
'summary': 'Send text messages as event reminders',
'description': "Send text messages as event reminders",
'category': 'Hidden',
'depends': ['calendar', 'sms'],
'data': [
'security/sms_security.xml',
'data/sms_data.xml',
'views/calendar_views.xml',
],
'application': False,
'auto_install': True,
'license': 'LGPL-3',
}
| 28.105263 | 534 |
1,246 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from odoo.tests.common import SingleTransactionCase
class TestCalendarSms(SingleTransactionCase):
@classmethod
def setUpClass(cls):
super(TestCalendarSms, cls).setUpClass()
cls.partner_phone = cls.env['res.partner'].create({
'name': 'Partner With Phone Number',
'phone': '0477777777',
'country_id': cls.env.ref('base.be').id,
})
cls.partner_no_phone = cls.env['res.partner'].create({
'name': 'Partner With No Phone Number',
'country_id': cls.env.ref('base.be').id,
})
def test_attendees_with_number(self):
"""Test if only partners with sanitized number are returned."""
attendees = self.env['calendar.event'].create({
'name': "Boostrap vs Foundation",
'start': datetime(2022, 1, 1, 11, 11),
'stop': datetime(2022, 2, 2, 22, 22),
'partner_ids': [(6, 0, [self.partner_phone.id, self.partner_no_phone.id])],
})._sms_get_default_partners()
self.assertEqual(len(attendees), 1, "There should be only one partner retrieved")
| 37.757576 | 1,246 |
896 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class AlarmManager(models.AbstractModel):
_inherit = 'calendar.alarm_manager'
@api.model
def _send_reminder(self):
""" Cron method, overridden here to send SMS reminders as well
"""
super()._send_reminder()
events_by_alarm = self._get_events_by_alarm_to_notify('sms')
if not events_by_alarm:
return
event_ids = list(set(event_id for event_ids in events_by_alarm.values() for event_id in event_ids))
events = self.env['calendar.event'].browse(event_ids)
alarms = self.env['calendar.alarm'].browse(events_by_alarm.keys())
for event in events:
            alarm = event.alarm_ids.filtered(lambda alarm: alarm.id in alarms.ids)
event._do_sms_reminder(alarm)
| 37.333333 | 896 |
1,791 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, _
from odoo.exceptions import UserError
class CalendarEvent(models.Model):
_inherit = 'calendar.event'
def _sms_get_default_partners(self):
""" Method overridden from mail.thread (defined in the sms module).
SMS text messages will be sent to attendees that haven't declined the event(s).
"""
return self.mapped('attendee_ids').filtered(lambda att: att.state != 'declined' and att.partner_id.phone_sanitized).mapped('partner_id')
def _do_sms_reminder(self, alarm):
""" Send an SMS text reminder to attendees that haven't declined the event """
for event in self:
event._message_sms_with_template(
template=alarm.sms_template_id,
template_fallback=_("Event reminder: %(name)s, %(time)s.", name=event.name, time=event.display_time),
partner_ids=self._sms_get_default_partners().ids,
put_in_queue=False
)
def action_send_sms(self):
if not self.partner_ids:
raise UserError(_("There are no attendees on these events"))
return {
'type': 'ir.actions.act_window',
'name': _("Send SMS Text Message"),
'res_model': 'sms.composer',
'view_mode': 'form',
'target': 'new',
'context': {
'default_composition_mode': 'mass',
'default_res_model': 'res.partner',
'default_res_ids': self.partner_ids.ids,
'default_mass_keep_log': True,
},
}
def _get_trigger_alarm_types(self):
return super()._get_trigger_alarm_types() + ['sms']
| 40.704545 | 1,791 |
1,052 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class CalendarAlarm(models.Model):
_inherit = 'calendar.alarm'
alarm_type = fields.Selection(selection_add=[
('sms', 'SMS Text Message')
], ondelete={'sms': 'set default'})
sms_template_id = fields.Many2one(
'sms.template', string="SMS Template",
domain=[('model', 'in', ['calendar.event'])],
compute='_compute_sms_template_id', readonly=False, store=True,
help="Template used to render SMS reminder content.")
@api.depends('alarm_type', 'mail_template_id')
def _compute_sms_template_id(self):
for alarm in self:
if alarm.alarm_type == 'sms' and not alarm.sms_template_id:
alarm.sms_template_id = self.env['ir.model.data']._xmlid_to_res_id('calendar_sms.sms_template_data_calendar_reminder')
elif alarm.alarm_type != 'sms' or not alarm.sms_template_id:
alarm.sms_template_id = False
| 42.08 | 1,052 |
1,196 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Google Calendar',
'version': '1.0',
'category': 'Productivity',
'description': "",
'depends': ['google_account', 'calendar'],
'data': [
'data/google_calendar_data.xml',
'security/ir.model.access.csv',
'wizard/reset_account_views.xml',
'views/res_config_settings_views.xml',
'views/res_users_views.xml',
'views/google_calendar_views.xml',
],
'demo': [],
'installable': True,
'auto_install': False,
'assets': {
'web.assets_backend': [
'google_calendar/static/src/js/google_calendar_popover.js',
'google_calendar/static/src/js/google_calendar.js',
'google_calendar/static/src/scss/google_calendar.scss',
],
'web.qunit_suite_tests': [
'google_calendar/static/tests/**/*',
],
'web.qunit_mobile_suite_tests': [
'google_calendar/static/tests/mock_server.js',
],
'web.assets_qweb': [
'google_calendar/static/src/xml/*.xml',
],
},
'license': 'LGPL-3',
}
| 31.473684 | 1,196 |
3,203 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from unittest.mock import MagicMock, patch
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService
from odoo.addons.google_account.models.google_service import GoogleService
from odoo.addons.google_calendar.models.res_users import User
from odoo.addons.google_calendar.models.google_sync import GoogleSync
from odoo.tests.common import HttpCase
def patch_api(func):
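    # Test helper: this decorator replaces the low-level Google calls (_google_insert,
    # _google_patch, _google_delete) with MagicMocks for the duration of the decorated test,
    # so no outgoing API request is made and the assert* helpers below can inspect the
    # recorded calls.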
@patch.object(GoogleSync, '_google_insert', MagicMock(spec=GoogleSync._google_insert))
@patch.object(GoogleSync, '_google_delete', MagicMock(spec=GoogleSync._google_delete))
@patch.object(GoogleSync, '_google_patch', MagicMock(spec=GoogleSync._google_patch))
def patched(self, *args, **kwargs):
return func(self, *args, **kwargs)
return patched
@patch.object(User, '_get_google_calendar_token', lambda user: 'dummy-token')
class TestSyncGoogle(HttpCase):
def setUp(self):
super().setUp()
self.google_service = GoogleCalendarService(self.env['google.service'])
def assertGoogleEventDeleted(self, google_id):
GoogleSync._google_delete.assert_called()
args, dummy = GoogleSync._google_delete.call_args
self.assertEqual(args[1], google_id, "Event should have been deleted")
def assertGoogleEventNotDeleted(self):
GoogleSync._google_delete.assert_not_called()
def assertGoogleEventInserted(self, values, timeout=None):
expected_args = (values,)
expected_kwargs = {'timeout': timeout} if timeout else {}
GoogleSync._google_insert.assert_called_once()
args, kwargs = GoogleSync._google_insert.call_args
self.assertEqual(args[1:], expected_args) # skip Google service arg
self.assertEqual(kwargs, expected_kwargs)
def assertGoogleEventNotInserted(self):
GoogleSync._google_insert.assert_not_called()
def assertGoogleEventPatched(self, google_id, values, timeout=None):
expected_args = (google_id, values)
expected_kwargs = {'timeout': timeout} if timeout else {}
GoogleSync._google_patch.assert_called_once()
args, kwargs = GoogleSync._google_patch.call_args
self.assertEqual(args[1:], expected_args) # skip Google service arg
self.assertEqual(kwargs, expected_kwargs)
def assertGoogleEventNotPatched(self):
GoogleSync._google_patch.assert_not_called()
def assertGoogleAPINotCalled(self):
self.assertGoogleEventNotPatched()
self.assertGoogleEventNotInserted()
self.assertGoogleEventNotDeleted()
def assertGoogleEventSendUpdates(self, expected_value):
GoogleService._do_request.assert_called_once()
args, _ = GoogleService._do_request.call_args
val = "?sendUpdates=%s" % expected_value
self.assertTrue(val in args[0], "The URL should contain %s" % val)
def call_post_commit_hooks(self):
"""
        Manually call the post-commit hooks defined with the @after_commit decorator.
"""
funcs = self.env.cr.postcommit._funcs.copy()
while funcs:
func = funcs.popleft()
func()
| 42.144737 | 3,203 |
71,245 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import pytz
from datetime import datetime, date
from dateutil.relativedelta import relativedelta
from odoo.tests.common import new_test_user
from odoo.addons.google_calendar.tests.test_sync_common import TestSyncGoogle, patch_api
from odoo.addons.google_calendar.utils.google_calendar import GoogleEvent
from odoo import Command, tools
class TestSyncGoogle2Odoo(TestSyncGoogle):
def setUp(self):
super().setUp()
self.private_partner = self.env['res.partner'].create({
'name': 'Private Contact',
'email': '[email protected]',
'type': 'private',
})
@property
def now(self):
return pytz.utc.localize(datetime.now()).isoformat()
def sync(self, events):
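        # Helper mirroring the order of a real Google -> Odoo synchronization:
        # recurrences are processed first, then the remaining single events.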
events.clear_type_ambiguity(self.env)
google_recurrence = events.filter(GoogleEvent.is_recurrence)
self.env['calendar.recurrence']._sync_google2odoo(google_recurrence)
self.env['calendar.event']._sync_google2odoo(events - google_recurrence)
@patch_api
def test_new_google_event(self):
description = '<script>alert("boom")</script><p style="white-space: pre"><h1>HELLO</h1></p><ul><li>item 1</li><li>item 2</li></ul>'
values = {
'id': 'oj44nep1ldf8a3ll02uip0c9aa',
'description': description,
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [{
'displayName': 'Mitchell Admin',
'email': '[email protected]',
'responseStatus': 'needsAction'
},],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
event = self.env['calendar.event'].search([('google_id', '=', values.get('id'))])
self.assertTrue(event, "It should have created an event")
self.assertEqual(event.name, values.get('summary'))
self.assertFalse(event.allday)
self.assertEqual(event.description, tools.html_sanitize(description))
self.assertEqual(event.start, datetime(2020, 1, 13, 15, 55))
self.assertEqual(event.stop, datetime(2020, 1, 13, 18, 55))
admin_attendee = event.attendee_ids.filtered(lambda e: e.email == '[email protected]')
self.assertEqual('[email protected]', admin_attendee.email)
self.assertEqual('Mitchell Admin', admin_attendee.partner_id.name)
self.assertEqual(event.partner_ids, event.attendee_ids.partner_id)
self.assertEqual('needsAction', admin_attendee.state)
self.assertGoogleAPINotCalled()
@patch_api
def test_invalid_owner_property(self):
values = {
'id': 'oj44nep1ldf8a3ll02uip0c9aa',
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'extendedProperties': {
'shared': {'%s_owner_id' % self.env.cr.dbname: "invalid owner id"}
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
event = self.env['calendar.event'].search([('google_id', '=', values.get('id'))])
self.assertEqual(event.user_id, self.env.user)
self.assertGoogleAPINotCalled()
@patch_api
def test_valid_owner_property(self):
user = new_test_user(self.env, login='calendar-user')
values = {
'id': 'oj44nep1ldf8a3ll02uip0c9aa',
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'extendedProperties': {
'shared': {'%s_owner_id' % self.env.cr.dbname: str(user.id)}
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
event = self.env['calendar.event'].search([('google_id', '=', values.get('id'))])
self.assertEqual(event.user_id, user)
self.assertGoogleAPINotCalled()
@patch_api
def test_cancelled(self):
""" Cancel event when the current user is the organizer """
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
event = self.env['calendar.event'].create({
'name': 'coucou',
'start': date(2020, 1, 6),
'stop': date(2020, 1, 6),
'google_id': google_id,
'user_id': self.env.user.id,
'need_sync': False,
'partner_ids': [(6, 0, self.env.user.partner_id.ids)] # current user is attendee
})
gevent = GoogleEvent([{
'id': google_id,
'status': 'cancelled',
}])
self.sync(gevent)
self.assertFalse(event.exists())
self.assertGoogleAPINotCalled()
@patch_api
def test_attendee_cancelled(self):
""" Cancel event when the current user is not the organizer """
user = new_test_user(self.env, login='calendar-user')
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
event = self.env['calendar.event'].create({
'name': 'coucou',
'start': date(2020, 1, 5),
'stop': date(2020, 1, 6),
'allday': True,
'google_id': google_id,
'need_sync': False,
'user_id': False, # Not the current user
'partner_ids': [Command.set(user.partner_id.ids)],
})
gevent = GoogleEvent([{
'id': google_id,
'status': 'cancelled',
}])
user_attendee = event.attendee_ids
self.assertEqual(user_attendee.state, 'needsAction')
# We have to call sync with the attendee user
gevent.clear_type_ambiguity(self.env)
self.env['calendar.event'].with_user(user)._sync_google2odoo(gevent)
self.assertTrue(event.active)
user_attendee = event.attendee_ids
self.assertTrue(user_attendee)
self.assertEqual(user_attendee.state, 'declined')
self.assertGoogleAPINotCalled()
@patch_api
def test_private_extended_properties(self):
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
event = self.env['calendar.event'].create({
'name': 'coucou',
'start': date(2020, 1, 6),
'stop': date(2020, 1, 6),
'allday': True,
'google_id': google_id,
'need_sync': False,
'user_id': False, # Not the current user
'partner_ids': [(6, 0, self.env.user.partner_id.ids)] # current user is attendee
})
user_attendee = event.attendee_ids
self.assertTrue(user_attendee)
self.assertEqual(user_attendee.state, 'accepted')
user_attendee.do_decline()
        # To avoid 403 errors, we send a limited dictionary when we don't have write access.
        # The guestsCanModify property is not properly handled yet.
self.assertGoogleEventPatched(event.google_id, {
'id': event.google_id,
'summary': 'coucou',
'start': {'date': str(event.start_date)},
'end': {'date': str(event.stop_date + relativedelta(days=1))},
'attendees': [{'email': '[email protected]', 'responseStatus': 'declined'}],
'extendedProperties': {'private': {'%s_odoo_id' % self.env.cr.dbname: event.id}},
'reminders': {'overrides': [], 'useDefault': False},
})
@patch_api
def test_attendee_removed(self):
user = new_test_user(self.env, login='calendar-user')
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
event = self.env['calendar.event'].with_user(user).create({
'name': 'coucou',
'start': date(2020, 1, 6),
'stop': date(2020, 1, 6),
'google_id': google_id,
'user_id': False, # user is not owner
'need_sync': False,
'partner_ids': [(6, 0, user.partner_id.ids)], # but user is attendee
})
gevent = GoogleEvent([{
'id': google_id,
'description': 'Small mini desc',
"updated": self.now,
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [], # <= attendee removed in Google
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
}])
self.assertEqual(event.partner_ids, user.partner_id)
self.assertEqual(event.attendee_ids.partner_id, user.partner_id)
self.sync(gevent)
        # User attendee removed, but the gevent owner might be added after sync.
self.assertNotEqual(event.attendee_ids.partner_id, user.partner_id)
self.assertNotEqual(event.partner_ids, user.partner_id)
self.assertGoogleAPINotCalled()
@patch_api
def test_attendee_removed_multiple(self):
user = new_test_user(self.env, login='calendar-user')
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'allday': True,
'start': datetime(2020, 1, 6),
'stop': datetime(2020, 1, 6),
'user_id': False, # user is not owner
'need_sync': False,
'partner_ids': [(6, 0, user.partner_id.ids)], # but user is attendee
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
gevent = GoogleEvent([{
'id': google_id,
"updated": self.now,
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'coucou',
'visibility': 'public',
'attendees': [], # <= attendee removed in Google
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'date': '2020-01-6'},
'end': {'date': '2020-01-7'},
}])
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(events.partner_ids, user.partner_id)
self.assertEqual(events.attendee_ids.partner_id, user.partner_id)
self.sync(gevent)
        # User attendee removed, but the gevent owner might be added after sync.
self.assertNotEqual(events.attendee_ids.partner_id, user.partner_id)
self.assertNotEqual(events.partner_ids, user.partner_id)
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence(self):
recurrence_id = 'oj44nep1ldf8a3ll02uip0c9aa'
values = {
'id': recurrence_id,
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'date': '2020-01-6'},
'end': {'date': '2020-01-7'},
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', values.get('id'))])
self.assertTrue(recurrence, "it should have created a recurrence")
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 3, "it should have created a recurrence with 3 events")
self.assertTrue(all(events.mapped('recurrency')))
self.assertEqual(events[0].start_date, date(2020, 1, 6))
self.assertEqual(events[1].start_date, date(2020, 1, 13))
self.assertEqual(events[2].start_date, date(2020, 1, 20))
self.assertEqual(events[0].google_id, '%s_20200106' % recurrence_id)
self.assertEqual(events[1].google_id, '%s_20200113' % recurrence_id)
self.assertEqual(events[2].google_id, '%s_20200120' % recurrence_id)
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_datetime(self):
recurrence_id = 'oj44nep1ldf8a3ll02uip0c9aa'
values = {
'id': recurrence_id,
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'dateTime': '2020-01-06T18:00:00+01:00'},
'end': {'dateTime': '2020-01-06T19:00:00+01:00'},
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', values.get('id'))])
self.assertTrue(recurrence, "it should have created a recurrence")
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 3, "it should have created a recurrence with 3 events")
self.assertTrue(all(events.mapped('recurrency')))
self.assertEqual(events[0].start, datetime(2020, 1, 6, 17, 0))
self.assertEqual(events[1].start, datetime(2020, 1, 13, 17, 0))
self.assertEqual(events[2].start, datetime(2020, 1, 20, 17, 0))
self.assertEqual(events[0].google_id, '%s_20200106T170000Z' % recurrence_id)
self.assertEqual(events[1].google_id, '%s_20200113T170000Z' % recurrence_id)
self.assertEqual(events[2].google_id, '%s_20200120T170000Z' % recurrence_id)
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_exdate(self):
recurrence_id = 'oj44nep1ldf8a3ll02uip0c9aa'
events = GoogleEvent([{
'id': recurrence_id,
'summary': 'Pricing new update',
'organizer': {'email': self.env.user.email, 'self': True},
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'date': '2020-01-6'},
'end': {'date': '2020-01-7'},
}, { # Third event has been deleted
'id': '%s_20200113' % recurrence_id,
'originalStartTime': {'dateTime': '2020-01-13'},
'recurringEventId': 'oj44nep1ldf8a3ll02uip0c9pk',
'reminders': {'useDefault': True},
'status': 'cancelled',
}])
self.sync(events)
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', recurrence_id)])
self.assertTrue(recurrence, "it should have created a recurrence")
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 2, "it should have created a recurrence with 2 events")
self.assertEqual(events[0].start_date, date(2020, 1, 6))
self.assertEqual(events[1].start_date, date(2020, 1, 20))
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_first_exdate(self):
recurrence_id = "4c0de517evkk3ra294lmut57vm"
events = GoogleEvent([{
"id": recurrence_id,
"updated": "2020-01-13T16:17:03.806Z",
"summary": "r rul",
"start": {"date": "2020-01-6"},
'organizer': {'email': self.env.user.email, 'self': True},
"end": {"date": "2020-01-7"},
'reminders': {'useDefault': True},
"recurrence": ["RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO"],
}, {
"id": "%s_20200106" % recurrence_id,
"status": "cancelled",
"recurringEventId": "4c0de517evkk3ra294lmut57vm",
'reminders': {'useDefault': True},
"originalStartTime": {
"date": "2020-01-06"
}
}])
self.sync(events)
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', recurrence_id)])
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 2, "it should have created a recurrence with 2 events")
self.assertEqual(events[0].start_date, date(2020, 1, 13))
self.assertEqual(events[1].start_date, date(2020, 1, 20))
self.assertGoogleAPINotCalled()
@patch_api
    def test_recurrence_first_updated(self):
recurrence_id = "4c0de517evkk3ra294lmut57vm"
events = GoogleEvent([{
'id': recurrence_id,
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=WE'],
'start': {'date': '2020-01-01'},
'end': {'date': '2020-01-02'},
'status': 'confirmed',
'summary': 'rrule',
'reminders': {'useDefault': True},
'updated': self.now
}, {
'summary': 'edited', # Name changed
'id': '%s_20200101' % recurrence_id,
'originalStartTime': {'date': '2020-01-01'},
'recurringEventId': recurrence_id,
'start': {'date': '2020-01-01'},
'end': {'date': '2020-01-02'},
'reminders': {'useDefault': True},
'updated': self.now,
}])
self.sync(events)
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', recurrence_id)])
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 3, "it should have created a recurrence with 3 events")
self.assertEqual(events[0].name, 'edited')
self.assertEqual(events[1].name, 'rrule')
self.assertEqual(events[2].name, 'rrule')
self.assertGoogleAPINotCalled()
@patch_api
def test_existing_recurrence_first_updated(self):
recurrence_id = "4c0de517evkk3ra294lmut57vm"
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'allday': True,
'start': datetime(2020, 1, 6),
'stop': datetime(2020, 1, 6),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': recurrence_id,
'rrule': 'FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
})
recurrence._apply_recurrence()
values = [{
'summary': 'edited', # Name changed
'id': '%s_20200106' % recurrence_id,
'originalStartTime': {'date': '2020-01-06'},
'recurringEventId': recurrence_id,
'start': {'date': '2020-01-06'},
'end': {'date': '2020-01-07'},
'reminders': {'useDefault': True},
'updated': self.now,
}]
self.env['calendar.event']._sync_google2odoo(GoogleEvent(values))
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', recurrence_id)])
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 3, "it should have created a recurrence with 3 events")
self.assertEqual(events[0].name, 'edited')
self.assertEqual(events[1].name, 'coucou')
self.assertEqual(events[2].name, 'coucou')
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_outlier(self):
recurrence_id = 'oj44nep1ldf8a3ll02uip0c9aa'
events = GoogleEvent([{
'id': recurrence_id,
'summary': 'Pricing new update',
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO'],
'start': {'date': '2020-01-6'},
'end': {'date': '2020-01-7'},
'reminders': {'useDefault': True},
'updated': self.now,
},
{ # Third event has been moved
'id': '%s_20200113' % recurrence_id,
'summary': 'Pricing new update',
'start': {'date': '2020-01-18'},
'end': {'date': '2020-01-19'},
'originalStartTime': {'date': '2020-01-13'},
'reminders': {'useDefault': True},
'updated': self.now,
}])
self.sync(events)
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', recurrence_id)])
self.assertTrue(recurrence, "it should have created a recurrence")
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 3, "it should have created a recurrence with 3 events")
self.assertEqual(events[0].start_date, date(2020, 1, 6))
self.assertEqual(events[1].start_date, date(2020, 1, 18), "It should not be in sync with the recurrence")
self.assertEqual(events[2].start_date, date(2020, 1, 20))
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_moved(self):
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'allday': True,
'start': datetime(2020, 1, 6),
'stop': datetime(2020, 1, 6),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
values = {
'id': google_id,
'summary': 'coucou',
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=WE'], # Now wednesday
'start': {'date': '2020-01-08'},
'end': {'date': '2020-01-09'},
'reminders': {'useDefault': True},
"attendees": [
{
"email": "[email protected]", "state": "accepted",
},
],
'updated': self.now,
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 2)
self.assertEqual(recurrence.rrule, 'FREQ=WEEKLY;COUNT=2;BYDAY=WE')
self.assertEqual(events[0].start_date, date(2020, 1, 8))
self.assertEqual(events[1].start_date, date(2020, 1, 15))
self.assertEqual(events[0].google_id, '%s_20200108' % google_id)
self.assertEqual(events[1].google_id, '%s_20200115' % google_id)
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_name_updated(self):
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'allday': True,
'start': datetime(2020, 1, 6),
'stop': datetime(2020, 1, 6),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
values = {
'id': google_id,
'summary': 'coucou again',
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=MO'],
'start': {'date': '2020-01-06'},
'end': {'date': '2020-01-07'},
'reminders': {'useDefault': True},
"attendees": [
{
"email": "[email protected]", "state": "accepted",
},
],
'updated': self.now,
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 2)
self.assertEqual(recurrence.rrule, 'FREQ=WEEKLY;COUNT=2;BYDAY=MO')
self.assertEqual(events.mapped('name'), ['coucou again', 'coucou again'])
self.assertEqual(events[0].start_date, date(2020, 1, 6))
self.assertEqual(events[1].start_date, date(2020, 1, 13))
self.assertEqual(events[0].google_id, '%s_20200106' % google_id)
self.assertEqual(events[1].google_id, '%s_20200113' % google_id)
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_write_with_outliers(self):
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'start': datetime(2021, 2, 15, 8, 0, 0),
'stop': datetime(2021, 2, 15, 10, 0, 0),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=3;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(events[0].google_id, '%s_20210215T080000Z' % google_id)
self.assertEqual(events[1].google_id, '%s_20210222T080000Z' % google_id)
self.assertEqual(events[2].google_id, '%s_20210301T080000Z' % google_id)
# Modify start of one of the events.
middle_event = recurrence.calendar_event_ids.filtered(lambda e: e.start == datetime(2021, 2, 22, 8, 0, 0))
middle_event.write({
'start': datetime(2021, 2, 22, 16, 0, 0),
'need_sync': False,
})
values = {
'id': google_id,
'summary': 'coucou again',
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=3;BYDAY=MO'],
'start': {'dateTime': '2021-02-15T09:00:00+01:00'}, # 8:00 UTC
'end': {'dateTime': '2021-02-15-T11:00:00+01:00'},
'reminders': {'useDefault': True},
"attendees": [
{
"email": "[email protected]", "state": "accepted",
},
],
'updated': self.now,
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 3)
self.assertEqual(recurrence.rrule, 'FREQ=WEEKLY;COUNT=3;BYDAY=MO')
self.assertEqual(events.mapped('name'), ['coucou again', 'coucou again', 'coucou again'])
self.assertEqual(events[0].start, datetime(2021, 2, 15, 8, 0, 0))
self.assertEqual(events[1].start, datetime(2021, 2, 22, 16, 0, 0))
self.assertEqual(events[2].start, datetime(2021, 3, 1, 8, 0, 0))
        # The google_id of recurring events should not be modified when an event's start is modified;
        # the original start date or datetime should always be present.
self.assertEqual(events[0].google_id, '%s_20210215T080000Z' % google_id)
self.assertEqual(events[1].google_id, '%s_20210222T080000Z' % google_id)
self.assertEqual(events[2].google_id, '%s_20210301T080000Z' % google_id)
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_write_time_fields(self):
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'start': datetime(2021, 2, 15, 8, 0, 0),
'stop': datetime(2021, 2, 15, 10, 0, 0),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=3;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
        # Google modifies the start/stop of the base event.
        # When the start/stop or all-day values are updated, the recurrence should be reapplied.
values = {
'id': google_id,
'summary': "It's me again",
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=4;BYDAY=MO'],
'start': {'dateTime': '2021-02-15T12:00:00+01:00'}, # 11:00 UTC
'end': {'dateTime': '2021-02-15-T15:00:00+01:00'},
'reminders': {'useDefault': True},
"attendees": [
{
"email": "[email protected]", "state": "accepted",
},
],
'updated': self.now,
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(events[0].start, datetime(2021, 2, 15, 11, 0, 0))
self.assertEqual(events[1].start, datetime(2021, 2, 22, 11, 0, 0))
self.assertEqual(events[2].start, datetime(2021, 3, 1, 11, 0, 0))
self.assertEqual(events[3].start, datetime(2021, 3, 8, 11, 0, 0))
        # Changes coming from Google should not be pushed back (no extra API call).
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_deleted(self):
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'start': datetime(2021, 2, 15, 8, 0, 0),
'stop': datetime(2021, 2, 15, 10, 0, 0),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=3;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
events = recurrence.calendar_event_ids
values = {
'id': google_id,
'status': 'cancelled',
}
self.sync(GoogleEvent([values]))
self.assertFalse(recurrence.exists(), "The recurrence should be deleted")
self.assertFalse(events.exists(), "All events should be deleted")
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_timezone(self):
""" Ensure that the timezone of the base_event is saved on the recurrency
Google save the TZ on the event and we save it on the recurrency.
"""
recurrence_id = 'oj44nep1ldf8a3ll02uip0c9aa'
values = {
'id': recurrence_id,
'description': '',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Event with ',
'visibility': 'public',
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'dateTime': '2020-01-06T18:00:00+01:00', 'timeZone': 'Pacific/Auckland'},
'end': {'dateTime': '2020-01-06T19:00:00+01:00', 'timeZone': 'Pacific/Auckland'},
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', values.get('id'))])
        self.assertEqual(recurrence.event_tz, 'Pacific/Auckland', "The Google event timezone should be saved on the recurrence")
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_no_duplicate(self):
values = [
{
"attendees": [
{
"email": "[email protected]",
"responseStatus": "needsAction",
"self": True,
},
{"email": "[email protected]", "responseStatus": "needsAction"},
{
"email": "[email protected]",
"organizer": True,
"responseStatus": "accepted",
},
],
"created": "2023-02-20T11:45:07.000Z",
"creator": {"email": "[email protected]"},
"end": {"dateTime": "2023-02-25T16:20:00+01:00", "timeZone": "Europe/Zurich"},
"etag": '"4611038912699385"',
"eventType": "default",
"iCalUID": "[email protected]",
"id": "9lxiofipomymx2yr1yt0hpep99",
"kind": "calendar#event",
"organizer": {"email": "[email protected]"},
"recurrence": ["RRULE:FREQ=WEEKLY;BYDAY=SA"],
"reminders": {"useDefault": True},
"sequence": 0,
"start": {"dateTime": "2023-02-25T15:30:00+01:00", "timeZone": "Europe/Zurich"},
"status": "confirmed",
"summary": "Weekly test",
"updated": "2023-02-20T11:45:08.547Z",
},
{
"attendees": [
{
"email": "[email protected]",
"responseStatus": "needsAction",
"self": True,
},
{
"email": "[email protected]",
"organizer": True,
"responseStatus": "needsAction",
},
{"email": "[email protected]", "responseStatus": "accepted"},
],
"created": "2023-02-20T11:45:44.000Z",
"creator": {"email": "[email protected]"},
"end": {"dateTime": "2023-02-26T15:20:00+01:00", "timeZone": "Europe/Zurich"},
"etag": '"5534851880843722"',
"eventType": "default",
"iCalUID": "[email protected]",
"id": "hhb5t0cffjkndvlg7i22f7byn1",
"kind": "calendar#event",
"organizer": {"email": "[email protected]"},
"recurrence": ["RRULE:FREQ=WEEKLY;BYDAY=SU"],
"reminders": {"useDefault": True},
"sequence": 0,
"start": {"dateTime": "2023-02-26T14:30:00+01:00", "timeZone": "Europe/Zurich"},
"status": "confirmed",
"summary": "Weekly test 2",
"updated": "2023-02-20T11:48:00.634Z",
},
]
google_events = GoogleEvent(values)
self.env['calendar.recurrence']._sync_google2odoo(google_events)
no_duplicate_gevent = google_events.filter(lambda e: e.id == "9lxiofipomymx2yr1yt0hpep99")
dt_start = datetime.fromisoformat(no_duplicate_gevent.start["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=0)
dt_end = datetime.fromisoformat(no_duplicate_gevent.end["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=23)
no_duplicate_event = self.env["calendar.event"].search(
[
("name", "=", no_duplicate_gevent.summary),
("start", ">=", dt_start),
("stop", "<=", dt_end,)
]
)
self.assertEqual(len(no_duplicate_event), 1)
@patch_api
def test_simple_event_into_recurrency(self):
""" Synched single events should be converted in recurrency without problems"""
google_id = 'aaaaaaaaaaaa'
values = {
'id': google_id,
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [{
'displayName': 'Mitchell Admin',
'email': '[email protected]',
'responseStatus': 'needsAction'
}, ],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-06T18:00:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
        # The event is transformed into a recurrence on Google
values = {
'id': google_id,
'description': '',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Event with ',
'visibility': 'public',
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'dateTime': '2020-01-06T18:00:00+01:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2020-01-06T19:00:00+01:00', 'timeZone': 'Europe/Brussels'},
}
recurrence = self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 3, "it should have created a recurrence with 3 events")
event = self.env['calendar.event'].search([('google_id', '=', values.get('id'))])
        self.assertFalse(event.exists(), "The old event should not exist anymore")
self.assertGoogleAPINotCalled()
@patch_api
def test_new_google_notifications(self):
""" Event from Google should not create notifications and trigger. It ruins the perfs on large databases """
cron_id = self.env.ref('calendar.ir_cron_scheduler_alarm').id
triggers_before = self.env['ir.cron.trigger'].search([('cron_id', '=', cron_id)])
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
start = datetime.today() + relativedelta(months=1, day=1, hours=1)
end = datetime.today() + relativedelta(months=1, day=1, hours=2)
updated = datetime.today() + relativedelta(minutes=1)
values = {
'id': google_id,
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [{
'displayName': 'Mitchell Admin',
'email': '[email protected]',
'responseStatus': 'needsAction'
}, ],
'reminders': {'overrides': [{"method": "email", "minutes": 10}], 'useDefault': False},
'start': {
'dateTime': pytz.utc.localize(start).isoformat(),
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': pytz.utc.localize(end).isoformat(),
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
triggers_after = self.env['ir.cron.trigger'].search([('cron_id', '=', cron_id)])
new_triggers = triggers_after - triggers_before
self.assertFalse(new_triggers, "The event should not be created with triggers.")
# Event was created from Google and now it will be Updated from Google.
# No further notifications should be created.
values = {
'id': google_id,
'updated': pytz.utc.localize(updated).isoformat(),
'description': 'New Super description',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing was not good, now it is correct',
'visibility': 'public',
'attendees': [{
'displayName': 'Mitchell Admin',
'email': '[email protected]',
'responseStatus': 'needsAction'
}, ],
'reminders': {'overrides': [{"method": "email", "minutes": 10}], 'useDefault': False},
'start': {
'dateTime': pytz.utc.localize(start).isoformat(),
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': pytz.utc.localize(end).isoformat(),
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
triggers_after = self.env['ir.cron.trigger'].search([('cron_id', '=', cron_id)])
new_triggers = triggers_after - triggers_before
self.assertFalse(new_triggers, "The event should not be created with triggers.")
self.assertGoogleAPINotCalled()
@patch_api
def test_attendee_state(self):
user = new_test_user(self.env, login='calendar-user')
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
event = self.env['calendar.event'].with_user(user).create({
'name': 'Event with me',
'start': date(2020, 1, 6),
'stop': date(2020, 1, 6),
'google_id': google_id,
'user_id': False, # user is not owner
'need_sync': False,
'partner_ids': [(6, 0, user.partner_id.ids)], # but user is attendee
})
self.assertEqual(event.attendee_ids.state, 'accepted')
# The event is declined from Google
values = {
'id': google_id,
'description': 'Changed my mind',
"updated": self.now,
'organizer': {'email': '[email protected]', 'self': True},
'summary': """I don't want to be with me anymore""",
'visibility': 'public',
'attendees': [{
'displayName': 'calendar-user (base.group_user)',
'email': '[email protected]',
'responseStatus': 'declined'
}, ],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
self.assertEqual(event.attendee_ids.state, 'declined')
self.assertGoogleAPINotCalled()
@patch_api
def test_attendees_same_event_both_share(self):
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
other_user = new_test_user(self.env, login='calendar-user')
event = self.env['calendar.event'].create({
'name': 'coucou',
'start': date(2020, 1, 6),
'stop': date(2020, 1, 6),
'allday': True,
'google_id': google_id,
'need_sync': False,
'user_id': other_user.id, # Not the current user
'partner_ids': [(6, 0, [self.env.user.partner_id.id, other_user.partner_id.id], )] # current user is attendee
})
event.write({'start': date(2020, 1, 7), 'stop': date(2020, 1, 8)})
        # To avoid 403 errors, we send a limited dictionary when we don't have write access.
# guestsCanModify property is not properly handled yet
self.assertGoogleEventPatched(event.google_id, {
'id': event.google_id,
'start': {'date': str(event.start_date)},
'end': {'date': str(event.stop_date + relativedelta(days=1))},
'summary': 'coucou',
'description': '',
'location': '',
'guestsCanModify': True,
'organizer': {'email': '[email protected]', 'self': False},
'attendees': [{'email': '[email protected]', 'responseStatus': 'needsAction'},
{'email': '[email protected]', 'responseStatus': 'accepted'},],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id,
'%s_owner_id' % self.env.cr.dbname: other_user.id}},
'reminders': {'overrides': [], 'useDefault': False},
'visibility': 'public',
}, timeout=3)
@patch_api
def test_attendee_recurrence_answer(self):
""" Write on a recurrence to update all attendee answers """
other_user = new_test_user(self.env, login='calendar-user')
google_id = "aaaaaaaaaaa"
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'start': datetime(2021, 2, 15, 7, 0, 0),
'stop': datetime(2021, 2, 15, 9, 0, 0),
'event_tz': 'Europe/Brussels',
'need_sync': False,
'partner_ids': [(6, 0, [other_user.partner_id.id])]
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=3;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
recurrence.calendar_event_ids.attendee_ids.state = 'accepted'
values = {
'id': google_id,
"updated": self.now,
'description': '',
'attendees': [{'email': '[email protected]', 'responseStatus': 'declined'}],
'summary': 'coucou',
# 'visibility': 'public',
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'dateTime': '2021-02-15T8:00:00+01:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2021-02-15T10:00:00+01:00', 'timeZone': 'Europe/Brussels'},
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
attendee = recurrence.calendar_event_ids.attendee_ids.mapped('state')
self.assertEqual(attendee, ['declined', 'declined', 'declined'], "All events should be declined")
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_creation_with_attendee_answer(self):
""" Create a recurrence with predefined attendee answers """
google_id = "aaaaaaaaaaa"
values = {
'id': google_id,
"updated": self.now,
'description': '',
'attendees': [{'email': '[email protected]', 'responseStatus': 'declined'}],
'summary': 'coucou',
# 'visibility': 'public',
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'dateTime': '2021-02-15T8:00:00+01:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2021-02-15T10:00:00+01:00', 'timeZone': 'Europe/Brussels'},
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', google_id)])
attendee = recurrence.calendar_event_ids.attendee_ids.mapped('state')
self.assertEqual(attendee, ['declined', 'declined', 'declined'], "All events should be declined")
self.assertGoogleAPINotCalled()
@patch_api
def test_several_attendee_have_the_same_mail(self):
"""
        In Google, one email = one attendee, but in Odoo several partners may share the same email.
        This test checks that deleting such an attendee does no harm: all Odoo attendees sharing
        the removed email are deleted.
"""
partner1 = self.env['res.partner'].create({
'name': 'joe',
'email': '[email protected]',
})
partner2 = self.env['res.partner'].create({
'name': 'william',
'email': '[email protected]',
})
partner3 = self.env['res.partner'].create({
'name': 'jack',
'email': '[email protected]',
})
partner4 = self.env['res.partner'].create({
'name': 'averell',
'email': '[email protected]',
})
google_id = "aaaaaaaaaaaaaaaaa"
event = self.env['calendar.event'].create({
'name': 'coucou',
'start': datetime(2020, 1, 13, 16, 0),
'stop': datetime(2020, 1, 13, 20),
'allday': False,
'google_id': google_id,
'need_sync': False,
'user_id': self.env.user.partner_id.id,
'partner_ids': [(6, 0, [self.env.user.partner_id.id, partner1.id, partner2.id, partner3.id, partner4.id],)]
# current user is attendee
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=3;BYDAY=MO',
'need_sync': False,
'base_event_id': event.id,
'calendar_event_ids': [(4, event.id)],
})
recurrence._apply_recurrence()
recurrence.calendar_event_ids.attendee_ids.state = 'accepted'
mails = sorted(set(event.attendee_ids.mapped('email')))
self.assertEqual(mails, ['[email protected]', '[email protected]'])
gevent = GoogleEvent([{
'id': google_id,
'description': 'coucou',
"updated": self.now,
'organizer': {'email': '[email protected]', 'self': True},
'summary': False,
'visibility': 'public',
'attendees': [],
'reminders': {'useDefault': True},
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id, }},
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=3;BYDAY=MO'],
'start': {
'dateTime': '2020-01-13T16:00:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T20:00:00+01:00',
'timeZone': 'Europe/Brussels'
},
}])
self.sync(gevent)
        # User attendees are removed, but the gevent owner might be added after the sync.
mails = sorted(set(event.attendee_ids.mapped('email')))
self.assertEqual(mails, ['[email protected]'])
self.assertGoogleAPINotCalled()
@patch_api
def test_event_with_meeting_url(self):
values = {
'id': 'oj44nep1ldf8a3ll02uip0c9aa',
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [{
'displayName': 'Mitchell Admin',
'email': '[email protected]',
'responseStatus': 'needsAction'
},],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'conferenceData': {
'entryPoints': [{
'entryPointType': 'video',
'uri': 'https://meet.google.com/odoo-random-test',
'label': 'meet.google.com/odoo-random-test'
}, {
'entryPointType': 'more',
'uri':'https://tel.meet/odoo-random-test?pin=42424242424242',
'pin':'42424242424242'
}]
}
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
event = self.env['calendar.event'].search([('google_id', '=', values.get('id'))])
self.assertTrue(event, "It should have created an event")
self.assertEqual(event.videocall_location, 'https://meet.google.com/odoo-random-test')
self.assertGoogleAPINotCalled()
@patch_api
def test_event_with_availability(self):
values = {
'id': 'oj44nep1ldf8a3ll02uip0c9aa',
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [{
'displayName': 'Mitchell Admin',
'email': '[email protected]',
'responseStatus': 'needsAction'
},],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'transparency': 'transparent'
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
event = self.env['calendar.event'].search([('google_id', '=', values.get('id'))])
self.assertTrue(event, "It should have created an event")
self.assertEqual(event.show_as, 'free')
        self.assertGoogleAPINotCalled()
@patch_api
def test_private_partner_single_event(self):
values = {
'id': 'oj44nep1ldf8a3ll02uip0c9aa',
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [{
'displayName': 'Mitchell Admin',
'email': '[email protected]',
'responseStatus': 'needsAction'
}, {
'displayName': 'Attendee',
'email': self.private_partner.email,
'responseStatus': 'needsAction'
}, ],
'reminders': {'useDefault': True},
'start': {
'dateTime': '2020-01-13T16:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
'end': {
'dateTime': '2020-01-13T19:55:00+01:00',
'timeZone': 'Europe/Brussels'
},
}
self.env['calendar.event']._sync_google2odoo(GoogleEvent([values]))
event = self.env['calendar.event'].search([('google_id', '=', values.get('id'))])
private_attendee = event.attendee_ids.filtered(lambda e: e.email == self.private_partner.email)
self.assertNotEqual(self.private_partner.id, private_attendee.partner_id.id)
self.assertNotEqual(private_attendee.partner_id.type, 'private')
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_private_contact(self):
recurrence_id = 'oj44nep1ldf8a3ll02uip0c9aa'
values = {
'id': recurrence_id,
'description': 'Small mini desc',
'organizer': {'email': '[email protected]', 'self': True},
'summary': 'Pricing new update',
'visibility': 'public',
'attendees': [{
'displayName': 'Attendee',
'email': self.private_partner.email,
'responseStatus': 'needsAction'
}, ],
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=3;BYDAY=MO'],
'reminders': {'useDefault': True},
'start': {'date': '2020-01-6'},
'end': {'date': '2020-01-7'},
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
recurrence = self.env['calendar.recurrence'].search([('google_id', '=', values.get('id'))])
events = recurrence.calendar_event_ids
private_attendees = events.mapped('attendee_ids').filtered(lambda e: e.email == self.private_partner.email)
self.assertTrue(all([a.partner_id.id != self.private_partner.id for a in private_attendees]))
self.assertTrue(all([a.partner_id.type != 'private' for a in private_attendees]))
self.assertGoogleAPINotCalled()
@patch_api
def test_alias_email_sync_recurrence(self):
catchall_domain = self.env['ir.config_parameter'].sudo().get_param("mail.catchall.domain")
alias_model = self.env['ir.model'].search([('model', '=', 'calendar.event')])
self.env['mail.alias'].create({'alias_name': 'sale', 'alias_model_id': alias_model.id})
alias_email = 'sale@%s' % catchall_domain if catchall_domain else 'sale@'
google_id = 'oj44nep1ldf8a3ll02uip0c9aa'
base_event = self.env['calendar.event'].create({
'name': 'coucou',
'allday': True,
'start': datetime(2020, 1, 6),
'stop': datetime(2020, 1, 6),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=MO',
'need_sync': False,
'base_event_id': base_event.id,
'calendar_event_ids': [(4, base_event.id)],
})
recurrence._apply_recurrence()
values = {
'id': google_id,
'summary': 'coucou',
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=MO'],
'start': {'date': '2020-01-06'},
'end': {'date': '2020-01-07'},
'reminders': {'useDefault': True},
"attendees": [
{
"email": alias_email, "state": "accepted",
},
],
'updated': self.now,
}
self.env['calendar.recurrence']._sync_google2odoo(GoogleEvent([values]))
events = recurrence.calendar_event_ids.sorted('start')
self.assertEqual(len(events), 2)
self.assertFalse(events.mapped('attendee_ids'))
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_range_start_date_in_other_dst_period(self):
"""
        It is possible to create recurring events that all fall within the same DST period,
        while computing the start date of the sync range crosses into the other DST period.
        This could result in a duplication of the base event.
"""
# DST change: 2023-03-26
frequency = "MONTHLY"
count = "1" # Just to go into the flow of the recurrence
recurrence_id = "9lxiofipomymx2yr1yt0hpep99"
google_value = [{
"summary": "Start date in DST period",
"id": recurrence_id,
"creator": {"email": "[email protected]"},
"organizer": {"email": "[email protected]"},
"created": "2023-03-27T11:45:07.000Z",
"start": {"dateTime": "2023-03-27T09:00:00+02:00", "timeZone": "Europe/Brussels"},
"end": {"dateTime": "2023-03-27T10:00:00+02:00", "timeZone": "Europe/Brussels"},
"recurrence": [f"RRULE:FREQ={frequency};COUNT={count}"],
"reminders": {"useDefault": True},
"updated": "2023-03-27T11:45:08.547Z",
}]
google_event = GoogleEvent(google_value)
self.env['calendar.recurrence']._sync_google2odoo(google_event)
# Get the time slot of the day
day_start = datetime.fromisoformat(google_event.start["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=0)
day_end = datetime.fromisoformat(google_event.end["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=23)
# Get created events
day_events = self.env["calendar.event"].search(
[
("name", "=", google_event.summary),
("start", ">=", day_start),
("stop", "<=", day_end)
]
)
self.assertGoogleAPINotCalled()
# Check for non-duplication
self.assertEqual(len(day_events), 1)
@patch_api
def test_recurrence_edit_specific_event(self):
google_values = [
{
'kind': 'calendar#event',
'etag': '"3367067678542000"',
'id': '59orfkiunbn2vlp6c2tndq6ui0',
'status': 'confirmed',
'created': '2023-05-08T08:16:54.000Z',
'updated': '2023-05-08T08:17:19.271Z',
'summary': 'First title',
'creator': {'email': '[email protected]', 'self': True},
'organizer': {'email': '[email protected]', 'self': True},
'start': {'dateTime': '2023-05-12T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2023-05-12T10:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;UNTIL=20230518T215959Z;BYDAY=FR'],
'iCalUID': '[email protected]',
'reminders': {'useDefault': True},
},
{
'kind': 'calendar#event',
'etag': '"3367067678542000"',
'id': '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000',
'status': 'confirmed',
'created': '2023-05-08T08:16:54.000Z',
'updated': '2023-05-08T08:17:19.271Z',
'summary': 'Second title',
'creator': {'email': '[email protected]', 'self': True},
'organizer': {'email': '[email protected]', 'self': True},
'start': {'dateTime': '2023-05-19T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2023-05-19T10:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=2;BYDAY=FR'],
'iCalUID': '[email protected]',
'reminders': {'useDefault': True},
},
{
'kind': 'calendar#event',
'etag': '"3367067704194000"',
'id': '59orfkiunbn2vlp6c2tndq6ui0_20230526T070000Z',
'status': 'confirmed',
'created': '2023-05-08T08:16:54.000Z',
'updated': '2023-05-08T08:17:32.097Z',
'summary': 'Second title',
'creator': {'email': '[email protected]', 'self': True},
'organizer': {'email': '[email protected]', 'self': True},
'start': {'dateTime': '2023-05-26T08:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2023-05-26T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'recurringEventId': '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000',
'originalStartTime': {'dateTime': '2023-05-26T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'reminders': {'useDefault': True},
}
]
google_events = GoogleEvent(google_values)
recurrent_events = google_events.filter(lambda e: e.is_recurrence())
specific_event = google_events - recurrent_events
# recurrence_event: 59orfkiunbn2vlp6c2tndq6ui0 and 59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000
# specific_event: 59orfkiunbn2vlp6c2tndq6ui0_20230526T070000Z
# Range to check
day_start = datetime.fromisoformat(specific_event.start["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=0)
day_end = datetime.fromisoformat(specific_event.end["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=23)
# Synchronize recurrent events
self.env['calendar.recurrence']._sync_google2odoo(recurrent_events)
events = self.env["calendar.event"].search(
[
("name", "=", specific_event.summary),
("start", ">=", day_start),
("stop", "<=", day_end,)
]
)
self.assertEqual(len(events), 1)
# Events:
# 'First title' --> '59orfkiunbn2vlp6c2tndq6ui0_20230512T070000Z'
# 'Second title' --> '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000_20230519T070000Z'
# 'Second title' --> '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000_20230526T070000Z'
# We want to apply change on '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000_20230526T070000Z'
# with values from '59orfkiunbn2vlp6c2tndq6ui0_20230526T070000Z'
# To match the google ids, we create a new event and delete the old one to avoid duplication
# Synchronize specific event
self.env['calendar.event']._sync_google2odoo(specific_event)
events = self.env["calendar.event"].search(
[
("name", "=", specific_event.summary),
("start", ">=", day_start),
("stop", "<=", day_end,)
]
)
self.assertEqual(len(events), 1)
# Not call API
self.assertGoogleAPINotCalled()
@patch_api
def test_recurrence_edit_specific_event_backward_compatibility(self):
"""
        Check that the creation of full recurrence ids does not crash,
        so that event duplication is avoided whenever possible.
Note 1:
Not able to reproduce the payload in practice.
However, it exists in production.
Note 2:
This is the same test as 'test_recurrence_edit_specific_event',
with the range in 'recurringEventId' removed for the specific event.
"""
google_values = [
{
'kind': 'calendar#event',
'etag': '"3367067678542000"',
'id': '59orfkiunbn2vlp6c2tndq6ui0',
'status': 'confirmed',
'created': '2023-05-08T08:16:54.000Z',
'updated': '2023-05-08T08:17:19.271Z',
'summary': 'First title',
'creator': {'email': '[email protected]', 'self': True},
'organizer': {'email': '[email protected]', 'self': True},
'start': {'dateTime': '2023-05-12T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2023-05-12T10:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;UNTIL=20230518T215959Z;BYDAY=FR'],
'iCalUID': '[email protected]',
'reminders': {'useDefault': True},
},
{
'kind': 'calendar#event',
'etag': '"3367067678542000"',
'id': '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000',
'status': 'confirmed',
'created': '2023-05-08T08:16:54.000Z',
'updated': '2023-05-08T08:17:19.271Z',
'summary': 'Second title',
'creator': {'email': '[email protected]', 'self': True},
'organizer': {'email': '[email protected]', 'self': True},
'start': {'dateTime': '2023-05-19T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2023-05-19T10:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'recurrence': ['RRULE:FREQ=WEEKLY;WKST=SU;COUNT=2;BYDAY=FR'],
'iCalUID': '[email protected]',
'reminders': {'useDefault': True},
},
{
'kind': 'calendar#event',
'etag': '"3367067704194000"',
'id': '59orfkiunbn2vlp6c2tndq6ui0_20230526T070000Z',
'status': 'confirmed',
'created': '2023-05-08T08:16:54.000Z',
'updated': '2023-05-08T08:17:32.097Z',
'summary': 'Second title',
'creator': {'email': '[email protected]', 'self': True},
'organizer': {'email': '[email protected]', 'self': True},
'start': {'dateTime': '2023-05-26T08:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': '2023-05-26T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'recurringEventId': '59orfkiunbn2vlp6c2tndq6ui0', # Range removed
'originalStartTime': {'dateTime': '2023-05-26T09:00:00+02:00', 'timeZone': 'Europe/Brussels'},
'reminders': {'useDefault': True},
}
]
google_events = GoogleEvent(google_values)
recurrent_events = google_events.filter(lambda e: e.is_recurrence())
specific_event = google_events - recurrent_events
# Range to check
day_start = datetime.fromisoformat(specific_event.start["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=0)
day_end = datetime.fromisoformat(specific_event.end["dateTime"]).astimezone(pytz.utc).replace(tzinfo=None).replace(hour=23)
# Synchronize recurrent events
self.env['calendar.recurrence']._sync_google2odoo(recurrent_events)
events = self.env["calendar.event"].search(
[
("name", "=", specific_event.summary),
("start", ">=", day_start),
("stop", "<=", day_end,)
]
)
self.assertEqual(len(events), 1)
# Synchronize specific event
self.env['calendar.event']._sync_google2odoo(specific_event)
events = self.env["calendar.event"].search(
[
("name", "=", specific_event.summary),
("start", ">=", day_start),
("stop", "<=", day_end,)
]
)
        self.assertEqual(len(events), 2)  # Two because in this case we do not detect the existing event
        # The sync flow is not blocked, but a duplicate event is created
# Not call API
self.assertGoogleAPINotCalled()
| 46.113269 | 71,245 |
| 26,237 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from dateutil.relativedelta import relativedelta
from unittest.mock import patch
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService
from odoo.addons.google_account.models.google_service import GoogleService
from odoo.addons.google_calendar.models.res_users import User
from odoo.addons.google_calendar.tests.test_sync_common import TestSyncGoogle, patch_api
from odoo.tests.common import users, warmup
from odoo.tests import tagged
from odoo import tools
@tagged('odoo2google')
@patch.object(User, '_get_google_calendar_token', lambda user: 'dummy-token')
class TestSyncOdoo2Google(TestSyncGoogle):
def setUp(self):
super().setUp()
self.google_service = GoogleCalendarService(self.env['google.service'])
# Make sure this test will work for the next 30 years
self.env['ir.config_parameter'].set_param('google_calendar.sync.range_days', 10000)
@patch_api
def test_event_creation(self):
partner = self.env['res.partner'].create({'name': 'Jean-Luc', 'email': '[email protected]'})
alarm = self.env['calendar.alarm'].create({
'name': 'Notif',
'alarm_type': 'notification',
'interval': 'minutes',
'duration': 18,
})
description = '<script>alert("boom")</script><p style="white-space: pre"><h1>HELLO</h1></p><ul><li>item 1</li><li>item 2</li></ul>'
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 18, 0),
'partner_ids': [(4, partner.id)],
'alarm_ids': [(4, alarm.id)],
'privacy': 'private',
'need_sync': False,
'description': description,
})
event._sync_odoo2google(self.google_service)
self.assertGoogleEventInserted({
'id': False,
'start': {'dateTime': '2020-01-15T08:00:00+00:00'},
'end': {'dateTime': '2020-01-15T18:00:00+00:00'},
'summary': 'Event',
'description': tools.html_sanitize(description),
'location': '',
'visibility': 'private',
'guestsCanModify': True,
'reminders': {'useDefault': False, 'overrides': [{'method': 'popup', 'minutes': alarm.duration_minutes}]},
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'needsAction'}],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id}}
})
@patch_api
@users('__system__')
@warmup
def test_event_creation_perf(self):
EVENT_COUNT = 100
partners = self.env['res.partner'].create([
{'name': 'Jean-Luc %s' % (i), 'email': 'jean-luc-%[email protected]' % (i)} for i in range(EVENT_COUNT)])
alarm = self.env['calendar.alarm'].create({
'name': 'Notif',
'alarm_type': 'notification',
'interval': 'minutes',
'duration': 18,
})
partner_model = self.env.ref('base.model_res_partner')
partner = self.env['res.partner'].search([], limit=1)
with self.assertQueryCount(__system__=1211):
events = self.env['calendar.event'].create([{
'name': "Event %s" % (i),
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 18, 0),
'partner_ids': [(4, partners[i].id), (4, self.env.user.partner_id.id)],
'alarm_ids': [(4, alarm.id)],
'privacy': 'private',
'need_sync': False,
'res_model_id': partner_model.id,
'res_id': partner.id,
} for i in range(EVENT_COUNT)])
events._sync_odoo2google(self.google_service)
with self.assertQueryCount(__system__=130):
events.unlink()
@patch_api
@users('__system__')
@warmup
def test_recurring_event_creation_perf(self):
partner = self.env['res.partner'].create({'name': 'Jean-Luc', 'email': '[email protected]'})
alarm = self.env['calendar.alarm'].create({
'name': 'Notif',
'alarm_type': 'notification',
'interval': 'minutes',
'duration': 18,
})
partner_model = self.env.ref('base.model_res_partner')
partner = self.env['res.partner'].search([], limit=1)
with self.assertQueryCount(__system__=3634):
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 18, 0),
'partner_ids': [(4, partner.id)],
'alarm_ids': [(4, alarm.id)],
'privacy': 'private',
'need_sync': False,
'interval': 1,
'recurrency': True,
'rrule_type': 'daily',
'end_type': 'forever',
'res_model_id': partner_model.id,
'res_id': partner.id,
})
with self.assertQueryCount(__system__=35):
event.unlink()
def test_event_without_user(self):
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 18, 0),
'user_id': False,
'privacy': 'private',
'need_sync': False,
})
values = event._google_values()
self.assertFalse('%s_owner_id' % self.env.cr.dbname in values.get('extendedProperties', {}).get('shared', {}))
@patch_api
def test_event_without_attendee_state(self):
partner_1 = self.env['res.partner'].create({'name': 'Jean-Luc', 'email': '[email protected]'})
partner_2 = self.env['res.partner'].create({'name': 'Phineas', 'email': '[email protected]'})
partner_3 = self.env['res.partner'].create({'name': 'Ferb'})
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 18, 0),
'partner_ids': [(4, partner_1.id), (4, partner_2.id), (4, partner_3.id)],
'privacy': 'private',
'need_sync': False,
})
attendee_2 = event.attendee_ids.filtered(lambda a: a.partner_id.id == partner_2.id)
attendee_2.write({
'state': False,
})
event._sync_odoo2google(self.google_service)
self.assertGoogleEventInserted({
'id': False,
'start': {'dateTime': '2020-01-15T08:00:00+00:00'},
'end': {'dateTime': '2020-01-15T18:00:00+00:00'},
'summary': 'Event',
'description': '',
'location': '',
'visibility': 'private',
'guestsCanModify': True,
'reminders': {'useDefault': False, 'overrides': []},
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'needsAction'},
{'email': '[email protected]', 'responseStatus': 'needsAction'}],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id}}
})
@patch_api
def test_event_allday_creation(self):
event = self.env['calendar.event'].create({
'name': "Event",
'allday': True,
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'need_sync': False,
})
event._sync_odoo2google(self.google_service)
self.assertGoogleEventInserted({
'id': False,
'start': {'date': '2020-01-15'},
'end': {'date': '2020-01-16'},
'summary': 'Event',
'description': '',
'location': '',
'visibility': 'public',
'guestsCanModify': True,
'reminders': {'overrides': [], 'useDefault': False},
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'accepted'}],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id}}
})
@patch_api
def test_inactive_event(self):
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'active': False,
'need_sync': False,
})
event._sync_odoo2google(self.google_service)
self.assertGoogleEventNotInserted()
self.assertGoogleEventNotDeleted()
@patch_api
def test_synced_inactive_event(self):
google_id = 'aaaaaaaaa'
        # Writing on a synced event will put it in a need_sync state.
        # The delete API will not be called, but the event will be marked as 'cancelled'.
event = self.env['calendar.event'].create({
'google_id': google_id,
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'active': False,
'need_sync': True,
})
event._sync_odoo2google(self.google_service)
self.assertGoogleEventDeleted(google_id)
@patch_api
def test_recurrence(self):
google_id = 'aaaaaaaaa'
event = self.env['calendar.event'].create({
'google_id': google_id,
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=WE',
'calendar_event_ids': [(4, event.id)],
'need_sync': False,
})
recurrence._sync_odoo2google(self.google_service)
self.assertGoogleEventInserted({
'id': False,
'start': {'date': '2020-01-15'},
'end': {'date': '2020-01-16'},
'summary': 'Event',
'description': '',
'location': '',
'visibility': 'public',
'guestsCanModify': True,
'reminders': {'overrides': [], 'useDefault': False},
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'accepted'}],
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=WE'],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: recurrence.id}}
})
@patch_api
def test_event_added_to_recurrence(self):
google_id = 'aaaaaaaaa'
event = self.env['calendar.event'].create({
'google_id': google_id,
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'need_sync': False,
})
event.write({
'recurrency': True,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=WE',
})
to_delete = self.env['calendar.event'].with_context(active_test=False).search([('google_id', '=', google_id)])
self.assertTrue(to_delete)
self.assertFalse(to_delete.active)
self.assertFalse(event.google_id, "The google id will be set after the API call")
self.assertGoogleEventInserted({
'id': False,
'start': {'date': '2020-01-15'},
'end': {'date': '2020-01-16'},
'summary': 'Event',
'description': '',
'location': '',
'visibility': 'public',
'guestsCanModify': True,
'reminders': {'overrides': [], 'useDefault': False},
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'accepted'}],
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=WE'],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.recurrence_id.id}}
}, timeout=3)
self.assertGoogleEventDeleted(google_id)
@patch_api
def test_following_event_updated(self):
google_id = 'aaaaaaaaa'
event_1 = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'need_sync': False,
})
event_2 = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 22),
'stop': datetime(2020, 1, 22),
'allday': True,
'need_sync': False,
})
self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=WE',
'calendar_event_ids': [(4, event_1.id), (4, event_2.id)],
'need_sync': False,
})
event = event_2
# Update only some events in the recurrence
event.write({
'name': 'New name',
'recurrence_update': 'future_events',
})
self.assertGoogleEventPatched(event.google_id, {
'id': event.google_id,
'start': {'date': str(event.start_date)},
'end': {'date': str(event.stop_date + relativedelta(days=1))},
'summary': 'New name',
'description': '',
'location': '',
'guestsCanModify': True,
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'accepted'}],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id}},
'reminders': {'overrides': [], 'useDefault': False},
'visibility': 'public',
}, timeout=3)
@patch_api
def test_stop_synchronization(self):
self.env.user.stop_google_synchronization()
self.assertTrue(self.env.user.google_synchronization_stopped, "The google synchronization flag should be switched on")
self.assertFalse(self.env.user._sync_google_calendar(self.google_service), "The google synchronization should be stopped")
# If synchronization stopped, creating a new event should not call _google_insert.
self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 18, 0),
'privacy': 'private',
})
self.assertGoogleEventNotInserted()
@patch_api
def test_restart_synchronization(self):
        # Test that new events created after stopping synchronization are correctly patched when restarting the sync.
google_id = 'aaaaaaaaa'
partner = self.env['res.partner'].create({'name': 'Jean-Luc', 'email': '[email protected]'})
user = self.env['res.users'].create({
'name': 'Test user Calendar',
'login': '[email protected]',
'partner_id': partner.id,
})
user.stop_google_synchronization()
event = self.env['calendar.event'].with_user(user).create({
'google_id': google_id,
'name': "Event",
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 18, 0),
'partner_ids': [(4, partner.id)],
})
user.with_user(user).restart_google_synchronization()
self.assertGoogleEventPatched(event.google_id, {
'id': event.google_id,
'start': {'dateTime': '2020-01-15T08:00:00+00:00'},
'end': {'dateTime': '2020-01-15T18:00:00+00:00'},
'summary': 'Event',
'description': '',
'location': '',
'visibility': 'public',
'guestsCanModify': True,
'reminders': {'overrides': [], 'useDefault': False},
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'accepted'}],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id}}
}, timeout=3)
@patch_api
def test_all_event_updated(self):
google_id = 'aaaaaaaaa'
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=WE',
'base_event_id': event.id,
'need_sync': False,
})
recurrence._apply_recurrence()
event.write({
'name': 'New name',
'recurrence_update': 'all_events',
})
self.assertGoogleEventPatched(recurrence.google_id, {
'id': recurrence.google_id,
'start': {'date': str(event.start_date)},
'end': {'date': str(event.stop_date + relativedelta(days=1))},
'summary': 'New name',
'description': '',
'location': '',
'guestsCanModify': True,
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'accepted'}],
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=WE'],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: recurrence.id}},
'reminders': {'overrides': [], 'useDefault': False},
'visibility': 'public',
}, timeout=3)
@patch_api
def test_event_need_sync(self):
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'recurrence_id': False,
'recurrency': True,
})
self.assertFalse(event.need_sync,
"Event created with True recurrency should not be synched to avoid "
"duplicate event on google")
recurrence = self.env['calendar.recurrence'].create({
'google_id': False,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=WE',
'base_event_id': event.id,
'need_sync': False,
})
event_2 = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'recurrence_id': recurrence.id,
})
self.assertFalse(event_2.need_sync,
"Event created with recurrence_id should not be synched to avoid "
"duplicate event on google")
self.assertGoogleEventNotInserted()
self.assertGoogleEventNotDeleted()
@patch_api
def test_event_until_utc(self):
""" UNTIl rrule value must be in UTC: ending with a 'Z """
google_id = 'aaaaaaaaa'
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=DAILY;UNTIL=20200117T235959',
'base_event_id': event.id,
'need_sync': False,
})
recurrence._apply_recurrence()
self.assertEqual(recurrence._google_values()['recurrence'][0], 'RRULE:FREQ=DAILY;UNTIL=20200117T235959Z',
"The rrule sent to google should be in UTC: end with Z")
        # Add the Z even if UNTIL is not at the end of the rrule string
recurrence.write({'rrule': 'FREQ=DAILY;UNTIL=20200118T235959;INTERVAL=3'})
recurrence._apply_recurrence()
self.assertEqual(recurrence._google_values()['recurrence'][0],
'RRULE:FREQ=DAILY;UNTIL=20200118T235959Z;INTERVAL=3',
"The rrule sent to google should be in UTC: end with Z and preserve the following parameters")
# Don't add two Z at the end of the UNTIL value
recurrence.write({'rrule': 'FREQ=DAILY;UNTIL=20200119T235959Z'})
recurrence._apply_recurrence()
self.assertEqual(recurrence._google_values()['recurrence'][0], 'RRULE:FREQ=DAILY;UNTIL=20200119T235959Z',
"The rrule sent to google should be in UTC: end with one Z")
@patch_api
def test_write_unsynced_field(self):
google_id = 'aaaaaaaaa'
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2021, 3, 10),
'stop': datetime(2021, 3, 10),
'allday': True,
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=WE',
'base_event_id': event.id,
'need_sync': False,
})
recurrence._apply_recurrence()
event.write({
'start': datetime(2021, 3, 11),
'stop': datetime(2021, 3, 11),
'need_sync': False,
})
event_type = self.env['calendar.event.type'].create({'name': 'type'})
event.write({
'recurrence_update': 'all_events',
'categ_ids': [(4, event_type.id)]
})
self.assertTrue(all(e.categ_ids == event_type for e in recurrence.calendar_event_ids))
self.assertGoogleAPINotCalled()
@patch_api
def test_attendee_state(self):
""" Sync attendee state immediately """
partner = self.env['res.partner'].create({'name': 'Jean-Luc', 'email': '[email protected]'})
event = self.env['calendar.event'].create({
'name': "Event with attendees",
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'allday': True,
'need_sync': False,
'partner_ids': [(4, partner.id)],
'google_id': 'aaaaaaaaa',
})
self.assertEqual(event.attendee_ids.state, 'needsAction',
"The attendee state should be 'needsAction")
event.attendee_ids.do_decline()
self.assertGoogleEventPatched(event.google_id, {
'id': event.google_id,
'start': {'date': str(event.start_date)},
'end': {'date': str(event.stop_date + relativedelta(days=1))},
'summary': 'Event with attendees',
'description': '',
'location': '',
'guestsCanModify': True,
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'declined'}],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: event.id}},
'reminders': {'overrides': [], 'useDefault': False},
'visibility': 'public',
})
@patch_api
def test_all_event_with_tz_updated(self):
google_id = 'aaaaaaaaa'
event = self.env['calendar.event'].create({
'name': "Event",
'start': datetime(2020, 1, 15, 8, 0),
'stop': datetime(2020, 1, 15, 9, 0),
'need_sync': False,
})
recurrence = self.env['calendar.recurrence'].create({
'google_id': google_id,
'rrule': 'FREQ=WEEKLY;COUNT=2;BYDAY=WE',
'base_event_id': event.id,
'need_sync': False,
})
recurrence._apply_recurrence()
event.write({
'name': 'New name',
'recurrence_update': 'all_events',
})
self.assertGoogleEventPatched(recurrence.google_id, {
'id': recurrence.google_id,
'start': {'dateTime': "2020-01-15T08:00:00+00:00", 'timeZone': 'Europe/Brussels'},
'end': {'dateTime': "2020-01-15T09:00:00+00:00", 'timeZone': 'Europe/Brussels'},
'summary': 'New name',
'description': '',
'location': '',
'guestsCanModify': True,
'organizer': {'email': '[email protected]', 'self': True},
'attendees': [{'email': '[email protected]', 'responseStatus': 'accepted'}],
'recurrence': ['RRULE:FREQ=WEEKLY;COUNT=2;BYDAY=WE'],
'extendedProperties': {'shared': {'%s_odoo_id' % self.env.cr.dbname: recurrence.id}},
'reminders': {'overrides': [], 'useDefault': False},
'visibility': 'public',
}, timeout=3)
@patch.object(GoogleService, '_do_request')
def test_send_update_do_request(self, mock_do_request):
self.env.cr.postcommit.clear()
event = self.env['calendar.event'].create({
'name': "Event",
'allday': True,
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'need_sync': False,
})
event.with_context(send_updates=True)._sync_odoo2google(self.google_service)
self.call_post_commit_hooks()
self.assertGoogleEventSendUpdates('all')
@patch.object(GoogleService, '_do_request')
def test_not_send_update_do_request(self, mock_do_request):
event = self.env['calendar.event'].create({
'name': "Event",
'allday': True,
'start': datetime(2020, 1, 15),
'stop': datetime(2020, 1, 15),
'need_sync': False,
})
event.with_context(send_updates=False)._sync_odoo2google(self.google_service)
self.call_post_commit_hooks()
self.assertGoogleEventSendUpdates('none')
| 42.046474 | 26,237 |
| 2,091 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
from odoo.addons.google_calendar.models.google_sync import google_calendar_token
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService
class ResetGoogleAccount(models.TransientModel):
_name = 'google.calendar.account.reset'
_description = 'Google Calendar Account Reset'
user_id = fields.Many2one('res.users', required=True)
delete_policy = fields.Selection(
[('dont_delete', "Leave them untouched"),
('delete_google', "Delete from the current Google Calendar account"),
('delete_odoo', "Delete from Odoo"),
('delete_both', "Delete from both"),
], string="User's Existing Events", required=True, default='dont_delete',
help="This will only affect events for which the user is the owner")
sync_policy = fields.Selection([
('new', "Synchronize only new events"),
('all', "Synchronize all existing events"),
], string="Next Synchronization", required=True, default='new')
def reset_account(self):
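        # Apply the wizard choices: optionally delete the user's already-synchronized events
        # (on Google, in Odoo, or both), optionally flag the remaining ones for a full re-sync,
        # then clear the stored tokens and calendar id so the next synchronization starts fresh.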
google = GoogleCalendarService(self.env['google.service'])
events = self.env['calendar.event'].search([
('user_id', '=', self.user_id.id),
('google_id', '!=', False)])
if self.delete_policy in ('delete_google', 'delete_both'):
with google_calendar_token(self.user_id) as token:
for event in events:
google.delete(event.google_id, token=token)
if self.delete_policy in ('delete_odoo', 'delete_both'):
events.google_id = False
events.unlink()
if self.sync_policy == 'all':
events.write({
'google_id': False,
'need_sync': True,
})
self.user_id.google_cal_account_id._set_auth_tokens(False, False, 0)
self.user_id.write({
'google_calendar_sync_token': False,
'google_calendar_cal_id': False,
})
| 40.211538 | 2,091 |
| 9,717 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tools import email_normalize
import logging
from typing import Iterator, Mapping
from collections import abc
import re
_logger = logging.getLogger(__name__)
class GoogleEvent(abc.Set):
"""This helper class holds the values of a Google event.
    Inspired by Odoo recordsets, one instance can be a single Google event or an
    (immutable) set of Google events.
All usual set operations are supported (union, intersection, etc).
A list of all attributes can be found in the API documentation.
https://developers.google.com/calendar/v3/reference/events#resource
    :param iterable: iterable of GoogleEvent instances or iterable of dictionaries
    """
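    # Illustrative usage (a minimal sketch; the ids and values below are made up):
    #   events = GoogleEvent([{'id': 'abc', 'summary': 'Meeting', 'status': 'confirmed'}])
    #   event = events.filter(lambda e: e.summary == 'Meeting')  # still a GoogleEvent set
    #   event.summary  # -> 'Meeting' (singleton attribute access reads the underlying dict)
    #   events.ids     # -> ('abc',)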
"""
def __init__(self, iterable=()):
self._events = {}
for item in iterable:
if isinstance(item, self.__class__):
self._events[item.id] = item._events[item.id]
elif isinstance(item, Mapping):
self._events[item.get('id')] = item
else:
raise ValueError("Only %s or iterable of dict are supported" % self.__class__.__name__)
def __iter__(self) -> Iterator['GoogleEvent']:
return iter(GoogleEvent([vals]) for vals in self._events.values())
def __contains__(self, google_event):
return google_event.id in self._events
def __len__(self):
return len(self._events)
def __bool__(self):
return bool(self._events)
def __getattr__(self, name):
# ensure_one
        try:
            event_id, = self._events.keys()
        except ValueError:
            raise ValueError("Expected singleton: %s" % self)
        return self._events[event_id].get(name)
def __repr__(self):
return '%s%s' % (self.__class__.__name__, self.ids)
@property
def ids(self):
return tuple(e.id for e in self)
@property
def rrule(self):
if self.recurrence:
# Find the rrule in the list
rrule = next(rr for rr in self.recurrence if 'RRULE:' in rr)
return rrule[6:] # skip "RRULE:" in the rrule string
def odoo_id(self, env):
self.odoo_ids(env) # load ids
return self._odoo_id
def _meta_odoo_id(self, dbname):
"""Returns the Odoo id stored in the Google Event metadata.
        This id might not actually exist in the database.
"""
properties = self.extendedProperties and (self.extendedProperties.get('shared', {}) or self.extendedProperties.get('private', {})) or {}
o_id = properties.get('%s_odoo_id' % dbname)
if o_id:
return int(o_id)
def odoo_ids(self, env):
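        # Resolve the Odoo ids of these Google events: use ids already cached on the events,
        # then match google_id in the database, and finally fall back to the odoo id stored
        # in the Google extended properties for the ones that are still unresolved.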
ids = tuple(e._odoo_id for e in self if e._odoo_id)
if len(ids) == len(self):
return ids
model = self._get_model(env)
found = self._load_odoo_ids_from_db(env, model)
unsure = self - found
if unsure:
unsure._load_odoo_ids_from_metadata(env, model)
return tuple(e._odoo_id for e in self)
def _load_odoo_ids_from_metadata(self, env, model):
unsure_odoo_ids = tuple(e._meta_odoo_id(env.cr.dbname) for e in self)
odoo_events = model.browse(_id for _id in unsure_odoo_ids if _id)
# Extended properties are copied when splitting a recurrence Google side.
# Hence, we may have two Google recurrences linked to the same Odoo id.
# Therefore, we only consider Odoo records without google id when trying
# to match events.
o_ids = odoo_events.exists().filtered(lambda e: not e.google_id).ids
for e in self:
odoo_id = e._meta_odoo_id(env.cr.dbname)
if odoo_id in o_ids:
e._events[e.id]['_odoo_id'] = odoo_id
def _load_odoo_ids_from_db(self, env, model):
odoo_events = model.with_context(active_test=False)._from_google_ids(self.ids)
mapping = {e.google_id: e.id for e in odoo_events} # {google_id: odoo_id}
existing_google_ids = odoo_events.mapped('google_id')
for e in self:
odoo_id = mapping.get(e.id)
if odoo_id:
e._events[e.id]['_odoo_id'] = odoo_id
return self.filter(lambda e: e.id in existing_google_ids)
def owner(self, env):
# Owner/organizer could be desynchronised between Google and Odoo.
# Let userA, userB be two new users (never synced to Google before).
# UserA creates an event in Odoo (he is the owner) but userB syncs first.
# There is no way to insert the event into userA's calendar since we don't have
# any authentication access. The event is therefore inserted into userB's calendar
# (he is the organizer in Google). The "real" owner (in Odoo) is stored as an
        # extended property. There is currently no support to "transfer" ownership when
# userA syncs his calendar the first time.
real_owner_id = self.extendedProperties and self.extendedProperties.get('shared', {}).get('%s_owner_id' % env.cr.dbname)
try:
# If we create an event without user_id, the event properties will be 'false'
            # and Python will interpret this as a NoneType; that's why we have the 'except TypeError'
real_owner_id = int(real_owner_id)
except (ValueError, TypeError):
real_owner_id = False
real_owner = real_owner_id and env['res.users'].browse(real_owner_id) or env['res.users']
if real_owner_id and real_owner.exists():
return real_owner
elif self.organizer and self.organizer.get('self'):
return env.user
elif self.organizer and self.organizer.get('email'):
# In Google: 1 email = 1 user; but in Odoo several users might have the same email :/
org_email = email_normalize(self.organizer.get('email'))
return env['res.users'].search([('email_normalized', '=', org_email)], limit=1)
else:
return env['res.users']
def filter(self, func) -> 'GoogleEvent':
return GoogleEvent(e for e in self if func(e))
def clear_type_ambiguity(self, env):
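        # Cancelled payloads do not say whether they were plain events or recurrences
        # (see _is_type_ambiguous): mark as recurrences the ones whose google_id is known
        # by calendar.recurrence and treat the remaining ones as simple events.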
ambiguous_events = self.filter(GoogleEvent._is_type_ambiguous)
recurrences = ambiguous_events._load_odoo_ids_from_db(env, env['calendar.recurrence'])
for recurrence in recurrences:
self._events[recurrence.id]['recurrence'] = True
for event in ambiguous_events - recurrences:
self._events[event.id]['recurrence'] = False
def is_recurrence(self):
if self._is_type_ambiguous():
_logger.warning("Ambiguous event type: cannot accurately tell whether a cancelled event is a recurrence or not")
return bool(self.recurrence)
def is_recurrent(self):
return bool(self.recurringEventId or self.is_recurrence())
def is_cancelled(self):
return self.status == 'cancelled'
def is_recurrence_follower(self):
return bool(not self.originalStartTime or self.originalStartTime == self.start)
def full_recurring_event_id(self):
"""
Give the complete identifier with elements
in `id` and `recurringEventId`.
:return: concatenation of the id created by the recurrence
and the id created by the modification of a specific event
:rtype: string if recurrent event and correct ids, `None` otherwise
"""
# Regex expressions to match elements (according to the google support [not documented]):
# - ID: [a-zA-Z0-9]+
# - RANGE: R[0-9]+T[0-9]+
# - TIMESTAMP: [0-9]+T[0-9]+Z
# With:
# - id: 'ID_TIMESTAMP'
# - recurringEventID: 'ID_RANGE'
# Find: 'ID_RANGE_TIMESTAMP'
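        # Worked example (ids taken from the recurrence tests of this module):
        #   id               = '59orfkiunbn2vlp6c2tndq6ui0_20230526T070000Z'
        #   recurringEventId = '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000'
        #   result           = '59orfkiunbn2vlp6c2tndq6ui0_R20230519T070000_20230526T070000Z'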
if not self.is_recurrent():
return None
# Check if ids are the same
id_value = re.match(r'(\w+_)', self.id)
recurringEventId_value = re.match(r'(\w+_)', self.recurringEventId)
if not id_value or not recurringEventId_value or id_value.group(1) != recurringEventId_value.group(1):
return None
ID_RANGE = re.search(r'\w+_R\d+T\d+', self.recurringEventId).group()
TIMESTAMP = re.search(r'\d+T\d+Z', self.id).group()
return f"{ID_RANGE}_{TIMESTAMP}"
def cancelled(self):
return self.filter(lambda e: e.status == 'cancelled')
def exists(self, env) -> 'GoogleEvent':
recurrences = self.filter(GoogleEvent.is_recurrence)
events = self - recurrences
recurrences.odoo_ids(env)
events.odoo_ids(env)
return self.filter(lambda e: e._odoo_id)
def _is_type_ambiguous(self):
"""For cancelled events/recurrences, Google only send the id and
the cancelled status. There is no way to know if it was a recurrence
or simple event."""
return self.is_cancelled() and 'recurrence' not in self._events[self.id]
def _get_model(self, env):
if all(e.is_recurrence() for e in self):
return env['calendar.recurrence']
if all(not e.is_recurrence() for e in self):
return env['calendar.event']
raise TypeError("Mixing Google events and Google recurrences")
def get_meeting_url(self):
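        # conferenceData shape (taken from the sync tests of this module):
        #   {'entryPoints': [{'entryPointType': 'video',
        #                     'uri': 'https://meet.google.com/odoo-random-test', ...}, ...]}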
if not self.conferenceData:
return False
video_meeting = list(filter(lambda entryPoints: entryPoints['entryPointType'] == 'video', self.conferenceData['entryPoints']))
return video_meeting[0]['uri'] if video_meeting else False
def is_available(self):
return self.transparency == 'transparent'
| 41.883621 | 9,717 |
| 5,099 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from uuid import uuid4
import requests
import json
import logging
from odoo import fields
from odoo.tools import exception_to_unicode
from odoo.addons.google_calendar.utils.google_event import GoogleEvent
from odoo.addons.google_account.models.google_service import TIMEOUT
_logger = logging.getLogger(__name__)
def requires_auth_token(func):
def wrapped(self, *args, **kwargs):
if not kwargs.get('token'):
raise AttributeError("An authentication token is required")
return func(self, *args, **kwargs)
return wrapped
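# Illustrative usage: the helpers below decorated with @requires_auth_token must receive an
# explicit token, e.g. service.get_events(token='dummy-token'); omitting it raises AttributeError.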
class InvalidSyncToken(Exception):
pass
class GoogleCalendarService():
def __init__(self, google_service):
self.google_service = google_service
@requires_auth_token
def get_events(self, sync_token=None, token=None, timeout=TIMEOUT):
url = "/calendar/v3/calendars/primary/events"
headers = {'Content-type': 'application/json'}
params = {'access_token': token}
if sync_token:
params['syncToken'] = sync_token
else:
            # Full sync: limit to a range from 1 year in the past to 1 year in the future by default
ICP = self.google_service.env['ir.config_parameter'].sudo()
day_range = int(ICP.get_param('google_calendar.sync.range_days', default=365))
_logger.info("Full cal sync, restricting to %s days range", day_range)
lower_bound = fields.Datetime.subtract(fields.Datetime.now(), days=day_range)
upper_bound = fields.Datetime.add(fields.Datetime.now(), days=day_range)
params['timeMin'] = lower_bound.isoformat() + 'Z' # Z = UTC (RFC3339)
params['timeMax'] = upper_bound.isoformat() + 'Z' # Z = UTC (RFC3339)
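            # Illustrative full-sync params (dates made up):
            #   {'access_token': token, 'timeMin': '2019-01-15T00:00:00Z', 'timeMax': '2021-01-15T00:00:00Z'}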
try:
status, data, time = self.google_service._do_request(url, params, headers, method='GET', timeout=timeout)
except requests.HTTPError as e:
if e.response.status_code == 410 and 'fullSyncRequired' in str(e.response.content):
raise InvalidSyncToken("Invalid sync token. Full sync required")
raise e
events = data.get('items', [])
next_page_token = data.get('nextPageToken')
while next_page_token:
params = {'access_token': token, 'pageToken': next_page_token}
status, data, time = self.google_service._do_request(url, params, headers, method='GET', timeout=timeout)
next_page_token = data.get('nextPageToken')
events += data.get('items', [])
next_sync_token = data.get('nextSyncToken')
default_reminders = data.get('defaultReminders')
return GoogleEvent(events), next_sync_token, default_reminders
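    # Minimal usage sketch (hypothetical token values; not part of the original module):
    #   events, next_sync_token, default_reminders = service.get_events(token=access_token)   # full sync
    #   # store next_sync_token, then on the next run only fetch the delta:
    #   events, next_sync_token, _ = service.get_events(sync_token=next_sync_token, token=access_token)
    # A 410 'fullSyncRequired' answer surfaces as InvalidSyncToken and means the stored sync token must be dropped.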
@requires_auth_token
def insert(self, values, token=None, timeout=TIMEOUT):
send_updates = self.google_service._context.get('send_updates', True)
url = "/calendar/v3/calendars/primary/events?sendUpdates=%s" % ("all" if send_updates else "none")
headers = {'Content-type': 'application/json', 'Authorization': 'Bearer %s' % token}
if not values.get('id'):
values['id'] = uuid4().hex
self.google_service._do_request(url, json.dumps(values), headers, method='POST', timeout=timeout)
return values['id']
@requires_auth_token
def patch(self, event_id, values, token=None, timeout=TIMEOUT):
url = "/calendar/v3/calendars/primary/events/%s?sendUpdates=all" % event_id
headers = {'Content-type': 'application/json', 'Authorization': 'Bearer %s' % token}
self.google_service._do_request(url, json.dumps(values), headers, method='PATCH', timeout=timeout)
@requires_auth_token
def delete(self, event_id, token=None, timeout=TIMEOUT):
url = "/calendar/v3/calendars/primary/events/%s?sendUpdates=all" % event_id
headers = {'Content-type': 'application/json'}
params = {'access_token': token}
try:
self.google_service._do_request(url, params, headers=headers, method='DELETE', timeout=timeout)
except requests.HTTPError as e:
# For some unknown reason Google can also return a 403 response when the event is already cancelled.
if e.response.status_code not in (410, 403):
raise e
_logger.info("Google event %s was already deleted" % event_id)
#################################
    ## MANAGE CONNECTION TO GOOGLE ##
#################################
def is_authorized(self, user):
return bool(user.sudo().google_calendar_rtoken)
def _get_calendar_scope(self, RO=False):
readonly = '.readonly' if RO else ''
return 'https://www.googleapis.com/auth/calendar%s' % (readonly)
def _google_authentication_url(self, from_url='http://www.odoo.com'):
return self.google_service._get_authorize_uri(from_url, service='calendar', scope=self._get_calendar_scope())
def _can_authorize_google(self, user):
return user.has_group('base.group_erp_manager')
| 44.72807 | 5,099 |
| 15,960 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from contextlib import contextmanager
from functools import wraps
from requests import HTTPError
import pytz
from dateutil.parser import parse
from odoo import api, fields, models, registry, _
from odoo.tools import ormcache_context
from odoo.exceptions import UserError
from odoo.osv import expression
from odoo.addons.google_calendar.utils.google_event import GoogleEvent
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService
from odoo.addons.google_account.models.google_service import TIMEOUT
_logger = logging.getLogger(__name__)
# API requests are sent to Google Calendar after the current transaction ends.
# This ensures changes are sent to Google only if they really happened in the Odoo database.
# It is particularly important for event creation, otherwise the event might be created
# twice in Google if the first creation crashed in Odoo.
def after_commit(func):
@wraps(func)
def wrapped(self, *args, **kwargs):
dbname = self.env.cr.dbname
context = self.env.context
uid = self.env.uid
if self.env.context.get('no_calendar_sync'):
return
@self.env.cr.postcommit.add
def called_after():
db_registry = registry(dbname)
with db_registry.cursor() as cr:
env = api.Environment(cr, uid, context)
try:
func(self.with_env(env), *args, **kwargs)
except Exception as e:
_logger.warning("Could not sync record now: %s" % self)
_logger.exception(e)
return wrapped
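# Usage sketch (illustrative): a method decorated with @after_commit is not executed immediately;
# it is queued on cr.postcommit and runs in a fresh cursor once the current transaction is committed, e.g.:
#
#   @after_commit
#   def _google_patch(self, google_service, google_id, values, timeout=TIMEOUT):
#       ...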
@contextmanager
def google_calendar_token(user):
yield user._get_google_calendar_token()
class GoogleSync(models.AbstractModel):
_name = 'google.calendar.sync'
_description = "Synchronize a record with Google Calendar"
google_id = fields.Char('Google Calendar Id', copy=False)
need_sync = fields.Boolean(default=True, copy=False)
active = fields.Boolean(default=True)
def write(self, vals):
google_service = GoogleCalendarService(self.env['google.service'])
if 'google_id' in vals:
self._event_ids_from_google_ids.clear_cache(self)
synced_fields = self._get_google_synced_fields()
if 'need_sync' not in vals and vals.keys() & synced_fields and not self.env.user.google_synchronization_stopped:
vals['need_sync'] = True
result = super().write(vals)
for record in self.filtered('need_sync'):
if record.google_id:
record.with_user(record._get_event_user())._google_patch(google_service, record.google_id, record._google_values(), timeout=3)
return result
@api.model_create_multi
def create(self, vals_list):
if any(vals.get('google_id') for vals in vals_list):
self._event_ids_from_google_ids.clear_cache(self)
if self.env.user.google_synchronization_stopped:
for vals in vals_list:
vals.update({'need_sync': False})
records = super().create(vals_list)
google_service = GoogleCalendarService(self.env['google.service'])
records_to_sync = records.filtered(lambda r: r.need_sync and r.active)
for record in records_to_sync:
record.with_user(record._get_event_user())._google_insert(google_service, record._google_values(), timeout=3)
return records
def unlink(self):
"""We can't delete an event that is also in Google Calendar. Otherwise we would
        have no clue that the event must be deleted from Google Calendar at the next sync.
"""
synced = self.filtered('google_id')
# LUL TODO find a way to get rid of this context key
if self.env.context.get('archive_on_error') and self._active_name:
synced.write({self._active_name: False})
self = self - synced
elif synced:
# Since we can not delete such an event (see method comment), we archive it.
# Notice that archiving an event will delete the associated event on Google.
# Then, since it has been deleted on Google, the event is also deleted on Odoo DB (_sync_google2odoo).
self.action_archive()
return True
return super().unlink()
def _from_google_ids(self, google_ids):
if not google_ids:
return self.browse()
return self.browse(self._event_ids_from_google_ids(google_ids))
@api.model
@ormcache_context('google_ids', keys=('active_test',))
def _event_ids_from_google_ids(self, google_ids):
return self.search([('google_id', 'in', google_ids)]).ids
def _sync_odoo2google(self, google_service: GoogleCalendarService):
if not self:
return
if self._active_name:
records_to_sync = self.filtered(self._active_name)
else:
records_to_sync = self
cancelled_records = self - records_to_sync
updated_records = records_to_sync.filtered('google_id')
new_records = records_to_sync - updated_records
for record in cancelled_records.filtered(lambda e: e.google_id and e.need_sync):
record.with_user(record._get_event_user())._google_delete(google_service, record.google_id)
for record in new_records:
record.with_user(record._get_event_user())._google_insert(google_service, record._google_values())
for record in updated_records:
record.with_user(record._get_event_user())._google_patch(google_service, record.google_id, record._google_values())
def _cancel(self):
self.google_id = False
self.unlink()
@api.model
def _sync_google2odoo(self, google_events: GoogleEvent, default_reminders=()):
"""Synchronize Google recurrences in Odoo. Creates new recurrences, updates
existing ones.
:param google_recurrences: Google recurrences to synchronize in Odoo
:return: synchronized odoo recurrences
"""
existing = google_events.exists(self.env)
new = google_events - existing - google_events.cancelled()
odoo_values = [
dict(self._odoo_values(e, default_reminders), need_sync=False)
for e in new
]
new_odoo = self.with_context(dont_notify=True)._create_from_google(new, odoo_values)
cancelled = existing.cancelled()
cancelled_odoo = self.browse(cancelled.odoo_ids(self.env))
# Check if it is a recurring event that has been rescheduled.
# We have to check if an event already exists in Odoo.
# Explanation:
        # A recurrent event whose `google_id` is equal to ID_RANGE_TIMESTAMP can be rescheduled.
# The new `google_id` will be equal to ID_TIMESTAMP.
# We have to delete the event created under the old `google_id`.
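        # Illustrative example (hypothetical ids): an instance synced as
        # 'abc123_R20210101T090000_20210506T090000Z' that gets rescheduled comes back from Google
        # as 'abc123_20210506T090000Z'; the record holding the old id must then be removed.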
rescheduled_events = new.filter(lambda gevent: not gevent.is_recurrence_follower())
if rescheduled_events:
google_ids_to_remove = [event.full_recurring_event_id() for event in rescheduled_events]
cancelled_odoo += self.env['calendar.event'].search([('google_id', 'in', google_ids_to_remove)])
cancelled_odoo._cancel()
synced_records = new_odoo + cancelled_odoo
for gevent in existing - cancelled:
# Last updated wins.
# This could be dangerous if google server time and odoo server time are different
updated = parse(gevent.updated)
odoo_record = self.browse(gevent.odoo_id(self.env))
# Migration from 13.4 does not fill write_date. Therefore, we force the update from Google.
if not odoo_record.write_date or updated >= pytz.utc.localize(odoo_record.write_date):
vals = dict(self._odoo_values(gevent, default_reminders), need_sync=False)
odoo_record.with_context(dont_notify=True)._write_from_google(gevent, vals)
synced_records |= odoo_record
return synced_records
def _google_error_handling(self, http_error):
        # We only handle the most problematic errors when syncing events.
if http_error.response.status_code in (403, 400):
response = http_error.response.json()
if not self.exists():
reason = "Google gave the following explanation: %s" % response['error'].get('message')
error_log = "Error while syncing record. It does not exists anymore in the database. %s" % reason
_logger.error(error_log)
return
if self._name == 'calendar.event':
start = self.start and self.start.strftime('%Y-%m-%d at %H:%M') or _("undefined time")
event_ids = self.id
name = self.name
error_log = "Error while syncing event: "
event = self
else:
# calendar recurrence is triggering the error
event = self.base_event_id or self._get_first_event(include_outliers=True)
start = event.start and event.start.strftime('%Y-%m-%d at %H:%M') or _("undefined time")
event_ids = _("%(id)s and %(length)s following", id=event.id, length=len(self.calendar_event_ids.ids))
name = event.name
            # prevent syncing the other events
self.calendar_event_ids.need_sync = False
error_log = "Error while syncing recurrence [{id} - {name} - {rrule}]: ".format(id=self.id, name=self.name, rrule=self.rrule)
        # We don't have the right access on the event, or the request parameters were bad.
# https://developers.google.com/calendar/v3/errors#403_forbidden_for_non-organizer
if http_error.response.status_code == 403 and "forbiddenForNonOrganizer" in http_error.response.text:
reason = _("you don't seem to have permission to modify this event on Google Calendar")
else:
reason = _("Google gave the following explanation: %s", response['error'].get('message'))
error_log += "The event (%(id)s - %(name)s at %(start)s) could not be synced. It will not be synced while " \
"it is not updated. Reason: %(reason)s" % {'id': event_ids, 'start': start, 'name': name,
'reason': reason}
_logger.warning(error_log)
body = _(
"The following event could not be synced with Google Calendar. </br>"
"It will not be synced as long at it is not updated.</br>"
"%(reason)s", reason=reason)
if event:
event.message_post(
body=body,
message_type='comment',
subtype_xmlid='mail.mt_note',
)
@after_commit
def _google_delete(self, google_service: GoogleCalendarService, google_id, timeout=TIMEOUT):
with google_calendar_token(self.env.user.sudo()) as token:
if token:
google_service.delete(google_id, token=token, timeout=timeout)
                # When the record has been deleted on our side, we need to delete it on Google, but we don't want
                # to raise an error because the record doesn't exist anymore.
self.exists().with_context(dont_notify=True).need_sync = False
@after_commit
def _google_patch(self, google_service: GoogleCalendarService, google_id, values, timeout=TIMEOUT):
with google_calendar_token(self.env.user.sudo()) as token:
if token:
try:
google_service.patch(google_id, values, token=token, timeout=timeout)
except HTTPError as e:
if e.response.status_code in (400, 403):
self._google_error_handling(e)
self.exists().with_context(dont_notify=True).need_sync = False
@after_commit
def _google_insert(self, google_service: GoogleCalendarService, values, timeout=TIMEOUT):
if not values:
return
with google_calendar_token(self.env.user.sudo()) as token:
if token:
try:
send_updates = self._context.get('send_updates', True)
google_service.google_service = google_service.google_service.with_context(send_updates=send_updates)
google_id = google_service.insert(values, token=token, timeout=timeout)
# Everything went smoothly
self.with_context(dont_notify=True).write({
'google_id': google_id,
'need_sync': False,
})
except HTTPError as e:
if e.response.status_code in (400, 403):
self._google_error_handling(e)
self.with_context(dont_notify=True).need_sync = False
def _get_records_to_sync(self, full_sync=False):
"""Return records that should be synced from Odoo to Google
:param full_sync: If True, all events attended by the user are returned
:return: events
"""
domain = self._get_sync_domain()
if not full_sync:
is_active_clause = (self._active_name, '=', True) if self._active_name else expression.TRUE_LEAF
domain = expression.AND([domain, [
'|',
'&', ('google_id', '=', False), is_active_clause,
('need_sync', '=', True),
]])
        # We want to limit to 200 event syncs per transaction; it shouldn't be a problem for day-to-day use,
        # but it allows the first synchronization to run within an acceptable time without timeout.
        # If there are a lot of events to synchronize to Google the first time,
        # they will be synchronized eventually by the cron running a few times a day.
return self.with_context(active_test=False).search(domain, limit=200)
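    # Sketch of the resulting search (illustrative): when full_sync is False the final domain roughly reads
    #   _get_sync_domain() AND ( (google_id = False AND active = True) OR need_sync = True )
    # evaluated with active_test disabled so archived (cancelled) records are also returned.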
def _write_from_google(self, gevent, vals):
self.write(vals)
@api.model
def _create_from_google(self, gevents, vals_list):
return self.create(vals_list)
@api.model
def _odoo_values(self, google_event: GoogleEvent, default_reminders=()):
"""Implements this method to return a dict of Odoo values corresponding
to the Google event given as parameter
:return: dict of Odoo formatted values
"""
raise NotImplementedError()
def _google_values(self):
"""Implements this method to return a dict with values formatted
according to the Google Calendar API
:return: dict of Google formatted values
"""
raise NotImplementedError()
def _get_sync_domain(self):
"""Return a domain used to search records to synchronize.
e.g. return a domain to synchronize records owned by the current user.
"""
raise NotImplementedError()
def _get_google_synced_fields(self):
"""Return a set of field names. Changing one of these fields
marks the record to be re-synchronized.
"""
raise NotImplementedError()
@api.model
def _restart_google_sync(self):
""" Turns on the google synchronization for all the events of
a given user.
"""
raise NotImplementedError()
def _get_event_user(self):
""" Return the correct user to send the request to Google.
It's possible that a user creates an event and sets another user as the organizer. Using self.env.user will
        cause some issues, and it might not be possible to use this user for sending the request, so this method gets
the appropriate user accordingly.
"""
raise NotImplementedError()
| 46.395349 | 15,960 |
| 4,045 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import requests
from datetime import timedelta
from odoo import fields, models, _
from odoo.exceptions import UserError
from odoo.addons.google_account.models.google_service import GOOGLE_TOKEN_ENDPOINT
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService, InvalidSyncToken
from odoo.addons.google_calendar.models.google_sync import google_calendar_token
_logger = logging.getLogger(__name__)
class GoogleCredentials(models.Model):
""""Google Account of res_users"""
_name = 'google.calendar.credentials'
_description = 'Google Calendar Account Data'
user_ids = fields.One2many('res.users', 'google_cal_account_id', required=True)
calendar_rtoken = fields.Char('Refresh Token', copy=False, groups="base.group_system")
calendar_token = fields.Char('User token', copy=False, groups="base.group_system")
calendar_token_validity = fields.Datetime('Token Validity', copy=False, groups="base.group_system")
calendar_sync_token = fields.Char('Next Sync Token', copy=False, groups="base.group_system")
    calendar_cal_id = fields.Char('Calendar ID', copy=False, help='Last Calendar ID that has been synchronized. If it is changed, we remove all links between the Google ID and the Odoo Google internal ID')
synchronization_stopped = fields.Boolean('Google Synchronization stopped', copy=False)
def _set_auth_tokens(self, access_token, refresh_token, ttl):
self.write({
'calendar_rtoken': refresh_token,
'calendar_token': access_token,
'calendar_token_validity': fields.Datetime.now() + timedelta(seconds=ttl) if ttl else False,
})
def _google_calendar_authenticated(self):
self.ensure_one()
return bool(self.sudo().calendar_rtoken)
def _is_google_calendar_valid(self):
self.ensure_one()
return self.calendar_token_validity and self.calendar_token_validity >= (fields.Datetime.now() + timedelta(minutes=1))
def _refresh_google_calendar_token(self):
# LUL TODO similar code exists in google_drive. Should be factorized in google_account
self.ensure_one()
get_param = self.env['ir.config_parameter'].sudo().get_param
client_id = get_param('google_calendar_client_id')
client_secret = get_param('google_calendar_client_secret')
if not client_id or not client_secret:
raise UserError(_("The account for the Google Calendar service is not configured."))
headers = {"content-type": "application/x-www-form-urlencoded"}
data = {
'refresh_token': self.calendar_rtoken,
'client_id': client_id,
'client_secret': client_secret,
'grant_type': 'refresh_token',
}
try:
_dummy, response, _dummy = self.env['google.service']._do_request(GOOGLE_TOKEN_ENDPOINT, params=data, headers=headers, method='POST', preuri='')
ttl = response.get('expires_in')
self.write({
'calendar_token': response.get('access_token'),
'calendar_token_validity': fields.Datetime.now() + timedelta(seconds=ttl),
})
except requests.HTTPError as error:
if error.response.status_code in (400, 401): # invalid grant or invalid client
                # Delete the refresh token and make sure it's committed
self.env.cr.rollback()
self._set_auth_tokens(False, False, 0)
self.env.cr.commit()
error_key = error.response.json().get("error", "nc")
error_msg = _("An error occurred while generating the token. Your authorization code may be invalid or has already expired [%s]. "
"You should check your Client ID and secret on the Google APIs plateform or try to stop and restart your calendar synchronisation.",
error_key)
raise UserError(error_msg)
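    # Shape of a successful refresh answer (illustrative values only):
    #   {"access_token": "ya29....", "expires_in": 3599,
    #    "scope": "https://www.googleapis.com/auth/calendar", "token_type": "Bearer"}
    # 'expires_in' feeds calendar_token_validity; a 400/401 answer wipes the stored tokens instead.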
| 50.5625 | 4,045 |
| 2,177 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
from odoo.addons.google_calendar.models.google_sync import google_calendar_token
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService
class Attendee(models.Model):
_name = 'calendar.attendee'
_inherit = 'calendar.attendee'
def _send_mail_to_attendees(self, mail_template, force_send=False):
""" Override
        If not synced with Google, let Odoo be in charge of sending the emails.
        Otherwise, there is nothing to do: Google will send them.
"""
with google_calendar_token(self.env.user.sudo()) as token:
if not token:
super()._send_mail_to_attendees(mail_template, force_send)
def do_tentative(self):
# Synchronize event after state change
res = super().do_tentative()
self._sync_event()
return res
def do_accept(self):
# Synchronize event after state change
res = super().do_accept()
self._sync_event()
return res
def do_decline(self):
# Synchronize event after state change
res = super().do_decline()
self._sync_event()
return res
def _sync_event(self):
        # For weird reasons, we can't sync the status when we are not the responsible user.
        # We can't adapt google_values to only keep ['id', 'summary', 'attendees', 'start', 'end', 'reminders']
        # and send that: we get a "Forbidden for non-organizer" error even if we only send start and end,
        # which are mandatory.
all_events = self.mapped('event_id').filtered(lambda e: e.google_id)
other_events = all_events.filtered(lambda e: e.user_id and e.user_id.id != self.env.user.id)
for user in other_events.mapped('user_id'):
service = GoogleCalendarService(self.env['google.service'].with_user(user))
other_events.filtered(lambda ev: ev.user_id.id == user.id).with_user(user)._sync_odoo2google(service)
google_service = GoogleCalendarService(self.env['google.service'])
(all_events - other_events)._sync_odoo2google(google_service)
| 42.686275 | 2,177 |
| 14,204 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import pytz
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, tools, _
class Meeting(models.Model):
_name = 'calendar.event'
_inherit = ['calendar.event', 'google.calendar.sync']
google_id = fields.Char(
'Google Calendar Event Id', compute='_compute_google_id', store=True, readonly=False)
@api.depends('recurrence_id.google_id')
def _compute_google_id(self):
# google ids of recurring events are built from the recurrence id and the
# original starting time in the recurrence.
# The `start` field does not appear in the dependencies on purpose!
        # Even if the event is moved, the google_id remains the same.
for event in self:
google_recurrence_id = event.recurrence_id._get_event_google_id(event)
if not event.google_id and google_recurrence_id:
event.google_id = google_recurrence_id
elif not event.google_id:
event.google_id = False
@api.model
def _get_google_synced_fields(self):
return {'name', 'description', 'allday', 'start', 'date_end', 'stop',
'attendee_ids', 'alarm_ids', 'location', 'privacy', 'active'}
@api.model
def _restart_google_sync(self):
self.env['calendar.event'].search(self._get_sync_domain()).write({
'need_sync': True,
})
@api.model_create_multi
def create(self, vals_list):
notify_context = self.env.context.get('dont_notify', False)
return super(Meeting, self.with_context(dont_notify=notify_context)).create([
dict(vals, need_sync=False) if vals.get('recurrence_id') or vals.get('recurrency') else vals
for vals in vals_list
])
def write(self, values):
recurrence_update_setting = values.get('recurrence_update')
if recurrence_update_setting in ('all_events', 'future_events') and len(self) == 1:
values = dict(values, need_sync=False)
notify_context = self.env.context.get('dont_notify', False)
res = super(Meeting, self.with_context(dont_notify=notify_context)).write(values)
if recurrence_update_setting in ('all_events',) and len(self) == 1 and values.keys() & self._get_google_synced_fields():
self.recurrence_id.need_sync = True
return res
def _get_sync_domain(self):
        # in case of a full sync, limit to a range of 1 year in the past and 1 year in the future by default
ICP = self.env['ir.config_parameter'].sudo()
day_range = int(ICP.get_param('google_calendar.sync.range_days', default=365))
lower_bound = fields.Datetime.subtract(fields.Datetime.now(), days=day_range)
upper_bound = fields.Datetime.add(fields.Datetime.now(), days=day_range)
return [
('partner_ids.user_ids', 'in', self.env.user.id),
('stop', '>', lower_bound),
('start', '<', upper_bound),
# Do not sync events that follow the recurrence, they are already synced at recurrence creation
'!', '&', '&', ('recurrency', '=', True), ('recurrence_id', '!=', False), ('follow_recurrence', '=', True)
]
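    # Reading aid (not extra logic): the trailing '!', '&', '&' prefix operators above expand to
    #   NOT (recurrency = True AND recurrence_id != False AND follow_recurrence = True)
    # so individual occurrences that simply follow their recurrence are skipped here.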
@api.model
def _odoo_values(self, google_event, default_reminders=()):
if google_event.is_cancelled():
return {'active': False}
        # default_reminders is never (); it is set to Google's default reminder (30 min before).
        # We need to check 'useDefault' on the event to determine whether we have to use Google's
        # default reminder or not.
reminder_command = google_event.reminders.get('overrides')
if not reminder_command:
reminder_command = google_event.reminders.get('useDefault') and default_reminders or ()
alarm_commands = self._odoo_reminders_commands(reminder_command)
attendee_commands, partner_commands = self._odoo_attendee_commands(google_event)
related_event = self.search([('google_id', '=', google_event.id)], limit=1)
name = google_event.summary or related_event and related_event.name or _("(No title)")
values = {
'name': name,
'description': google_event.description and tools.html_sanitize(google_event.description),
'location': google_event.location,
'user_id': google_event.owner(self.env).id,
'privacy': google_event.visibility or self.default_get(['privacy'])['privacy'],
'attendee_ids': attendee_commands,
'alarm_ids': alarm_commands,
'recurrency': google_event.is_recurrent(),
'videocall_location': google_event.get_meeting_url(),
'show_as': 'free' if google_event.is_available() else 'busy'
}
if partner_commands:
# Add partner_commands only if set from Google. The write method on calendar_events will
# override attendee commands if the partner_ids command is set but empty.
values['partner_ids'] = partner_commands
if not google_event.is_recurrence():
values['google_id'] = google_event.id
if google_event.is_recurrent() and not google_event.is_recurrence():
# Propagate the follow_recurrence according to the google result
values['follow_recurrence'] = google_event.is_recurrence_follower()
if google_event.start.get('dateTime'):
# starting from python3.7, use the new [datetime, date].fromisoformat method
start = parse(google_event.start.get('dateTime')).astimezone(pytz.utc).replace(tzinfo=None)
stop = parse(google_event.end.get('dateTime')).astimezone(pytz.utc).replace(tzinfo=None)
values['allday'] = False
else:
start = parse(google_event.start.get('date'))
stop = parse(google_event.end.get('date')) - relativedelta(days=1)
# Stop date should be exclusive as defined here https://developers.google.com/calendar/v3/reference/events#resource
            # but it seems that's not always the case for old events
if stop < start:
stop = parse(google_event.end.get('date'))
values['allday'] = True
values['start'] = start
values['stop'] = stop
return values
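    # Worked example of the all-day conversion above (hypothetical event): a Google all-day event with
    #   start = {'date': '2021-03-01'}, end = {'date': '2021-03-03'}   (end date exclusive)
    # becomes start=2021-03-01, stop=2021-03-02 and allday=True on the Odoo side.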
@api.model
def _odoo_attendee_commands(self, google_event):
attendee_commands = []
partner_commands = []
google_attendees = google_event.attendees or []
if len(google_attendees) == 0 and google_event.organizer and google_event.organizer.get('self', False):
user = google_event.owner(self.env)
google_attendees += [{
'email': user.partner_id.email,
'responseStatus': 'needsAction',
}]
emails = [a.get('email') for a in google_attendees]
existing_attendees = self.env['calendar.attendee']
if google_event.exists(self.env):
existing_attendees = self.browse(google_event.odoo_id(self.env)).attendee_ids
attendees_by_emails = {tools.email_normalize(a.email): a for a in existing_attendees}
partners = self.env['mail.thread']._mail_find_partner_from_emails(emails, records=self, force_create=True, extra_domain=[('type', '!=', 'private')])
for attendee in zip(emails, partners, google_attendees):
email = attendee[0]
if email in attendees_by_emails:
# Update existing attendees
attendee_commands += [(1, attendees_by_emails[email].id, {'state': attendee[2].get('responseStatus')})]
else:
# Create new attendees
if attendee[2].get('self'):
partner = self.env.user.partner_id
elif attendee[1]:
partner = attendee[1]
else:
continue
attendee_commands += [(0, 0, {'state': attendee[2].get('responseStatus'), 'partner_id': partner.id})]
partner_commands += [(4, partner.id)]
if attendee[2].get('displayName') and not partner.name:
partner.name = attendee[2].get('displayName')
for odoo_attendee in attendees_by_emails.values():
# Remove old attendees but only if it does not correspond to the current user.
email = tools.email_normalize(odoo_attendee.email)
if email not in emails and email != self.env.user.email:
attendee_commands += [(2, odoo_attendee.id)]
partner_commands += [(3, odoo_attendee.partner_id.id)]
return attendee_commands, partner_commands
@api.model
def _odoo_reminders_commands(self, reminders=()):
commands = []
for reminder in reminders:
alarm_type = 'email' if reminder.get('method') == 'email' else 'notification'
alarm_type_label = _("Email") if alarm_type == 'email' else _("Notification")
minutes = reminder.get('minutes', 0)
alarm = self.env['calendar.alarm'].search([
('alarm_type', '=', alarm_type),
('duration_minutes', '=', minutes)
], limit=1)
if alarm:
commands += [(4, alarm.id)]
else:
if minutes % (60*24) == 0:
interval = 'days'
duration = minutes / 60 / 24
name = _(
"%(reminder_type)s - %(duration)s Days",
reminder_type=alarm_type_label,
duration=duration,
)
elif minutes % 60 == 0:
interval = 'hours'
duration = minutes / 60
name = _(
"%(reminder_type)s - %(duration)s Hours",
reminder_type=alarm_type_label,
duration=duration,
)
else:
interval = 'minutes'
duration = minutes
name = _(
"%(reminder_type)s - %(duration)s Minutes",
reminder_type=alarm_type_label,
duration=duration,
)
commands += [(0, 0, {'duration': duration, 'interval': interval, 'name': name, 'alarm_type': alarm_type})]
return commands
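    # Worked examples of the duration split above (illustrative): a Google reminder of 2880 minutes
    # creates a "2 Days" alarm, 120 minutes a "2 Hours" alarm, and 45 minutes a "45 Minutes" alarm,
    # unless a calendar.alarm with the same type and duration already exists and is simply linked.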
def _google_values(self):
if self.allday:
start = {'date': self.start_date.isoformat()}
end = {'date': (self.stop_date + relativedelta(days=1)).isoformat()}
else:
start = {'dateTime': pytz.utc.localize(self.start).isoformat()}
end = {'dateTime': pytz.utc.localize(self.stop).isoformat()}
reminders = [{
'method': "email" if alarm.alarm_type == "email" else "popup",
'minutes': alarm.duration_minutes
} for alarm in self.alarm_ids]
attendees = self.attendee_ids
attendee_values = [{
'email': attendee.partner_id.email_normalized,
'responseStatus': attendee.state or 'needsAction',
} for attendee in attendees if attendee.partner_id.email_normalized]
        # We sort the attendees to avoid non-deterministic test failures. It's not mandatory for Google.
attendee_values.sort(key=lambda k: k['email'])
values = {
'id': self.google_id,
'start': start,
'end': end,
'summary': self.name,
'description': tools.html_sanitize(self.description) if not tools.is_html_empty(self.description) else '',
'location': self.location or '',
'guestsCanModify': True,
'organizer': {'email': self.user_id.email, 'self': self.user_id == self.env.user},
'attendees': attendee_values,
'extendedProperties': {
'shared': {
'%s_odoo_id' % self.env.cr.dbname: self.id,
},
},
'reminders': {
'overrides': reminders,
'useDefault': False,
}
}
if self.privacy:
values['visibility'] = self.privacy
if not self.active:
values['status'] = 'cancelled'
if self.user_id and self.user_id != self.env.user and not bool(self.user_id.sudo().google_calendar_token):
            # The organizer is an Odoo user that does not sync their calendar
values['extendedProperties']['shared']['%s_owner_id' % self.env.cr.dbname] = self.user_id.id
elif not self.user_id:
            # We can't store in the shared properties in that case without getting a 403. It can happen when
            # the owner is not an Odoo user: we don't store the real owner identity (email).
            # If we are not the owner, we should change the posted values to avoid errors because we don't have
            # write permissions.
# See https://developers.google.com/calendar/concepts/sharing
keep_keys = ['id', 'summary', 'attendees', 'start', 'end', 'reminders']
values = {key: val for key, val in values.items() if key in keep_keys}
            # values['extendedProperties']['private'] should be used if the owner is not an Odoo user
values['extendedProperties'] = {
'private': {
'%s_odoo_id' % self.env.cr.dbname: self.id,
},
}
return values
def _cancel(self):
        # only the owner can delete the event => other attendees decline it
user = self.env.user
my_cancelled_records = self.filtered(lambda e: e.user_id == user)
super(Meeting, my_cancelled_records)._cancel()
attendees = (self - my_cancelled_records).attendee_ids.filtered(lambda a: a.partner_id == user.partner_id)
attendees.state = 'declined'
def _get_event_user(self):
self.ensure_one()
if self.user_id and self.user_id.sudo().google_calendar_token:
return self.user_id
return self.env.user
| 49.838596 | 14,204 |
| 5,463 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, fields, models, Command
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService, InvalidSyncToken
from odoo.addons.google_calendar.models.google_sync import google_calendar_token
from odoo.loglevels import exception_to_unicode
_logger = logging.getLogger(__name__)
class User(models.Model):
_inherit = 'res.users'
google_cal_account_id = fields.Many2one('google.calendar.credentials')
google_calendar_rtoken = fields.Char(related='google_cal_account_id.calendar_rtoken', groups="base.group_system")
google_calendar_token = fields.Char(related='google_cal_account_id.calendar_token')
google_calendar_token_validity = fields.Datetime(related='google_cal_account_id.calendar_token_validity')
google_calendar_sync_token = fields.Char(related='google_cal_account_id.calendar_sync_token')
google_calendar_cal_id = fields.Char(related='google_cal_account_id.calendar_cal_id')
google_synchronization_stopped = fields.Boolean(related='google_cal_account_id.synchronization_stopped', readonly=False)
_sql_constraints = [
        ('google_token_uniq', 'unique (google_cal_account_id)', "The user already has a Google account"),
]
@property
def SELF_READABLE_FIELDS(self):
return super().SELF_READABLE_FIELDS + ['google_synchronization_stopped', 'google_cal_account_id']
@property
def SELF_WRITEABLE_FIELDS(self):
return super().SELF_WRITEABLE_FIELDS + ['google_synchronization_stopped', 'google_cal_account_id']
def _get_google_calendar_token(self):
self.ensure_one()
if self.google_cal_account_id.calendar_rtoken and not self.google_cal_account_id._is_google_calendar_valid():
self.sudo().google_cal_account_id._refresh_google_calendar_token()
return self.google_cal_account_id.calendar_token
def _sync_google_calendar(self, calendar_service: GoogleCalendarService):
self.ensure_one()
if self.google_synchronization_stopped:
return False
# don't attempt to sync when another sync is already in progress, as we wouldn't be
# able to commit the transaction anyway (row is locked)
self.env.cr.execute("""SELECT id FROM res_users WHERE id = %s FOR NO KEY UPDATE SKIP LOCKED""", [self.id])
if not self.env.cr.rowcount:
_logger.info("skipping calendar sync, locked user %s", self.login)
return False
full_sync = not bool(self.google_calendar_sync_token)
with google_calendar_token(self) as token:
try:
events, next_sync_token, default_reminders = calendar_service.get_events(self.google_cal_account_id.calendar_sync_token, token=token)
except InvalidSyncToken:
events, next_sync_token, default_reminders = calendar_service.get_events(token=token)
full_sync = True
self.google_cal_account_id.calendar_sync_token = next_sync_token
# Google -> Odoo
send_updates = not full_sync
events.clear_type_ambiguity(self.env)
recurrences = events.filter(lambda e: e.is_recurrence())
synced_recurrences = self.env['calendar.recurrence']._sync_google2odoo(recurrences)
synced_events = self.env['calendar.event']._sync_google2odoo(events - recurrences, default_reminders=default_reminders)
# Odoo -> Google
recurrences = self.env['calendar.recurrence']._get_records_to_sync(full_sync=full_sync)
recurrences -= synced_recurrences
recurrences.with_context(send_updates=send_updates)._sync_odoo2google(calendar_service)
synced_events |= recurrences.calendar_event_ids - recurrences._get_outliers()
synced_events |= synced_recurrences.calendar_event_ids - synced_recurrences._get_outliers()
events = self.env['calendar.event']._get_records_to_sync(full_sync=full_sync)
(events - synced_events).with_context(send_updates=send_updates)._sync_odoo2google(calendar_service)
return bool(events | synced_events) or bool(recurrences | synced_recurrences)
@api.model
def _sync_all_google_calendar(self):
""" Cron job """
users = self.env['res.users'].search([('google_calendar_rtoken', '!=', False), ('google_synchronization_stopped', '=', False)])
google = GoogleCalendarService(self.env['google.service'])
for user in users:
_logger.info("Calendar Synchro - Starting synchronization for %s", user)
try:
user.with_user(user).sudo()._sync_google_calendar(google)
self.env.cr.commit()
except Exception as e:
_logger.exception("[%s] Calendar Synchro - Exception : %s !", user, exception_to_unicode(e))
self.env.cr.rollback()
def stop_google_synchronization(self):
self.ensure_one()
self.google_synchronization_stopped = True
def restart_google_synchronization(self):
self.ensure_one()
if not self.google_cal_account_id:
self.google_cal_account_id = self.env['google.calendar.credentials'].sudo().create([{'user_ids': [Command.set(self.ids)]}])
self.google_synchronization_stopped = False
self.env['calendar.recurrence']._restart_google_sync()
self.env['calendar.event']._restart_google_sync()
| 51.056075 | 5,463 |
| 440 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
cal_client_id = fields.Char("Client_id", config_parameter='google_calendar_client_id', default='')
cal_client_secret = fields.Char("Client_key", config_parameter='google_calendar_client_secret', default='')
| 40 | 440 |
| 11,388 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
from odoo import api, models, Command
from odoo.tools import email_normalize
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService
class RecurrenceRule(models.Model):
_name = 'calendar.recurrence'
_inherit = ['calendar.recurrence', 'google.calendar.sync']
def _apply_recurrence(self, specific_values_creation=None, no_send_edit=False, generic_values_creation=None):
events = self.filtered('need_sync').calendar_event_ids
detached_events = super()._apply_recurrence(specific_values_creation, no_send_edit,
generic_values_creation)
google_service = GoogleCalendarService(self.env['google.service'])
# If a synced event becomes a recurrence, the event needs to be deleted from
# Google since it's now the recurrence which is synced.
# Those events are kept in the database and their google_id is updated
# according to the recurrence google_id, therefore we need to keep an inactive copy
# of those events with the original google id. The next sync will then correctly
# delete those events from Google.
vals = []
for event in events.filtered('google_id'):
if event.active and event.google_id != event.recurrence_id._get_event_google_id(event):
vals += [{
'name': event.name,
'google_id': event.google_id,
'start': event.start,
'stop': event.stop,
'active': False,
'need_sync': True,
}]
event.with_user(event._get_event_user())._google_delete(google_service, event.google_id)
event.google_id = False
self.env['calendar.event'].create(vals)
self.calendar_event_ids.need_sync = False
return detached_events
def _get_event_google_id(self, event):
"""Return the Google id of recurring event.
Google ids of recurrence instances are formatted as: {recurrence google_id}_{UTC starting time in compacted ISO8601}
"""
if self.google_id:
if event.allday:
time_id = event.start_date.isoformat().replace('-', '')
else:
# '-' and ':' are optional in ISO8601
start_compacted_iso8601 = event.start.isoformat().replace('-', '').replace(':', '')
# Z at the end for UTC
time_id = '%sZ' % start_compacted_iso8601
return '%s_%s' % (self.google_id, time_id)
return False
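    # Illustrative example (hypothetical ids): a recurrence with google_id 'abc123' and an instance
    # starting 2021-05-06 09:00:00 UTC yields 'abc123_20210506T090000Z'; for an all-day instance on
    # 2021-05-06 it would be 'abc123_20210506'.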
def _write_events(self, values, dtstart=None):
values.pop('google_id', False)
# If only some events are updated, sync those events.
values['need_sync'] = bool(dtstart)
return super()._write_events(values, dtstart=dtstart)
def _cancel(self):
self.calendar_event_ids._cancel()
super()._cancel()
def _get_google_synced_fields(self):
return {'rrule'}
@api.model
def _restart_google_sync(self):
self.env['calendar.recurrence'].search(self._get_sync_domain()).write({
'need_sync': True,
})
def _write_from_google(self, gevent, vals):
current_rrule = self.rrule
# event_tz is written on event in Google but on recurrence in Odoo
vals['event_tz'] = gevent.start.get('timeZone')
super()._write_from_google(gevent, vals)
base_event_time_fields = ['start', 'stop', 'allday']
new_event_values = self.env["calendar.event"]._odoo_values(gevent)
# We update the attendee status for all events in the recurrence
google_attendees = gevent.attendees or []
emails = [a.get('email') for a in google_attendees]
partners = self.env['mail.thread']._mail_find_partner_from_emails(emails, records=self, force_create=True, extra_domain=[('type', '!=', 'private')])
existing_attendees = self.calendar_event_ids.attendee_ids
for attendee in zip(emails, partners, google_attendees):
email = attendee[0]
if email in existing_attendees.mapped('email'):
# Update existing attendees
existing_attendees.filtered(lambda att: att.email == email).write({'state': attendee[2].get('responseStatus')})
else:
# Create new attendees
if attendee[2].get('self'):
partner = self.env.user.partner_id
elif attendee[1]:
partner = attendee[1]
else:
continue
self.calendar_event_ids.write({'attendee_ids': [(0, 0, {'state': attendee[2].get('responseStatus'), 'partner_id': partner.id})]})
if attendee[2].get('displayName') and not partner.name:
partner.name = attendee[2].get('displayName')
for odoo_attendee_email in set(existing_attendees.mapped('email')):
# Remove old attendees. Sometimes, several partners have the same email.
if email_normalize(odoo_attendee_email) not in emails:
attendees = existing_attendees.exists().filtered(lambda att: att.email == email_normalize(odoo_attendee_email))
self.calendar_event_ids.write({'need_sync': False, 'partner_ids': [Command.unlink(att.partner_id.id) for att in attendees]})
# Update the recurrence values
old_event_values = self.base_event_id and self.base_event_id.read(base_event_time_fields)[0]
if old_event_values and any(new_event_values[key] != old_event_values[key] for key in base_event_time_fields):
# we need to recreate the recurrence, time_fields were modified.
base_event_id = self.base_event_id
# We archive the old events to recompute the recurrence. These events are already deleted on Google side.
# We can't call _cancel because events without user_id would not be deleted
(self.calendar_event_ids - base_event_id).google_id = False
(self.calendar_event_ids - base_event_id).unlink()
base_event_id.with_context(dont_notify=True).write(dict(new_event_values, google_id=False, need_sync=False))
if self.rrule == current_rrule:
# if the rrule has changed, it will be recalculated below
# There is no detached event now
self.with_context(dont_notify=True)._apply_recurrence()
else:
time_fields = (
self.env["calendar.event"]._get_time_fields()
| self.env["calendar.event"]._get_recurrent_fields()
)
            # We avoid writing time_fields because they are not shared between events.
self._write_events(dict({
field: value
for field, value in new_event_values.items()
if field not in time_fields
}, need_sync=False)
)
        # We apply the rrule check after the time_field check because the google_ids are generated according
        # to the base_event start datetime.
if self.rrule != current_rrule:
detached_events = self._apply_recurrence()
detached_events.google_id = False
detached_events.unlink()
def _create_from_google(self, gevents, vals_list):
attendee_values = {}
for gevent, vals in zip(gevents, vals_list):
base_values = dict(
self.env['calendar.event']._odoo_values(gevent), # FIXME default reminders
need_sync=False,
)
            # If we convert a single event into a recurrence on Google, we should reuse this event in Odoo.
            # Google reuses the event google_id to identify the recurrence in that case.
base_event = self.env['calendar.event'].search([('google_id', '=', vals['google_id'])])
if not base_event:
base_event = self.env['calendar.event'].create(base_values)
else:
                # We override the base_event values because they could have been changed in the Google interface.
# The event google_id will be recalculated once the recurrence is created
base_event.write(dict(base_values, google_id=False))
vals['base_event_id'] = base_event.id
vals['calendar_event_ids'] = [(4, base_event.id)]
# event_tz is written on event in Google but on recurrence in Odoo
vals['event_tz'] = gevent.start.get('timeZone')
attendee_values[base_event.id] = {'attendee_ids': base_values.get('attendee_ids')}
recurrence = super(RecurrenceRule, self.with_context(dont_notify=True))._create_from_google(gevents, vals_list)
generic_values_creation = {
rec.id: attendee_values[rec.base_event_id.id]
for rec in recurrence if attendee_values.get(rec.base_event_id.id)
}
recurrence.with_context(dont_notify=True)._apply_recurrence(generic_values_creation=generic_values_creation)
return recurrence
def _get_sync_domain(self):
        # Empty rrules may exist in historical data. This is not a desired behavior, but they could have been created
        # with older versions of the module. When synced, these recurrences may come back from Google after database
        # cleaning and trigger errors as the records are not properly populated.
        # We also prevent syncing other users' recurrent events.
return [('calendar_event_ids.user_id', '=', self.env.user.id), ('rrule', '!=', False)]
@api.model
def _odoo_values(self, google_recurrence, default_reminders=()):
return {
'rrule': google_recurrence.rrule,
'google_id': google_recurrence.id,
}
def _google_values(self):
event = self._get_first_event()
if not event:
return {}
values = event._google_values()
values['id'] = self.google_id
if not self._is_allday():
values['start']['timeZone'] = self.event_tz or 'Etc/UTC'
values['end']['timeZone'] = self.event_tz or 'Etc/UTC'
# DTSTART is not allowed by Google Calendar API.
# Event start and end times are specified in the start and end fields.
rrule = re.sub('DTSTART:[0-9]{8}T[0-9]{1,8}\\n', '', self.rrule)
        # UNTIL must be in UTC (by appending a Z).
        # We only want to add a 'Z' to non-UTC UNTIL values and avoid adding a second one.
        # 'RRULE:FREQ=DAILY;UNTIL=20210224T235959;INTERVAL=3' --> matches UNTIL=20210224T235959
        # 'RRULE:FREQ=DAILY;UNTIL=20210224T235959'            --> matches as well
rrule = re.sub(r"(UNTIL=\d{8}T\d{6})($|;)", r"\1Z\2", rrule)
values['recurrence'] = ['RRULE:%s' % rrule] if 'RRULE:' not in rrule else [rrule]
property_location = 'shared' if event.user_id else 'private'
values['extendedProperties'] = {
property_location: {
'%s_odoo_id' % self.env.cr.dbname: self.id,
},
}
return values
def _get_event_user(self):
self.ensure_one()
event = self._get_first_event()
if event:
return event._get_event_user()
return self.env.user
| 50.167401 | 11,388 |
| 2,631 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http
from odoo.http import request
from odoo.addons.google_calendar.utils.google_calendar import GoogleCalendarService
class GoogleCalendarController(http.Controller):
@http.route('/google_calendar/sync_data', type='json', auth='user')
def sync_data(self, model, **kw):
""" This route/function is called when we want to synchronize Odoo
calendar with Google Calendar.
        The function returns a dictionary with the status: need_config_from_admin, need_auth,
        need_refresh, sync_stopped, or success if the model is not calendar.event.
        The dictionary may contain a URL, allowing the Odoo client to redirect the user to
        this URL, for authorization for example.
"""
if model == 'calendar.event':
base_url = request.httprequest.url_root.strip('/')
GoogleCal = GoogleCalendarService(request.env['google.service'].with_context(base_url=base_url))
            # Check that the admin has already configured the Google API for Google synchronization
client_id = request.env['ir.config_parameter'].sudo().get_param('google_calendar_client_id')
if not client_id or client_id == '':
action_id = ''
if GoogleCal._can_authorize_google(request.env.user):
action_id = request.env.ref('base_setup.action_general_configuration').id
return {
"status": "need_config_from_admin",
"url": '',
"action": action_id
}
            # Check that the user has already allowed Odoo to access their calendar
if not GoogleCal.is_authorized(request.env.user):
url = GoogleCal._google_authentication_url(from_url=kw.get('fromurl'))
return {
"status": "need_auth",
"url": url
}
            # If the app is authorized and user access has been granted, launch the synchronization
need_refresh = request.env.user.sudo()._sync_google_calendar(GoogleCal)
# If synchronization has been stopped
if not need_refresh and request.env.user.google_synchronization_stopped:
return {
"status": "sync_stopped",
"url": ''
}
return {
"status": "need_refresh" if need_refresh else "no_new_event_from_google",
"url": ''
}
return {"status": "success"}
| 45.362069 | 2,631 |
| 1,038 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'OdooBot',
'version': '1.2',
'category': 'Productivity/Discuss',
'summary': 'Add OdooBot in discussions',
'description': "",
'website': 'https://www.odoo.com/app/discuss',
'depends': ['mail'],
'auto_install': True,
'installable': True,
'application': False,
'data': [
'views/res_users_views.xml',
'data/mailbot_data.xml',
],
'demo': [
'data/mailbot_demo.xml',
],
'assets': {
'mail.assets_discuss_public': [
'mail_bot/static/src/models/*/*.js',
],
'web.assets_backend': [
'mail_bot/static/src/models/*/*.js',
'mail_bot/static/src/scss/odoobot_style.scss',
],
'web.tests_assets': [
'mail_bot/static/tests/**/*',
],
'web.qunit_suite_tests': [
'mail_bot/static/src/models/*/tests/*.js',
],
},
'license': 'LGPL-3',
}
| 27.315789 | 1,038 |
| 438 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class Http(models.AbstractModel):
_inherit = 'ir.http'
def session_info(self):
res = super(Http, self).session_info()
if self.env.user.has_group('base.group_user'):
res['odoobot_initialized'] = self.env.user.odoobot_state not in [False, 'not_initialized']
return res
| 31.285714 | 438 |
| 396 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class MailThread(models.AbstractModel):
_inherit = 'mail.thread'
def _message_post_after_hook(self, message, msg_vals):
self.env['mail.bot']._apply_logic(self, msg_vals)
return super(MailThread, self)._message_post_after_hook(message, msg_vals)
| 33 | 396 |
| 895 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields
class Users(models.Model):
_inherit = 'res.users'
odoobot_state = fields.Selection(
[
('not_initialized', 'Not initialized'),
('onboarding_emoji', 'Onboarding emoji'),
            ('onboarding_attachement', 'Onboarding attachment'),
('onboarding_command', 'Onboarding command'),
('onboarding_ping', 'Onboarding ping'),
('idle', 'Idle'),
('disabled', 'Disabled'),
], string="OdooBot Status", readonly=True, required=False) # keep track of the state: correspond to the code of the last message sent
odoobot_failed = fields.Boolean(readonly=True)
@property
def SELF_READABLE_FIELDS(self):
return super().SELF_READABLE_FIELDS + ['odoobot_state']
| 38.913043 | 895 |
| 12,500 | py | PYTHON | 15.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import itertools
import random
from odoo import models, _
class MailBot(models.AbstractModel):
_name = 'mail.bot'
_description = 'Mail Bot'
def _apply_logic(self, record, values, command=None):
""" Apply bot logic to generate an answer (or not) for the user
The logic will only be applied if odoobot is in a chat with a user or
if someone pinged odoobot.
:param record: the mail_thread (or mail_channel) where the user
message was posted/odoobot will answer.
:param values: msg_values of the message_post or other values needed by logic
:param command: the name of the called command if the logic is not triggered by a message_post
"""
odoobot_id = self.env['ir.model.data']._xmlid_to_res_id("base.partner_root")
if len(record) != 1 or values.get("author_id") == odoobot_id:
return
if self._is_bot_pinged(values) or self._is_bot_in_private_channel(record):
body = values.get("body", "").replace(u'\xa0', u' ').strip().lower().strip(".!")
answer = self._get_answer(record, body, values, command)
if answer:
message_type = values.get('message_type', 'comment')
subtype_id = values.get('subtype_id', self.env['ir.model.data']._xmlid_to_res_id('mail.mt_comment'))
record.with_context(mail_create_nosubscribe=True).sudo().message_post(body=answer, author_id=odoobot_id, message_type=message_type, subtype_id=subtype_id)
def _get_answer(self, record, body, values, command=False):
# onboarding
odoobot_state = self.env.user.odoobot_state
if self._is_bot_in_private_channel(record):
# main flow
if odoobot_state == 'onboarding_emoji' and self._body_contains_emoji(body):
self.env.user.odoobot_state = "onboarding_command"
self.env.user.odoobot_failed = False
return _("Great! 👍<br/>To access special commands, <b>start your sentence with</b> <span class=\"o_odoobot_command\">/</span>. Try getting help.")
elif odoobot_state == 'onboarding_command' and command == 'help':
self.env.user.odoobot_state = "onboarding_ping"
self.env.user.odoobot_failed = False
return _("Wow you are a natural!<br/>Ping someone with @username to grab their attention. <b>Try to ping me using</b> <span class=\"o_odoobot_command\">@OdooBot</span> in a sentence.")
elif odoobot_state == 'onboarding_ping' and self._is_bot_pinged(values):
self.env.user.odoobot_state = "onboarding_attachement"
self.env.user.odoobot_failed = False
return _("Yep, I am here! 🎉 <br/>Now, try <b>sending an attachment</b>, like a picture of your cute dog...")
elif odoobot_state == 'onboarding_attachement' and values.get("attachment_ids"):
self.env.user.odoobot_state = "idle"
self.env.user.odoobot_failed = False
return _("I am a simple bot, but if that's a dog, he is the cutest 😊 <br/>Congratulations, you finished this tour. You can now <b>close this chat window</b>. Enjoy discovering Odoo.")
elif odoobot_state in (False, "idle", "not_initialized") and (_('start the tour') in body.lower()):
self.env.user.odoobot_state = "onboarding_emoji"
return _("To start, try to send me an emoji :)")
# easter eggs
elif odoobot_state == "idle" and body in ['❤️', _('i love you'), _('love')]:
return _("Aaaaaw that's really cute but, you know, bots don't work that way. You're too human for me! Let's keep it professional ❤️")
elif _('fuck') in body or "fuck" in body:
return _("That's not nice! I'm a bot but I have feelings... 💔")
# help message
elif self._is_help_requested(body) or odoobot_state == 'idle':
return _("Unfortunately, I'm just a bot 😞 I don't understand! If you need help discovering our product, please check "
"<a href=\"https://www.odoo.com/documentation\" target=\"_blank\">our documentation</a> or "
"<a href=\"https://www.odoo.com/slides\" target=\"_blank\">our videos</a>.")
else:
# repeat question
if odoobot_state == 'onboarding_emoji':
self.env.user.odoobot_failed = True
return _("Not exactly. To continue the tour, send an emoji: <b>type</b> <span class=\"o_odoobot_command\">:)</span> and press enter.")
elif odoobot_state == 'onboarding_attachement':
self.env.user.odoobot_failed = True
return _("To <b>send an attachment</b>, click on the <i class=\"fa fa-paperclip\" aria-hidden=\"true\"></i> icon and select a file.")
elif odoobot_state == 'onboarding_command':
self.env.user.odoobot_failed = True
return _("Not sure what you are doing. Please, type <span class=\"o_odoobot_command\">/</span> and wait for the propositions. Select <span class=\"o_odoobot_command\">help</span> and press enter")
elif odoobot_state == 'onboarding_ping':
self.env.user.odoobot_failed = True
return _("Sorry, I am not listening. To get someone's attention, <b>ping him</b>. Write <span class=\"o_odoobot_command\">@OdooBot</span> and select me.")
return random.choice([
_("I'm not smart enough to answer your question.<br/>To follow my guide, ask: <span class=\"o_odoobot_command\">start the tour</span>."),
_("Hmmm..."),
_("I'm afraid I don't understand. Sorry!"),
_("Sorry I'm sleepy. Or not! Maybe I'm just trying to hide my unawareness of human language...<br/>I can show you features if you write: <span class=\"o_odoobot_command\">start the tour</span>.")
])
return False
def _body_contains_emoji(self, body):
# coming from https://unicode.org/emoji/charts/full-emoji-list.html
emoji_list = itertools.chain(
range(0x231A, 0x231c),
range(0x23E9, 0x23f4),
range(0x23F8, 0x23fb),
range(0x25AA, 0x25ac),
range(0x25FB, 0x25ff),
range(0x2600, 0x2605),
range(0x2614, 0x2616),
range(0x2622, 0x2624),
range(0x262E, 0x2630),
range(0x2638, 0x263b),
range(0x2648, 0x2654),
range(0x265F, 0x2661),
range(0x2665, 0x2667),
range(0x267E, 0x2680),
range(0x2692, 0x2698),
range(0x269B, 0x269d),
range(0x26A0, 0x26a2),
range(0x26AA, 0x26ac),
range(0x26B0, 0x26b2),
range(0x26BD, 0x26bf),
range(0x26C4, 0x26c6),
range(0x26D3, 0x26d5),
range(0x26E9, 0x26eb),
range(0x26F0, 0x26f6),
range(0x26F7, 0x26fb),
range(0x2708, 0x270a),
range(0x270A, 0x270c),
range(0x270C, 0x270e),
range(0x2733, 0x2735),
range(0x2753, 0x2756),
range(0x2763, 0x2765),
range(0x2795, 0x2798),
range(0x2934, 0x2936),
range(0x2B05, 0x2b08),
range(0x2B1B, 0x2b1d),
range(0x1F170, 0x1f172),
range(0x1F191, 0x1f19b),
range(0x1F1E6, 0x1f200),
range(0x1F201, 0x1f203),
range(0x1F232, 0x1f23b),
range(0x1F250, 0x1f252),
range(0x1F300, 0x1f321),
range(0x1F324, 0x1f32d),
range(0x1F32D, 0x1f330),
range(0x1F330, 0x1f336),
range(0x1F337, 0x1f37d),
range(0x1F37E, 0x1f380),
range(0x1F380, 0x1f394),
range(0x1F396, 0x1f398),
range(0x1F399, 0x1f39c),
range(0x1F39E, 0x1f3a0),
range(0x1F3A0, 0x1f3c5),
range(0x1F3C6, 0x1f3cb),
range(0x1F3CB, 0x1f3cf),
range(0x1F3CF, 0x1f3d4),
range(0x1F3D4, 0x1f3e0),
range(0x1F3E0, 0x1f3f1),
range(0x1F3F3, 0x1f3f6),
range(0x1F3F8, 0x1f400),
range(0x1F400, 0x1f43f),
range(0x1F442, 0x1f4f8),
range(0x1F4F9, 0x1f4fd),
range(0x1F500, 0x1f53e),
range(0x1F549, 0x1f54b),
range(0x1F54B, 0x1f54f),
range(0x1F550, 0x1f568),
range(0x1F56F, 0x1f571),
range(0x1F573, 0x1f57a),
range(0x1F58A, 0x1f58e),
range(0x1F595, 0x1f597),
range(0x1F5B1, 0x1f5b3),
range(0x1F5C2, 0x1f5c5),
range(0x1F5D1, 0x1f5d4),
range(0x1F5DC, 0x1f5df),
range(0x1F5FB, 0x1f600),
range(0x1F601, 0x1f611),
range(0x1F612, 0x1f615),
range(0x1F61C, 0x1f61f),
range(0x1F620, 0x1f626),
range(0x1F626, 0x1f628),
range(0x1F628, 0x1f62c),
range(0x1F62E, 0x1f630),
range(0x1F630, 0x1f634),
range(0x1F635, 0x1f641),
range(0x1F641, 0x1f643),
range(0x1F643, 0x1f645),
range(0x1F645, 0x1f650),
range(0x1F680, 0x1f6c6),
range(0x1F6CB, 0x1f6d0),
range(0x1F6D1, 0x1f6d3),
range(0x1F6E0, 0x1f6e6),
range(0x1F6EB, 0x1f6ed),
range(0x1F6F4, 0x1f6f7),
range(0x1F6F7, 0x1f6f9),
range(0x1F910, 0x1f919),
range(0x1F919, 0x1f91f),
range(0x1F920, 0x1f928),
range(0x1F928, 0x1f930),
range(0x1F931, 0x1f933),
range(0x1F933, 0x1f93b),
range(0x1F93C, 0x1f93f),
range(0x1F940, 0x1f946),
range(0x1F947, 0x1f94c),
range(0x1F94D, 0x1f950),
range(0x1F950, 0x1f95f),
range(0x1F95F, 0x1f96c),
range(0x1F96C, 0x1f971),
range(0x1F973, 0x1f977),
range(0x1F97C, 0x1f980),
range(0x1F980, 0x1f985),
range(0x1F985, 0x1f992),
range(0x1F992, 0x1f998),
range(0x1F998, 0x1f9a3),
range(0x1F9B0, 0x1f9ba),
range(0x1F9C1, 0x1f9c3),
range(0x1F9D0, 0x1f9e7),
range(0x1F9E7, 0x1fa00),
[0x2328, 0x23cf, 0x24c2, 0x25b6, 0x25c0, 0x260e, 0x2611, 0x2618, 0x261d, 0x2620, 0x2626,
0x262a, 0x2640, 0x2642, 0x2663, 0x2668, 0x267b, 0x2699, 0x26c8, 0x26ce, 0x26cf,
0x26d1, 0x26fd, 0x2702, 0x2705, 0x270f, 0x2712, 0x2714, 0x2716, 0x271d, 0x2721, 0x2728, 0x2744, 0x2747, 0x274c,
0x274e, 0x2757, 0x27a1, 0x27b0, 0x27bf, 0x2b50, 0x2b55, 0x3030, 0x303d, 0x3297, 0x3299, 0x1f004, 0x1f0cf, 0x1f17e,
0x1f17f, 0x1f18e, 0x1f21a, 0x1f22f, 0x1f321, 0x1f336, 0x1f37d, 0x1f3c5, 0x1f3f7, 0x1f43f, 0x1f440, 0x1f441, 0x1f4f8,
0x1f4fd, 0x1f4ff, 0x1f57a, 0x1f587, 0x1f590, 0x1f5a4, 0x1f5a5, 0x1f5a8, 0x1f5bc, 0x1f5e1, 0x1f5e3, 0x1f5e8, 0x1f5ef,
0x1f5f3, 0x1f5fa, 0x1f600, 0x1f611, 0x1f615, 0x1f616, 0x1f617, 0x1f618, 0x1f619, 0x1f61a, 0x1f61b, 0x1f61f, 0x1f62c,
0x1f62d, 0x1f634, 0x1f6d0, 0x1f6e9, 0x1f6f0, 0x1f6f3, 0x1f6f9, 0x1f91f, 0x1f930, 0x1f94c, 0x1f97a, 0x1f9c0]
)
if any(chr(emoji) in body for emoji in emoji_list):
return True
return False
def _is_bot_pinged(self, values):
odoobot_id = self.env['ir.model.data']._xmlid_to_res_id("base.partner_root")
return odoobot_id in values.get('partner_ids', [])
def _is_bot_in_private_channel(self, record):
odoobot_id = self.env['ir.model.data']._xmlid_to_res_id("base.partner_root")
if record._name == 'mail.channel' and record.channel_type == 'chat':
return odoobot_id in record.with_context(active_test=False).channel_partner_ids.ids
return False
def _is_help_requested(self, body):
"""Returns whether a message linking to the documentation and videos
should be sent back to the user.
"""
return any(token in body for token in ['help', _('help'), '?']) or self.env.user.odoobot_failed
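# --- Hedged illustration (not part of the original file) ---
# A minimal, self-contained sketch of the detection technique used by
# _body_contains_emoji() above: collect emoji code points with
# itertools.chain() and test whether any character of the message is one of
# them. The narrow ranges below are an assumption kept short for readability;
# the real method chains the full Unicode emoji ranges listed above.
def _contains_emoji_sketch(body):
    import itertools
    emoji_codepoints = set(itertools.chain(
        range(0x1F600, 0x1F650),  # emoticons block (subset, for illustration)
        [0x2764],                 # heavy black heart
    ))
    # Same membership test as above, written from the message side.
    return any(ord(ch) in emoji_codepoints for ch in body)
# _contains_emoji_sketch("Great job \U0001F600")  -> True
# _contains_emoji_sketch("No emoji here")         -> False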
| 52.42437 | 12,477 |
1,142 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models, _
class Channel(models.Model):
_inherit = 'mail.channel'
def execute_command_help(self, **kwargs):
super().execute_command_help(**kwargs)
        self.env['mail.bot']._apply_logic(self, kwargs, command="help")  # kwargs are not useful here, but forward them anyway
@api.model
def init_odoobot(self):
if self.env.user.odoobot_state in [False, 'not_initialized']:
odoobot_id = self.env['ir.model.data']._xmlid_to_res_id("base.partner_root")
channel_info = self.channel_get([odoobot_id])
channel = self.browse(channel_info['id'])
message = _("Hello,<br/>Odoo's chat helps employees collaborate efficiently. I'm here to help you discover its features.<br/><b>Try to send me an emoji</b> <span class=\"o_odoobot_command\">:)</span>")
channel.sudo().message_post(body=message, author_id=odoobot_id, message_type="comment", subtype_xmlid="mail.mt_comment")
self.env.user.odoobot_state = 'onboarding_emoji'
return channel
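# --- Hedged usage sketch (not part of the original file) ---
# How the onboarding chat could be opened programmatically, e.g. from a test
# or a server action. The helper name is an assumption; `env` is expected to
# be an Odoo environment for the target user.
def _open_odoobot_onboarding_sketch(env):
    env.user.odoobot_state = 'not_initialized'   # force (re)initialization
    channel = env['mail.channel'].init_odoobot()
    # The user is now at the first step of the tour driven by mail.bot.
    assert env.user.odoobot_state == 'onboarding_emoji'
    return channel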
| 49.652174 | 1,142 |
902 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2011 Cubic ERP - Teradata SAC. (https://cubicerp.com).
{
"name": "Bolivia - Accounting",
"version": "2.0",
"description": """
Bolivian accounting chart and tax localization.
Bolivian chart of accounts and taxes in accordance with current regulations.
""",
"author": "Cubic ERP",
'category': 'Accounting/Localizations/Account Charts',
"depends": ["account"],
"data": [
"data/l10n_bo_chart_data.xml",
"data/account.account.template.csv",
"data/l10n_bo_chart_post_data.xml",
'data/account_tax_group_data.xml',
'data/account_tax_report_data.xml',
"data/account_tax_data.xml",
"data/account_chart_template_data.xml",
],
'demo': [
'demo/demo_company.xml',
],
'license': 'LGPL-3',
}
| 29.096774 | 902 |
858 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': "Events Booths",
'category': 'Marketing/Events',
'version': '1.0',
'summary': "Manage event booths",
'description': """
Create booths for your favorite event.
""",
'depends': ['event'],
'data': [
'security/ir.model.access.csv',
'views/event_booth_category_views.xml',
'views/event_type_booth_views.xml',
'views/event_booth_views.xml',
'views/event_type_views.xml',
'views/event_event_views.xml',
'views/event_menus.xml',
'data/event_booth_category_data.xml',
'data/mail_data.xml',
'data/mail_templates.xml',
],
'demo': [
'data/event_booth_demo.xml',
'data/event_type_demo.xml',
],
'license': 'LGPL-3',
}
| 28.6 | 858 |
997 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.event.tests.common import TestEventCommon
class TestEventBoothCommon(TestEventCommon):
@classmethod
def setUpClass(cls):
super(TestEventBoothCommon, cls).setUpClass()
cls.event_booth_category_1 = cls.env['event.booth.category'].create({
'name': 'Standard',
'description': '<p>Standard</p>',
})
cls.event_booth_category_2 = cls.env['event.booth.category'].create({
'name': 'Premium',
'description': '<p>Premium</p>',
})
cls.event_type_complex.write({
'event_type_booth_ids': [
(5, 0),
(0, 0,
{'name': 'Standard 1', 'booth_category_id': cls.event_booth_category_1.id}),
(0, 0,
{'name': 'Premium 1', 'booth_category_id': cls.event_booth_category_2.id}),
],
})
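# --- Hedged clarification (not part of the original file) ---
# The raw one2many command tuples used above are the pre-Command notation:
# (5, 0) clears the existing lines and (0, 0, vals) creates a new one. The
# helper below is an illustrative assumption showing the equivalent calls
# from the Command namespace, as used elsewhere in this module's tests.
def _example_booth_commands(category_1, category_2):
    from odoo import Command
    return [
        Command.clear(),                                       # same as (5, 0)
        Command.create({'name': 'Standard 1',
                        'booth_category_id': category_1.id}),  # same as (0, 0, vals)
        Command.create({'name': 'Premium 1',
                        'booth_category_id': category_2.id}),
    ]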
| 33.233333 | 997 |
4,180 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from odoo import Command
from odoo.addons.event_booth.tests.common import TestEventBoothCommon
from odoo.fields import Datetime as FieldsDatetime
from odoo.tests.common import users, Form, tagged
from odoo.tools import mute_logger
@tagged('post_install', '-at_install')
class TestEventData(TestEventBoothCommon):
@mute_logger('odoo.models.unlink')
@users('user_eventmanager')
def test_event_configuration_booths_from_type(self):
""" Test data computation (related to booths) of event coming from its event.type template. """
# setup test records
event_type_nobooth = self.env['event.type'].create({
'name': 'No booth',
})
event_type_wbooths = self.env['event.type'].create({
'name': 'Using booths',
'event_type_booth_ids': [
Command.clear(),
Command.create({
'name': 'Standard Booth',
'booth_category_id': self.event_booth_category_1.id,
}),
Command.create({
'name': 'Premium Booth',
'booth_category_id': self.event_booth_category_2.id,
})
]
})
        # no booths by default, as the event type defines none
event = self.env['event.event'].create({
'name': 'Event',
'date_begin': FieldsDatetime.to_string(datetime.today() + timedelta(days=1)),
'date_end': FieldsDatetime.to_string(datetime.today() + timedelta(days=15)),
'event_type_id': event_type_nobooth.id
})
self.assertEqual(event.event_booth_ids, self.env['event.booth'])
# manually create booths: ok
event.write({
'event_booth_ids': [
Command.create({
'name': 'Custom Standard Booth 1',
'booth_category_id': self.event_booth_category_1.id,
}),
Command.create({
'name': 'Custom Standard Booth 2',
'booth_category_id': self.event_booth_category_1.id,
})
]
})
self.assertEqual(event.event_booth_count, 2)
self.assertEqual(event.event_booth_count_available, 2)
self.assertEqual(event.event_booth_category_ids, self.event_booth_category_1)
self.assertEqual(event.event_booth_ids[1].message_partner_ids, self.env['res.partner'])
        # updating the partner is independent of availability
event.event_booth_ids[1].write({'partner_id': self.event_customer.id})
self.assertEqual(event.event_booth_count, 2)
self.assertEqual(event.event_booth_count_available, 2)
self.assertEqual(event.event_booth_ids[1].message_partner_ids, self.event_customer)
# one booth is sold
event.event_booth_ids[1].write({'state': 'unavailable'})
self.assertEqual(event.event_booth_count, 2)
self.assertEqual(event.event_booth_count_available, 1)
# partner is reset: booth still unavailable but follower removed
event.event_booth_ids[1].write({'partner_id': False})
self.assertEqual(event.event_booth_count, 2)
self.assertEqual(event.event_booth_count_available, 1)
self.assertEqual(event.event_booth_ids[1].message_partner_ids, self.env['res.partner'])
# change event type to one using booths: include event type booths and keep reserved booths
with Form(event) as event_form:
event_form.event_type_id = event_type_wbooths
self.assertEqual(event.event_booth_count, 3)
self.assertEqual(
set(r['name'] for r in event.event_booth_ids),
set(('Custom Standard Booth 2', 'Standard Booth', 'Premium Booth')),
'Should keep booths with reservation, remove unused ones and add type ones'
)
self.assertEqual(event.event_booth_count_available, 2)
self.assertEqual(event.event_booth_category_ids, self.event_booth_category_1 + self.event_booth_category_2)
| 44.946237 | 4,180 |
2,505 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta
from odoo.addons.event_booth.tests.common import TestEventBoothCommon
from odoo.fields import Datetime as FieldsDatetime
from odoo.tests.common import users, tagged
@tagged('post_install', '-at_install', 'event_booth')
class TestEventData(TestEventBoothCommon):
@users('user_eventmanager')
def test_event_booth_contact(self):
""" Test contact details computation """
customer = self.env['res.partner'].browse(self.event_customer.ids)
category = self.env['event.booth.category'].browse(self.event_booth_category_1.ids)
        self.assertTrue(all(
            bool(customer[fname])
            for fname in ['name', 'email', 'country_id', 'phone']
        ))
customer_email = customer.email
event = self.env['event.event'].create({
'name': 'Event',
'date_begin': FieldsDatetime.to_string(datetime.today() + timedelta(days=1)),
'date_end': FieldsDatetime.to_string(datetime.today() + timedelta(days=15)),
'event_type_id': False,
})
self.assertEqual(event.event_booth_ids, self.env['event.booth'])
booth = self.env['event.booth'].create({
'name': 'Test Booth',
'booth_category_id': category.id,
'event_id': event.id,
'partner_id': customer.id,
})
self.assertEqual(booth.contact_name, customer.name)
self.assertEqual(booth.contact_email, customer_email)
self.assertEqual(booth.contact_phone, customer.phone)
self.assertFalse(booth.contact_mobile, 'Data has no mobile')
booth.write({
'contact_email': '"New Emails" <[email protected]',
'contact_phone': False,
})
self.assertEqual(booth.contact_email, '"New Emails" <[email protected]')
self.assertEqual(booth.contact_phone, False)
self.assertEqual(customer.email, customer_email, 'No sync from booth to partner')
        # Partial update of contact fields: we may end up with mixed contact information. Is that a good idea?
booth.write({'partner_id': self.event_customer2.id})
self.assertEqual(booth.contact_name, customer.name)
self.assertEqual(booth.contact_email, '"New Emails" <[email protected]')
self.assertEqual(booth.contact_phone, self.event_customer2.phone)
| 43.947368 | 2,505 |
336 |
py
|
PYTHON
|
15.0
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class EventType(models.Model):
_inherit = 'event.type'
event_type_booth_ids = fields.One2many(
'event.type.booth', 'event_type_id',
string='Booths', readonly=False, store=True)
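# --- Hedged usage sketch (not part of the original file) ---
# Booth templates attached through this one2many are propagated to events
# created with the event type (see this module's tests). The helper below is
# an illustrative assumption; `env` and `category` must be supplied by the
# caller.
def _example_event_type_with_booths(env, category):
    from odoo import Command
    return env['event.type'].create({
        'name': 'Exhibition',
        'event_type_booth_ids': [
            Command.create({
                'name': 'Standard Booth',
                'booth_category_id': category.id,
            }),
        ],
    })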
| 28 | 336 |