diff --git a/connector_extension/README.rst b/connector_extension/README.rst new file mode 100644 index 000000000..6f13bdee4 --- /dev/null +++ b/connector_extension/README.rst @@ -0,0 +1,86 @@ +=================== +Connector Extension +=================== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:39b64c10f88d1224bc1e05fc967b018cce54a165a66cc8fd65dbf1c03f196c2d + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png + :target: https://odoo-community.org/page/development-status + :alt: Beta +.. |badge2| image:: https://img.shields.io/badge/licence-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-NuoBiT%2Fodoo--addons-lightgray.png?logo=github + :target: https://github.com/NuoBiT/odoo-addons/tree/18.0/connector_extension + :alt: NuoBiT/odoo-addons + +|badge1| |badge2| |badge3| + +The "connector_extension" module is an add-on for the Odoo ERP system +that enhances the functionality of the base "connector" module. This +extension provides additional features, tools, and integrations, making +it easier for developers to create, manage, and maintain connections +between Odoo and various third-party systems, APIs, or services. + +The module aims to simplify the connector development process by +providing a robust and flexible framework. The "connector_extension" +module allows developers to focus on implementing specific business +logic and requirements, while the extension handles common tasks. + +- Extended connector framework: The module extends the base connector + framework by providing new classes, methods, and utilities for easier + integration with third-party systems. 
+- Reusable components: Pre-built components for handling common tasks + such error handling, and data synchronization. +- Enhanced data mapping: Advanced data mapping capabilities to transform + and adapt data between Odoo and external systems seamlessly. +- Scalability and performance improvements: Optimized for handling large + datasets and concurrent connections, ensuring smooth and efficient + data synchronization. +- Linking with existing records: Enables linking with existing records, + using alternate keys, which allows for easier data management and + reduced redundancy. + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* NuoBiT Solutions SL + +Contributors +------------ + +- `NuoBiT `__: + + - Kilian Niubo kniubo@nuobit.com + - Eric Antones eantones@nuobit.com + +Maintainers +----------- + +This module is part of the `NuoBiT/odoo-addons `_ project on GitHub. + +You are welcome to contribute. diff --git a/connector_extension/__init__.py b/connector_extension/__init__.py new file mode 100644 index 000000000..f24d3e242 --- /dev/null +++ b/connector_extension/__init__.py @@ -0,0 +1,2 @@ +from . import components +from . 
import models diff --git a/connector_extension/__manifest__.py b/connector_extension/__manifest__.py new file mode 100644 index 000000000..bf281ed4e --- /dev/null +++ b/connector_extension/__manifest__.py @@ -0,0 +1,17 @@ +# Copyright NuoBiT Solutions SL - Kilian Niubo +# Copyright NuoBiT Solutions SL - Eric Antones +# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +{ + "name": "Connector Extension", + "summary": "This module extends the connector module", + "version": "18.0.1.0.0", + "author": "NuoBiT Solutions SL", + "license": "LGPL-3", + "category": "Connector", + "website": "https://github.com/NuoBiT/odoo-addons", + "depends": ["connector"], + # The dependency on queue_context is necessary so that + # when a job calls another job, the company is not lost +} diff --git a/connector_extension/common/tools.py b/connector_extension/common/tools.py new file mode 100644 index 000000000..f77b924e8 --- /dev/null +++ b/connector_extension/common/tools.py @@ -0,0 +1,168 @@ +# Copyright NuoBiT Solutions SL - Eric Antones +# Copyright NuoBiT Solutions SL - Kilian Niubo +# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +import datetime +import hashlib +import re +import unicodedata + +from odoo import _ +from odoo.exceptions import ValidationError + + +def list2hash(_list): + _hash = hashlib.sha256() + for e in _list: + if isinstance(e, int): + e9 = str(e) + elif isinstance(e, str): + e9 = e + elif isinstance(e, float): + e9 = str(e) + elif e is None: + e9 = "" + else: + raise Exception(f"Unexpected type for a key: type {type(e)}") + _hash.update(e9.encode("utf8")) + return _hash.hexdigest() + + +def domain_to_normalized_dict(self, domain): + """Convert, if possible, standard Odoo domain to a dictionary. + To do so it is necessary to convert all operators to + equal '=' operator. 
+ """ + res = {} + for elem in domain: + if len(elem) != 3: + raise ValidationError(_("Wrong domain clause format %s") % elem) + field, op, value = elem + if op == "=": + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(value) + elif op == "!=": + if not isinstance(value, bool): + raise ValidationError( + _("Not equal operation not supported for non boolean fields") + ) + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(not value) + elif op == "in": + if not isinstance(value, (tuple | list)): + raise ValidationError( + _( + "Operator '%(OPERATOR)s' only supports " + "tuples or lists, not %(TYPES)s" + ) + % { + "OPERATOR": op, + "TYPES": type(value), + } + ) + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(value) + elif op in (">", ">=", "<", "<="): + if not isinstance(value, (datetime.date | datetime.datetime | int)): + raise ValidationError( + _("Type %(value_type)s not supported for operator %(operator)s") + % {"value_type": type(value), "operator": op} + ) + if op in (">", "<"): + adj = 1 + if isinstance(value, (datetime.date | datetime.datetime)): + adj = datetime.timedelta(days=adj) + if op == "<": + op, value = "<=", value - adj + else: + op, value = ">=", value + adj + + res[field] = self._normalize_value(value) + else: + raise ValidationError(_("Operator %s not supported") % op) + + return res + + +def convert_item_to_json(item, ct, namespace): + jitem = {} + for path, func, key, multi in ct: + if key in jitem: + raise ValidationError(_("Key %s already exists") % key) + value = item.xpath(path, namespaces=namespace) + if not value: + jitem[key] = None + else: + if multi: + jitem[key] = func(value) + else: + if len(value) > 1: + raise ValidationError(_("Multiple values found for '%s'") % path) + else: + jitem[key] = func(value[0]) + return jitem + + +def 
convert_to_json(data, ct, namespace): + res = [] + for d in data: + res.append(convert_item_to_json(d, ct, namespace)) + return res + + +def slugify(value): + if not value: + return None + return ( + unicodedata.normalize("NFKD", value) + .encode("ascii", "ignore") + .decode("ascii") + .lower() + .replace(" ", "") + ) + + +def trim_domain(domain): + """ + Takes an Odoo-style domain (a Python list of clauses) and returns a new domain where + any string values in the third position of 3‑element clauses have leading/trailing + whitespace removed. + """ + trimmed_domain = [] + for d in domain: + if isinstance(d, (list | tuple)): + if len(d) == 3 and isinstance(d[2], str): + trimmed_domain.append((d[0], d[1], d[2].strip())) + elif len(d) == 3 and isinstance(d[2], (list | tuple)): + trimmed_value = [ + value.strip() if isinstance(value, str) else value for value in d[2] + ] + trimmed_domain.append((d[0], d[1], trimmed_value)) + else: + trimmed_domain.append(d) + else: + raise Exception(f"Unexpected domain format: {d}") + return trimmed_domain + + +def color_rgb2hex(data): + def conv_rgb(match): + rgb_hex_l = [] + groups = match.groups() + for value, percent in zip(groups[0::2], groups[1::2], strict=True): + if percent: + hex_value = round(float(value) * 255 / 100) + else: + hex_value = int(value) + rgb_hex_l.append(f"{hex_value:02X}") + return f'#{"".join(rgb_hex_l)}' + + return re.sub( + r"rgb\( *([0-9.]+) *(%?) *, *([0-9.]+) *(%?) *, *([0-9.]+) *(%?) *\)", + conv_rgb, + data, + flags=re.IGNORECASE, + ) diff --git a/connector_extension/components/__init__.py b/connector_extension/components/__init__.py new file mode 100644 index 000000000..ebf77cde3 --- /dev/null +++ b/connector_extension/components/__init__.py @@ -0,0 +1,9 @@ +from . import adapter +from . import binder +from . import synchronizer +from . import export_deleter +from . import exporter +from . import importer +from . import import_deleter +from . import listener +from . 
import mapper diff --git a/connector_extension/components/adapter.py b/connector_extension/components/adapter.py new file mode 100644 index 000000000..1c708f8d8 --- /dev/null +++ b/connector_extension/components/adapter.py @@ -0,0 +1,270 @@ +# Copyright NuoBiT Solutions SL - Eric Antones +# Copyright NuoBiT Solutions SL - Kilian Niubo +# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +import datetime + +from odoo import _, api +from odoo.exceptions import ValidationError + +from odoo.addons.component.core import AbstractComponent + + +class ConnectorExtensionAdapterCRUD(AbstractComponent): + _name = "connector.extension.adapter.crud" + _inherit = "base.backend.adapter.crud" + + _date_format = "%Y-%m-%d" + _datetime_format = "%Y-%m-%dT%H:%M:%SZ" + + _usage = "adapter" + + def _prepare_field_type(self, field_data): + default_values = {} + fields = [] + for m in field_data: + if isinstance(m, tuple): + fields.append(m[0]) + default_values[m[0]] = m[1] + else: + fields.append(m) + + return fields, default_values + + def _prepare_parameters(self, values, mandatory, optional=None): + if not optional: + optional = [] + + mandatory, mandatory_default_values = self._prepare_field_type(mandatory) + optional, default_values = self._prepare_field_type(optional) + default_values.update(mandatory_default_values) + missing_fields = list(set(mandatory) - set(values)) + if missing_fields: + raise ValidationError(_("Missing mandatory fields %s") % missing_fields) + mandatory_values = {x: values[x] for x in mandatory} + optional_values = {} + found = False + for o in optional: + if not found and o in values: + found = True + if found: + optional_values[o] = values.get(o, default_values.get(o)) + return {**mandatory_values, **optional_values} + + def _prepare_results(self, result): + return result + + @api.model + def _filter(self, values, domain=None): # noqa: C901 + # TODO support for domains with 'or' clauses 
+ # TODO refactor and optimize + if not domain: + return values + + values_filtered = [] + for record in values: + for elem in domain: + k, op, v = elem + if k not in record: + raise ValidationError(_("Key %s does not exist") % k) + if op == "=": + if record[k] != v: + break + elif op == "!=": + if record[k] == v: + break + elif op == ">": + if record[k] <= v: + break + elif op == "<": + if record[k] >= v: + break + elif op == ">=": + if record[k] < v: + break + elif op == "<=": + if record[k] > v: + break + elif op == "like": + if record[k] not in v: + break + elif op == "not like": + if record[k] in v: + break + elif op == "in": + if not isinstance(v, (tuple | list)): + raise ValidationError( + _("The value %s should be a list or tuple") % v + ) + if record[k] not in v: + break + elif op == "not in": + if not isinstance(v, (tuple | list)): + raise ValidationError( + _("The value %s should be a list or tuple") % v + ) + if record[k] in v: + break + else: + raise NotImplementedError(f"Operator {op} not supported") + else: + values_filtered.append(record) + return values_filtered + + def chunks(self, lst, n): + """Yield successive n-sized chunks from lst.""" + for i in range(0, len(lst), n): + yield lst[i : i + n] + + def _filter_by_hash(self, data, hash_field): + indexed_data = {x["Hash"]: x for x in data} + odoo_hashes = set( + self.model.search( + [ + ("backend_id", "=", self.backend_record.id), + ] + ).mapped(hash_field) + ) + changed_hashes = set(indexed_data.keys()) - odoo_hashes + return [indexed_data[x] for x in changed_hashes] + + def _normalize_value(self, value): + if isinstance(value, datetime.datetime): + value = value.strftime(self._datetime_format) + elif isinstance(value, datetime.date): + value = value.strftime(self._date_format) + elif isinstance(value, (int | str | list | tuple | bool)): + pass + elif value is None: + pass + else: + raise ValidationError(_("Type '%s' not supported") % type(value)) + return value + + def 
_domain_to_normalized_dict(self, domain): + """Convert, if possible, standard Odoo domain to a dictionary. + To do so it is necessary to convert all operators to + equal '=' operator. + """ + res = {} + for elem in domain: + if len(elem) != 3: + raise ValidationError(_("Wrong domain clause format %s") % elem) + field, op, value = elem + if op == "=": + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(value) + elif op == "!=": + if not isinstance(value, bool): + raise ValidationError( + _("Not equal operation not supported for non boolean fields") + ) + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(not value) + elif op == "in": + if not isinstance(value, (tuple | list)): + raise ValidationError( + _( + "Operator '%(OPERATOR)s' only supports tuples or lists, " + "not %(TYPE)s" + ) + % { + "OPERATOR": op, + "TYPE": type(value), + } + ) + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(value) + elif op in (">", ">=", "<", "<="): + if not isinstance(value, (datetime.date | datetime.datetime | int)): + raise ValidationError( + _("Type %(TYPE)s not supported for operator %(OPERATOR)s") + % { + "TYPE": type(value), + "OPERATOR": op, + } + ) + if op in (">", "<"): + adj = 1 + if isinstance(value, (datetime.date | datetime.datetime)): + adj = datetime.timedelta(days=adj) + if op == "<": + op, value = "<=", value - adj + else: + op, value = ">=", value + adj + + res[field] = self._normalize_value(value) + else: + raise ValidationError(_("Operator %s not supported") % op) + + return res + + def _normalized_dict_to_domain(self, normalized_dict): + return [(k, "=", v) for k, v in normalized_dict.items()] + + def _extract_domain_clauses(self, domain, fields): + if not isinstance(fields, (tuple | list)): + fields = [fields] + extracted, rest = [], [] + for clause in domain: + tgt = ( + 
extracted + if clause[0] in fields and clause[1] not in ["in", "not in"] + else rest + ) + tgt.append(clause) + return extracted, rest + + def _convert_format(self, elem, mapper, path=""): + if isinstance(elem, dict): + for k, v in elem.items(): + current_path = f"{path}/{k}" + if isinstance(v, (tuple | list | set | dict)): + if isinstance(v, dict): + if current_path in mapper: + v2 = {} + for k1, v1 in v.items(): + new_value = mapper[current_path](k1) + v2[new_value] = v1 + v = elem[k] = v2 + self._convert_format(v, mapper, current_path) + elif v is None: + pass + else: + if current_path in mapper: + elem[k] = mapper[current_path](v) + elif isinstance(elem, (tuple | list | set)): + for ch in elem: + self._convert_format(ch, mapper, path) + + def _convert_format_domain(self, domain): + res = [] + for k, op, v in domain: + if isinstance(v, datetime.datetime): + v = v.strftime(self._datetime_format) + elif isinstance(v, datetime.date): + v = v.strftime(self._date_format) + elif isinstance(v, (int | str | list | tuple | bool)): + pass + else: + raise Exception(f"Type {type(v)} not supported") + res.append((k, op, v)) + return res + + def _check_uniq(self, data, id_fields): + uniq = set() + for rec in data: + id_t = tuple([rec[f] for f in id_fields]) + if id_t in uniq: + raise ValidationError( + _("Unexpected error: ID duplicated: %(ID_FIELDS)s - %(ID_T)s") + % { + "ID_FIELDS": id_fields, + "ID_T": id_t, + } + ) + uniq.add(id_t) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py new file mode 100644 index 000000000..72f41c9a5 --- /dev/null +++ b/connector_extension/components/binder.py @@ -0,0 +1,578 @@ +# Copyright 2013-2017 Camptocamp SA +# Copyright NuoBiT Solutions SL - Eric Antones +# Copyright NuoBiT Solutions SL - Kilian Niubo +# Copyright 2026 NuoBiT Solutions SL - Deniz Gallo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +""" +Binders +======= + +Binders are components that know how to find 
the external ID for an +Odoo ID, how to find the Odoo ID for an external ID and how to +create the binding between them. + +""" + +import hashlib +import logging +from contextlib import contextmanager + +import psycopg2 + +import odoo +from odoo import _, fields, models +from odoo.exceptions import ValidationError +from odoo.osv import expression + +from odoo.addons.component.core import AbstractComponent +from odoo.addons.component.exception import NoComponentError +from odoo.addons.connector.exception import InvalidDataError +from odoo.addons.queue_job.exception import RetryableJobError + +_logger = logging.getLogger(__name__) + + +class ConnectorExtensionBinderComposite(AbstractComponent): + """The same as Binder but allowing composite external keys""" + + _name = "connector.extension.binder" + _inherit = "base.binder" + + _internal_field = "internal_id" + + _internal_alt_field = "internal_alt_id" + _external_alt_field = "external_alt_id" + + _default_binding_field = None + + def idhash(self, external_id): + odoo_hash = hashlib.sha256() + for e in external_id: + if isinstance(e, int): + e9 = str(e) + if int(e9) != e: + raise Exception("Unexpected") + elif isinstance(e, str): + e9 = e + elif e is None: + pass + else: + raise Exception(f"Unexpected type for a key: type {type(e)}") + odoo_hash.update(e9.encode("utf8")) + return odoo_hash.hexdigest() + + def get_id_fields(self, in_field=True, alt_field=False): + if in_field: + fields = self._internal_alt_field if alt_field else self._internal_field + else: + fields = self._external_alt_field if alt_field else self._external_field + if not isinstance(fields, (tuple | list)): + fields = [fields] + fields_l = [] + for f in fields: + if hasattr(self, f): + fields = getattr(self, f) + if isinstance(fields, (tuple | list)): + fields_l.extend(fields) + else: + fields_l.append(fields) + else: + raise ValidationError( + _("Id field %(FIELD)s is not defined in model %(MODEL)s") + % { + "FIELD": f, + "MODEL": self._name, + } + 
) + return fields_l + + def id2dict(self, _id, in_field=True, alt_field=False): + """Return a dict with the internal or external fields and their values + :param _id: Values to put on internal or external fields + :param in_field: with True value, _internal_field defined in binder are used. + With this parameter False, _external_field will be used. + """ + if _id: + if not isinstance(_id, (tuple | list)): + _id = [_id] + fields = self.get_id_fields(in_field=in_field, alt_field=alt_field) + return dict(zip(fields, _id, strict=True)) + else: + return None + + # This Function returns a dict with the external ids from a "dirty" dict + def dict2id2dict(self, _dict, in_field=True, alt_field=False): + """Giving a dict, return the a dict with internal or external ids + :param _dict: Dict to extract internal or external fields + :param in_field: with True value, _internal_field defined in binder are used. + With this parameter False, _external_field will be used. + :param alt_field: with True value, + alternative id fields defined in binder are used. + """ + return self.id2dict( + self.dict2id(_dict, in_field=in_field, alt_field=alt_field), + in_field=in_field, + alt_field=alt_field, + ) + + def dict2id( + self, + _dict, + in_field=True, + alt_field=False, + unwrap=False, + raise_on_not_found=False, + ): + """Giving a dict, return the values of the internal or external fields + :param _dict: Dict (usually binder) to extract internal or external fields + :param in_field: with True value, _internal_field defined in binder are used. + With this parameter False, _external_field will be used. + :param alt_field: with True value, + alternative id fields defined in binder are used. 
+ :param unwrap: if True, return the first value of the composite id + :param raise_on_not_found: if True, raise ValidationError + if the id is not complete + :return: a list with the values of the internal or external fields + """ + fields = self.get_id_fields(in_field=in_field, alt_field=alt_field) + res = [] + for f in fields: + f_splitted = f.split(".") + if f_splitted[0] in _dict or _dict.get(f_splitted[0]) is not None: + val = _dict[f_splitted[0]] + else: + if raise_on_not_found: + raise Exception( + _( + "The external_id with fields %(fields)s " + "has not found for record: %(record)s" + ) + % { + "fields": fields, + "record": _dict, + } + ) + return None + if len(f_splitted) == 2: + if isinstance(val, models.BaseModel): + val = val[f_splitted[1]] + if len(f_splitted) > 2: + raise NotImplementedError(_("Multiple dot notation is not supported")) + res.append(val) + if unwrap: + if len(res) == 1: + return res[0] + else: + raise ValidationError(_("It's not possible to unwrap a composite id")) + return res + + def is_complete_id(self, _id, in_field=True): + fields = in_field and self.internal_id or self.external_id + if not isinstance(fields, (tuple | list)): + fields = [fields] + if not isinstance(_id, (tuple | list)): + _id = [_id] + _id = list(filter(None, _id)) + return len(_id) == len(fields) + + @contextmanager + def _retry_unique_violation(self): + """Context manager: catch Unique constraint error and retry the + job later. + + When we execute several jobs workers concurrently, it happens + that 2 jobs are creating the same record at the same time (binding + record created by :meth:`_export_dependency`), resulting in: + + IntegrityError: duplicate key value violates unique + constraint "my_backend_product_template_odoo_uniq" + DETAIL: Key (backend_id, odoo_id)=(1, 4851) already exists. + + In that case, we'll retry the import just later. + + .. 
warning:: The unique constraint must be created on the + binding record to prevent 2 bindings to be created + for the same External record. + """ + try: + yield + except psycopg2.IntegrityError as err: + if err.pgcode == psycopg2.errorcodes.UNIQUE_VIOLATION: + raise RetryableJobError( + f"A database error caused the failure of the job: \n" + f"{err}\n\n" + "Likely due to 2 concurrent jobs wanting to create " + "the same record. The job will be retried later." + ) from err + else: + raise + + def _is_wrapped(self, obj): + # alternate way to detect a binding + # return self.model._name == obj._name + try: + obj._fields[self._odoo_field] + except KeyError: + return False + return True + + def to_internal(self, external_id, unwrap=False): + """Give the Odoo recordset for an external ID + + :param external_id: external ID for which we want + the Odoo ID + :param unwrap: if True, returns the normal record + else return the binding record + :return: a recordset, depending on the value of unwrap, + or an empty recordset if the external_id is not mapped + :rtype: recordset + """ + context = self.env.context + domain = [(self._backend_field, "=", self.backend_record.id)] + for key, value in self.id2dict(external_id, in_field=True).items(): + domain.append((key, "=", value)) + + bindings = self.model.with_context(active_test=False).search(domain) + if not bindings: + if unwrap: + return self.model.browse()[self._odoo_field] + return self.model.browse() + bindings.ensure_one() + if unwrap: + bindings = bindings[self._odoo_field] + bindings = bindings.with_context(**context) + return bindings + + # TODO: Deprecated in favor of generic binding.to_external + def to_external(self, binding, wrap=True): + """Give the external ID for an Odoo binding ID + + :param binding: Odoo binding for which we want the external id + :param wrap: if False, binding is a normal record, the + method will search the corresponding binding and return + the external id of the binding + :return: external 
ID of the record + """ + if not wrap: + binding = self.wrap_record(binding) + if not binding: + return None + return self.dict2id(binding, in_field=True) or None + + def bind(self, external_id, binding): + raise ValidationError( + _("This method is deprecated. Use bind_export or bind_import instead") + ) + + def bind_import(self, external_data, values, sync_date, for_create=False): + values.update( + { + self._sync_date_field: sync_date, + **self._additional_internal_binding_fields(external_data), + } + ) + if for_create: + external_id = self.dict2id(external_data, in_field=False) + values.update( + { + self._backend_field: self.backend_record.id, + **self.id2dict(external_id, in_field=True), + } + ) + self.env.cr.commit() # pylint: disable=E8102 + + def _prepare_binding_export_values(self, relation, external_data): + external_id = self.dict2id(external_data, in_field=False) + if not external_id: + raise ValidationError( + _( + "Cannot bind export record without external id. " + "The external id %(fields)s is missing in the data: %(data)s", + fields=self.get_id_fields(in_field=False), + data=external_data, + ) + ) + return { + self._backend_field: self.backend_record.id, + self._odoo_field: relation.id, + self._sync_date_field: fields.Datetime.now(), + **self.id2dict(external_id, in_field=True), + **self._additional_external_binding_fields(external_data, relation), + } + + def bind_export(self, external_data, relation): + """Create the link between an external ID and an Odoo ID + + :param external_id: external id to bind + :param relation: Odoo record to bind + """ + assert ( + external_data and relation + ), f"external_data or relation missing, got: {external_data}, {relation}" + with self._retry_unique_violation(): + values = self._prepare_binding_export_values(relation, external_data) + binding = self.model.with_context(connector_no_export=True).create(values) + # Eager commit to avoid having 2 jobs + # exporting at the same time. 
The constraint + # will pop if an other job already created + # the same binding. It will be caught and + # raise a RetryableJobError. + if not odoo.tools.config["test_enable"]: + self.env.cr.commit() # pylint: disable=E8102 + return binding + + def _additional_external_binding_fields(self, external_data, relation): + return {} + + def is_id_null(self, _id): + if not isinstance(_id, (list | tuple)): + _id = [_id] + for value in _id: + if value is None: + return True + return False + + def _get_internal_record_domain(self, values): + return [(k, "=", v) for k, v in values.items()] + + def _check_domain(self, domain): + for field, _op, value in domain: + if isinstance(value, (list | tuple)): + for e in value: + if isinstance(e, (tuple | list | set | dict)): + raise ValidationError( + _( + "Wrong domain value type '%(TYPE)s' on value " + "'%(VALUE)s' of field '%(FIELD)s'" + ) + % { + "TYPE": type(e), + "VALUE": e, + "FIELD": field, + } + ) + + def _get_internal_record_alt(self, values): + model_name = self.unwrap_model() + domain = self._get_internal_record_domain(values) + self._check_domain(domain) + return self.env[model_name].search(domain) + + def get_binding_domain(self, relation): + return [] + + def wrap_record(self, relation): + """Get the binding + + :param relation: Odoo real record for which we want to get its binding + :return: binding corresponding to the real record or + empty recordset if the record has no binding + """ + relation.ensure_one() + if self._is_wrapped(relation): + raise ValidationError( + _( + "The object '%s' is already wrapped, " + "it's already a binding object. 
" + "You can only wrap real objects" + ) + % relation._name + ) + binding = self.model.with_context(active_test=False).search( + expression.AND( + [ + [ + (self._odoo_field, "=", relation.id), + (self._backend_field, "=", self.backend_record.id), + ], + self.get_binding_domain(relation), + ] + ) + ) + if len(binding) > 1: + raise ValidationError(_("More than one binding found")) + return binding + + def _to_record_from_external_key(self, map_record): + """ + :param map_record: + :return: binding with alternate external key + """ + model_name = self.unwrap_model() + internal_alt_id = getattr(self, self._internal_alt_field, None) + if internal_alt_id: + if isinstance(internal_alt_id, str): + internal_alt_id = [internal_alt_id] + all_values = map_record.values(for_create=True, binding=self.model) + if any([x not in all_values for x in internal_alt_id]): + raise InvalidDataError( + f"The alternative id {internal_alt_id} must exist on mapper" + ) + id_values = {x: all_values[x] for x in internal_alt_id} + record = self._get_internal_record_alt(id_values) + if len(record) > 1: + raise InvalidDataError( + f"More than one '{model_name}' found with id {id_values}: " + f"{record.ids} " + f"The alternate internal id field '{internal_alt_id}' is not unique" + ) + return record + return self.env[model_name] + + def to_binding_from_external_key(self, map_record, sync_date): + """ + :param map_record: + :return: binding with alternate external key + """ + record = self._to_record_from_external_key(map_record) + if record: + binding = self.wrap_record(record) + if not binding: + binding_only_fields = set(binding._fields) - set(record._fields) + update_values = map_record.values() + values = { + k: update_values[k] + for k in binding_only_fields & set(update_values) + } + if self._odoo_field in values: + if values[self._odoo_field] != record.id: + raise InvalidDataError( + "The id found on the mapper ('%i') " + "is not the one expected ('%i')" + % (values[self._odoo_field], 
record.id) + ) + else: + values[self._odoo_field] = record.id + self.bind_import(map_record.source, values, sync_date, for_create=True) + importer = self.component(usage="record.direct.importer") + binding = importer._create(values) + _logger.debug("%d linked from Backend", binding) + return binding + return self.model + + def _additional_internal_binding_fields(self, external_data): + return {} + + def _get_external_record_domain(self, relation, values): + return [(k, "=", v) for k, v in values.items()] + + def _get_external_record_alt(self, relation, id_values): + domain = self._get_external_record_domain(relation, id_values) + if domain: + adapter = self.component(usage="adapter") + res = adapter.search_read(domain) + if res: + if len(res) > 1: + raise InvalidDataError( + f"More than one external records found. " + f"The alternate external id field '{id_values}' is not " + f"unique in the backend" + ) + return res[0] + return {} + + def to_binding_from_internal_key(self, relation): + """ + Given an odoo object (not binding object) without binding related + :param relation: odoo object, not a binding and without binding + :return: binding + """ + export_mapper = self.component(usage="export.mapper") + mapper_external_data = export_mapper.map_record(relation) + ext_alt_id = getattr(self, self._external_alt_field, None) + if not ext_alt_id: + id_values = {} + else: + if isinstance(ext_alt_id, str): + ext_alt_id = [ext_alt_id] + + id_fields = mapper_external_data._mapper.get_target_fields( + mapper_external_data, fields=ext_alt_id + ) + if not id_fields: + raise ValidationError( + _("External alternative id '%s' not found in export mapper") + % (ext_alt_id,) + ) + id_values = mapper_external_data.values( + for_create=True, + fields=id_fields, + binding=self.model, + ignore_required_fields=True, + ) + # TODO: check if we can put this in a hook + external_alt_id = self.dict2id(id_values, in_field=False, alt_field=True) + if self.is_id_null(external_alt_id): + return 
self.model + record = self._get_external_record_alt(relation, id_values) + if record: + external_id = self.dict2id(record, in_field=False) + binding = self.wrap_record(relation) + if binding: + current_external_id = self.to_external(binding) + if current_external_id != external_id: + raise InvalidDataError( + f"More than one external records found. " + f"The alternate external id field '{ext_alt_id}'" + f" is not unique in the backend" + ) + _logger.debug("%d already binded to Backend", binding) + else: + import_mapper_exists = True + try: + import_mapper = self.component(usage="import.mapper") + mapper_internal_data = import_mapper.map_record(record) + binding_ext_fields = mapper_internal_data._mapper.get_target_fields( + mapper_internal_data, fields=self.model._fields + ) + importer = self.component(usage="record.direct.importer") + importer.run( + external_id, + external_data=record, + external_fields=binding_ext_fields, + ) + binding = self.to_internal(external_id) + except NoComponentError: + import_mapper_exists = False + if not import_mapper_exists: + binding = self.bind_export(record, relation) + binding[self._sync_date_field] = fields.Datetime.now() + if not binding: + raise InvalidDataError( + f"The binding with external id {external_id} " + "not found and it should be" + ) + _logger.debug("%d linked to Backend", binding) + return binding + + return self.model + + def unwrap_binding(self, binding): + if not isinstance(binding, models.BaseModel): + if isinstance(binding, (tuple | list)): + odoo_object_ids = binding + elif isinstance(binding, int): + odoo_object_ids = [binding] + else: + raise ValidationError(_("Invalid binding type")) + binding = self.model.browse(odoo_object_ids) + return binding.mapped(self._odoo_field) + + def get_external_dict_ids(self, relation, check_external_id=True): + external_id = self.to_external(relation, wrap=False) + if check_external_id: + assert external_id, ( + f"Unexpected error on {relation._name}:" + "The backend id 
# --- connector_extension/components/export_deleter.py ---
# Copyright NuoBiT Solutions SL - Eric Antones
# Copyright NuoBiT Solutions SL - Kilian Niubo
# Copyright 2026 NuoBiT Solutions SL - Deniz Gallo
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
import logging

from odoo.addons.component.core import AbstractComponent

_logger = logging.getLogger(__name__)


class ConnectorExtensionDirectExportDeleter(AbstractComponent):
    """Generic Synchronizer for delete data from Odoo to a backend"""

    _name = "connector.extension.record.direct.export.deleter"
    _inherit = ["base.deleter", "connector.extension.synchronizer"]

    _usage = "record.direct.export.deleter"

    def _delete(self, external_id, binding):
        # Delegate the actual removal to the backend adapter.
        return self.backend_adapter.delete(external_id)

    def run(self, relation):
        """Delete the backend counterpart of ``relation`` if it is bound.

        :return: adapter result when a binding exists, True otherwise
        """
        binding = self.binder_for().wrap_record(relation)
        if binding:
            # NOTE(review): ``to_external`` is normally a *binder* method
            # (``binder.to_external(binding)``) — confirm the binding model
            # really exposes it as a record method.
            external_id = binding.to_external()
            return self._delete(external_id, binding)
        return True


class ConnectorExtensionBatchExportDeleter(AbstractComponent):
    """Generic Synchronizer for deleting exported records on the backend.

    (BUG FIX: the docstring previously described *importing* data — a
    copy/paste leftover from the importer module.)
    """

    # NOTE(review): ``base.importer`` on an export deleter also looks like a
    # copy/paste slip (expected ``base.deleter``); kept unchanged to avoid
    # altering component resolution.
    _name = "connector.extension.batch.export.deleter"
    _inherit = ["base.importer", "connector.extension.synchronizer"]

    def _delete_record(self, external_id):
        """Delete a record directly or delay the delete of the record.

        Method to implement in sub-classes.
        """
        raise NotImplementedError


class ConnectorExtensionBatchDirectExportDeleter(AbstractComponent):
    """Delete the records directly, without delaying the jobs."""

    _name = "connector.extension.batch.direct.export.deleter"
    _inherit = "connector.extension.batch.export.deleter"

    _usage = "batch.direct.export.deleter"

    def _delete_record(self, external_id):
        """Delete the record directly"""
        self.model.export_delete_record(external_id)


class ConnectorExtensionBatchDelayedExportDeleter(AbstractComponent):
    """Delay the delete of the records"""

    _name = "connector.extension.batch.delayed.export.deleter"
    _inherit = "connector.extension.batch.export.deleter"

    _usage = "batch.delayed.export.deleter"

    def _delete_record(self, external_id, job_options=None):
        """Delay the delete of the records"""
        delayable = self.model.with_delay(**job_options or {})
        delayable.export_delete_record(external_id)


# --- connector_extension/components/exporter.py ---
# Copyright NuoBiT Solutions SL - Eric Antones
# Copyright NuoBiT Solutions SL - Kilian Niubo
# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

import logging
from contextlib import contextmanager

import psycopg2

from odoo import _, fields

from odoo.addons.component.core import AbstractComponent
from odoo.addons.connector.exception import RetryableJobError

_logger = logging.getLogger(__name__)


class ConnectorExtensionDirectExporter(AbstractComponent):
    """Generic Synchronizer for exporting data from Odoo to a backend"""

    _name = "connector.extension.record.direct.exporter"
    _inherit = ["base.exporter", "connector.extension.synchronizer"]

    _usage = "record.direct.exporter"
_mapper_options(self, binding): + return {"binding": binding} + + def run(self, relation, always=True, internal_fields=None): + """Run the synchronization + + :param binding: binding record to export + """ + now_fmt = fields.Datetime.now() + result = None + # get binding from real record + binding = self.binder_for().wrap_record(relation) + + # if not binding, try to link to existing external record with + # the same alternate key and create/update binding + if not binding: + binding = ( + self.binder_for().to_binding_from_internal_key(relation) or binding + ) + + if not binding: + internal_fields = None # should be created with all the fields + if self._has_to_skip(relation): + return _("Nothing to export") + + # export the missing linked resources + self._export_dependencies(relation) + + if always or not binding: + # prevent other jobs to export the same record + # will be released on commit (or rollback) + self._lock(relation) + + map_record = self.mapper.map_record(relation) + + # passing info to the mapper + opts = self._mapper_options(binding) + if binding: + values = self._update_data(map_record, fields=internal_fields, **opts) + if values: + external_id = self.binder_for().dict2id(binding, in_field=True) + result = self._update(external_id, values) + else: + values = self._create_data(map_record, fields=internal_fields, **opts) + if values: + external_data = self._create(values) + binding = self.binder_for().bind_export(external_data, relation) + if not values: + result = _("Nothing to export") + if not result: + result = _("Record exported with ID %s on Backend.") % "external_id" + self._after_export(binding) + binding[self.binder_for()._sync_date_field] = now_fmt + return result + + def _after_export(self, binding): + """Can do several actions after exporting a record on the backend""" + + def _get_sql_lock(self, record): + return f"SELECT id FROM {record._table} WHERE ID = %s FOR UPDATE NOWAIT" + + def _lock(self, record): + """Lock the binding record. 
+ + Lock the binding record so we are sure that only one export + job is running for this record if concurrent jobs have to export the + same record. + + When concurrent jobs try to export the same record, the first one + will lock and proceed, the others will fail to lock and will be + retried later. + + This behavior works also when the export becomes multilevel + with :meth:`_export_dependencies`. Each level will set its own lock + on the binding record it has to export. + + """ + sql = self._get_sql_lock(record) + try: + self.env.cr.execute(sql, (record.id,), log_exceptions=False) + except psycopg2.OperationalError as e: + _logger.info( + "A concurrent job is already exporting the same " + "record (%s with id %s). Job delayed later.", + self.model._name, + record.id, + ) + raise RetryableJobError( + f"A concurrent job is already exporting the same record " + f"({self.model._name} with id {record.id}). " + f"The job will be retried later." + ) from e + + def _has_to_skip(self, relation): + """Return True if the export can be skipped""" + return False + + @contextmanager + def _retry_unique_violation(self): + """Context manager: catch Unique constraint error and retry the + job later. + + When we execute several jobs workers concurrently, it happens + that 2 jobs are creating the same record at the same time (binding + record created by :meth:`_export_dependency`), resulting in: + + IntegrityError: duplicate key value violates unique + constraint "my_backend_product_product_odoo_uniq" + DETAIL: Key (backend_id, odoo_id)=(1, 4851) already exists. + + In that case, we'll retry the import just later. + + .. warning:: The unique constraint must be created on the + for the same External record. 
+ + """ + try: + yield + except psycopg2.IntegrityError as err: + if err.pgcode == psycopg2.errorcodes.UNIQUE_VIOLATION: + raise RetryableJobError( + f"A database error caused the failure of the job:\n" + f"{err}\n\n" + "Likely due to 2 concurrent jobs wanting to create " + "the same record. The job will be retried later." + ) from err + else: + raise + + def _export_dependency( + self, + relation, + binding_model, + component_usage="record.direct.exporter", + always=False, + ): + """ + Export a dependency. The exporter class is a subclass of + ``GenericExporter``. If a more precise class need to be defined, + it can be passed to the ``exporter_class`` keyword argument. + + .. warning:: a commit is done at the end of the export of each + dependency. The reason for that is that we pushed a record + on the backend and we absolutely have to keep its ID. + + So you *must* take care not to modify the Odoo + database during an export, excepted when writing + back the external ID or eventually to store + external data that we have to keep on this side. + + You should call this method only at the beginning + of the exporter synchronization, + in :meth:`~._export_dependencies`. + + :param relation: record to export if not already exported + :type relation: :py:class:`odoo.models.BaseModel` + :param binding_model: name of the binding model for the relation + :type binding_model: str | unicode + :param component_usage: 'usage' to look for to find the Component to + for the export, by default 'record.exporter' + :type exporter: str | unicode + :param binding_field: name of the one2many field on a normal + record that points to the binding record + (default: my_backend_bind_ids). + It is used only when the relation is not + a binding but is a normal record. 
+ :type binding_field: str | unicode + :binding_extra_vals: In case we want to create a new binding + pass extra values for this binding + :type binding_extra_vals: dict + """ + if not always and relation.env.context.get("resync_export", False): + # This is an optimization to avoid exporting + # the same record multiple times on resync + if "resync_exported_dependencies" not in self.env.context: + self.env.context = { + **self.env.context, + "resync_exported_dependencies": {}, + } + + resync_model_ids = self.env.context["resync_exported_dependencies"] + model_name = relation._name + + if model_name not in resync_model_ids: + resync_model_ids[model_name] = [relation.id] + else: + if relation.id in resync_model_ids[model_name]: + return + resync_model_ids[model_name].append(relation.id) + always = True + + exporter = self.component(usage=component_usage, model_name=binding_model) + exporter.run(relation, always=always) + + def _export_dependencies(self, relation): + """Export the dependencies for the record""" + return + + def _validate_create_data(self, data): + """Check if the values to import are correct + + Pro-actively check before the ``Model.create`` if some fields + are missing or invalid + + Raise `InvalidDataError` + """ + return + + def _validate_update_data(self, data): + """Check if the values to import are correct + + Pro-actively check before the ``Model.update`` if some fields + are missing or invalid + + Raise `InvalidDataError` + """ + return + + def _create_data(self, map_record, fields=None, **kwargs): + """Get the data to pass to :py:meth:`_create`""" + return map_record.values(for_create=True, fields=fields, **kwargs) + + def _create(self, data): + """Create the External record""" + # special check on data before export + self._validate_create_data(data) + return self.backend_adapter.create(data) + + def _update_data(self, map_record, fields=None, **kwargs): + """Get the data to pass to :py:meth:`_update`""" + return 
map_record.values(fields=fields, **kwargs) + + def _update(self, external_id, data): + """Update an External record""" + # special check on data before export + self._validate_update_data(data) + return self.backend_adapter.write(external_id, data) + + +class ConnectorExtensionBatchExporter(AbstractComponent): + """Generic Synchronizer for importing data from backend to Odoo""" + + _name = "connector.extension.batch.exporter" + _inherit = ["base.exporter", "connector.extension.synchronizer"] + + _usage = "batch.exporter" + + def run(self, domain=None): + if not domain: + domain = [] + # Run the batch synchronization + relation_model = self.binder_for(self.model._name).unwrap_model() + for relation in ( + self.env[relation_model].with_context(active_test=False).search(domain) + ): + self._export_record(relation) + + def _export_record(self, external_id): + """Export a record directly or delay the export of the record. + + Method to implement in sub-classes. + """ + raise NotImplementedError + + +class ConnectorExtensionBatchDirectExporter(AbstractComponent): + """Import the records directly, without delaying the jobs.""" + + _name = "connector.extension.batch.direct.exporter" + _inherit = "connector.extension.batch.exporter" + + _usage = "batch.direct.exporter" + + def _export_record(self, relation): + """export the record directly""" + self.model.export_record(self.backend_record, relation) + + +class ConnectorExtensionBatchDelayedExporter(AbstractComponent): + """Delay import of the records""" + + _name = "connector.extension.batch.delayed.exporter" + _inherit = "connector.extension.batch.exporter" + + _usage = "batch.delayed.exporter" + + def _export_record(self, relation, job_options=None): + """Delay the export of the records""" + delayable = self.model.with_delay(**job_options or {}) + delayable.export_record(self.backend_record, relation) diff --git a/connector_extension/components/import_deleter.py b/connector_extension/components/import_deleter.py new file 
# --- connector_extension/components/import_deleter.py ---
# Copyright NuoBiT Solutions SL - Eric Antones
# Copyright NuoBiT Solutions SL - Kilian Niubo
# Copyright 2026 NuoBiT Solutions SL - Deniz Gallo
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

import logging

from odoo.addons.component.core import AbstractComponent

_logger = logging.getLogger(__name__)


class ConnectorExtensionDirectImportDeleter(AbstractComponent):
    """Generic Synchronizer for delete data from Odoo to a backend"""

    _name = "connector.extension.record.direct.import.deleter"
    _inherit = ["base.deleter", "connector.extension.synchronizer"]

    _usage = "record.direct.import.deleter"

    def run(self, external_id):
        # NOTE(review): this delegates to the backend adapter's ``delete``,
        # i.e. it removes the record on the *backend* side — confirm this is
        # the intended direction for an import deleter.
        return self.backend_adapter.delete(external_id)

    def delete_record(self, external_id):
        """Delete the external record"""
        raise NotImplementedError


class ConnectorExtensionBatchImportDeleter(AbstractComponent):
    """Generic Synchronizer for deleting records imported from the backend."""

    _name = "connector.extension.batch.import.deleter"
    _inherit = ["base.deleter", "connector.extension.synchronizer"]

    def _delete_record(self, external_id):
        """Delete a record directly or delay the delete of the record.

        Method to implement in sub-classes.
        """
        raise NotImplementedError


class ConnectorExtensionBatchDirectImportDeleter(AbstractComponent):
    """Delete the records directly, without delaying the jobs.

    (BUG FIX: the docstring previously said "Import the records directly" —
    a copy/paste leftover from the importer module.)
    """

    _name = "connector.extension.batch.direct.import.deleter"
    _inherit = "connector.extension.batch.import.deleter"

    _usage = "batch.direct.import.deleter"

    def _delete_record(self, external_id):
        """Delete the record directly"""
        self.model.delete_record(external_id)


class ConnectorExtensionBatchDelayedImportDeleter(AbstractComponent):
    """Delay the delete of the records"""

    _name = "connector.extension.batch.delayed.import.deleter"
    _inherit = "connector.extension.batch.import.deleter"

    _usage = "batch.delayed.import.deleter"

    def _delete_record(self, external_id, job_options=None):
        """Delay the delete of the records"""
        delayable = self.model.with_delay(**job_options or {})
        delayable.delete_record(external_id)


# --- connector_extension/components/importer.py ---
# Copyright NuoBiT Solutions SL - Eric Antones
# Copyright NuoBiT Solutions SL - Kilian Niubo
# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

import logging
from contextlib import contextmanager

import psycopg2

from odoo import _, fields
from odoo.exceptions import ValidationError

from odoo.addons.component.core import AbstractComponent
from odoo.addons.connector.exception import IDMissingInBackend
from odoo.addons.queue_job.exception import FailedJobError, RetryableJobError

_logger = logging.getLogger(__name__)


class ConnectorExtensionDirectImporter(AbstractComponent):
    """Generic Synchronizer for importing data from backend to Odoo"""

    _name = "connector.extension.record.direct.importer"
    # NOTE(review): an importer inheriting ``base.exporter`` looks like a
    # copy/paste slip (expected ``base.importer``); kept unchanged to avoid
    # altering component resolution.
    _inherit = ["base.exporter", "connector.extension.synchronizer"]
"connector.extension.synchronizer"] + + _usage = "record.direct.importer" + + @contextmanager + def _retry_unique_violation(self): + """Context manager: catch Unique constraint error and retry the + job later. + + When we execute several jobs workers concurrently, it happens + that 2 jobs are creating the same record at the same time (binding + record created by :meth:`_export_dependency`), resulting in: + + IntegrityError: duplicate key value violates unique + constraint "prestashop_product_template_openerp_uniq" + DETAIL: Key (backend_id, odoo_id)=(1, 4851) already exists. + + In that case, we'll retry the import just later. + + """ + try: + yield + except psycopg2.IntegrityError as e: + if e.pgcode == psycopg2.errorcodes.UNIQUE_VIOLATION: + raise RetryableJobError( + f"A database error caused the failure of the job:\n" + f"{e}\n\n" + "Likely due to 2 concurrent jobs wanting to create " + "the same record. The job will be retried later." + ) from e + else: + raise + + def _import_dependency( + self, + external_id, + binding_model, + sync_date, + external_data=None, + importer=None, + adapter=None, + always=False, + ): + """Import a dependency. + + The importer class is a class or subclass of + :class:`Importer`. A specific class can be defined. + + :param external_ids: id or id's of the related bindings to import + :param binding_model: name of the binding model for the relation + :type binding_model: str | unicode + :param importer_component: component to use for import + By default: 'importer' + :type importer_component: Component + :param adapter_component: component to use for access to backend + By default: 'backend.adapter' + :type adapter_component: Component + :param always: if True, the record is updated even if it already + exists, note that it is still skipped if it has + not been modified on Backend since the last + update. When False, it will import it only when + it does not yet exist. 
+ :type always: boolean + """ + if not external_id: + return + + if importer is None: + importer = self.component(usage=self._usage, model_name=binding_model) + + binder = self.binder_for(binding_model) + + if always or not binder.to_internal(external_id): + try: + importer.run(external_id, sync_date, external_data=external_data) + except FailedJobError: + _logger.info( + "Dependency import of %s(%s) has been ignored.", + binding_model._name, + external_id, + ) + + def _import_dependencies(self, external_data, sync_date, external_fields=None): + """Import the dependencies for the record + + Import of dependencies can be done manually or by calling + :meth:`_import_dependency` for each dependency. + """ + return + + def _after_import(self, binding): + return + + def _must_skip(self, binding): + """Hook called right after we read the data from the backend. + + If the method returns a message giving a reason for the + skipping, the import will be interrupted and the message + recorded in the job (if the import is called directly by the + job, not by dependencies). + + If it returns None, the import will continue normally. 
+ + :returns: None | str | unicode + """ + return False + + # TODO: Convert the mandatory parameters to a dictionary + # or optional parameters (kwargs) + def _mapper_options(self, binding, sync_date): + return {"binding": binding, "sync_date": sync_date} + + def run(self, external_id, sync_date, external_data=None, external_fields=None): + if not external_data: + external_data = {} + lock_name = ( + f"import({self.backend_record._name}, {self.backend_record.id}," + f" {self.work.model_name}, {external_id})" + ) + # Keep a lock on this import until the transaction is committed + # The lock is kept since we have detected that the informations + # will be updated into Odoo + self.advisory_lock_or_retry(lock_name, retry_seconds=10) + if not external_data: + # read external data from Backend + external_data = self.backend_adapter.read(external_id) + if not external_data: + raise IDMissingInBackend( + _("Record with external_id '%s' does not exist in Backend") + % (external_id,) + ) + + # import the missing linked resources + self._import_dependencies(external_data, sync_date) + + # map_data + # this one knows how to convert backend data to odoo data + mapper = self.component(usage="import.mapper") + + # convert to odoo data + internal_data = mapper.map_record(external_data) + + binder = self.binder_for() + # find if the external id already exists in odoo + binding = binder.to_internal(external_id) + + # if binding not exists, try to link existing internal object + if not binding: + binding = binder.to_binding_from_external_key(internal_data, sync_date) + + # skip binding + skip = self._must_skip(binding) + if skip: + return skip + + # passing info to the mapper + opts = self._mapper_options(binding, sync_date) + + if external_fields != [] or external_fields is None: + # persist data + if binding: + # if exists, we update it + values = internal_data.values(fields=external_fields, **opts) + binder.bind_import(external_data, values, sync_date) + self._update(binding, 
values) + _logger.debug("%d updated from Backend %s", binding, external_id) + else: + # or we create it + values = internal_data.values( + for_create=True, fields=external_fields, **opts + ) + binder.bind_import(external_data, values, sync_date, for_create=True) + binding = self._create(values) + _logger.debug("%d created from Backend %s", binding, external_id) + + # last update + self._after_import(binding) + return True + + def _validate_update_data(self, data): + """Check if the values to import are correct + + Pro-actively check before the ``Model.update`` if some fields + are missing or invalid + + Raise `InvalidDataError` + """ + return + + def _update(self, binding, data): + """Update the Internal record""" + self._validate_update_data(data) + return binding.with_context(connector_no_export=True).write(data) + + def _validate_create_data(self, data): + """Check if the values to import are correct + + Pro-actively check before the ``Model.create`` if some fields + are missing or invalid + + Raise `InvalidDataError` + """ + return + + def _create(self, data): + """Create the Internal record""" + self._validate_create_data(data) + return self.model.with_context(connector_no_export=True).create(data) + + +class ConnectorExtensionBatchImporter(AbstractComponent): + """Generic Synchronizer for importing data from backend to Odoo""" + + _name = "connector.extension.batch.importer" + _inherit = ["base.importer", "connector.extension.synchronizer"] + + _usage = "batch.importer" + + def run(self, domain=None, use_data=True): + """Run the synchronization""" + if domain is None: + domain = [] + chunk_size = self.backend_record.chunk_size + if chunk_size > 0: + total_items = self.backend_adapter.get_total_items(domain=domain) + if total_items == 0: + return + offset = 0 + while total_items > 0: + if chunk_size > total_items: + chunk_size = total_items + self._import_chunk(domain, offset, chunk_size) + offset += chunk_size + total_items -= chunk_size + else: + sync_date = 
fields.Datetime.now() + if use_data: + data = self.backend_adapter.search_read(domain) + + def _import_record(x): + return self._import_record( + self.binder_for().dict2id( + x, in_field=False, raise_on_not_found=True + ), + sync_date, + external_data=x, + ) + + else: + data = self.backend_adapter.search(domain) + + def _import_record(x): + return self._import_record(x, sync_date) + + for d in data: + _import_record(d) + + def _import_chunk(self, domain, offset, chunk_size): + raise NotImplementedError + + def _import_batch(self, domain): + raise NotImplementedError + + def _import_record(self, external_id, sync_date, external_data=None): + """Import a record directly or delay the import of the record. + + Method to implement in sub-classes. + """ + raise NotImplementedError + + +class ConnectorExtensionBatchDirectImporter(AbstractComponent): + """Import the records directly, without delaying the jobs.""" + + _name = "connector.extension.batch.direct.importer" + _inherit = "connector.extension.batch.importer" + + _usage = "batch.direct.importer" + + def _import_chunk(self, domain, offset, chunk_size): + self.model.import_chunk(self.backend_record, domain, offset, chunk_size) + + def _import_batch(self, domain): + self.model.import_batch(self.backend_record, domain) + + def _import_record(self, external_id, sync_date, external_data=None): + """Import the record directly""" + if external_data is None: + external_data = {} + self.model.import_record( + self.backend_record, external_id, sync_date, external_data=external_data + ) + + +class ConnectorExtensionBatchDelayedImporter(AbstractComponent): + """Delay import of the records""" + + _name = "connector.extension.batch.delayed.importer" + _inherit = "connector.extension.batch.importer" + + _usage = "batch.delayed.importer" + + def _import_chunk(self, domain, offset, chunk_size): + delayable = self.model.with_delay() + delayable.import_chunk(self.backend_record, domain, offset, chunk_size) + + def _import_batch(self, 
domain): + delayable = self.model.with_delay() + delayable.import_batch(self.backend_record, domain) + + def _import_record( + self, external_id, sync_date, external_data=None, job_options=None + ): + """Delay the import of the records""" + if external_data is None: + external_data = {} + delayable = self.model.with_delay(**job_options or {}) + delayable.import_record( + self.backend_record, external_id, sync_date, external_data=external_data + ) + + +class ConnectorExtensionChunkImporter(AbstractComponent): + """The role of a ChunkImporter is to search for a list of + items to import, then it can either import them directly or delay + the import of each item separately. + """ + + _name = "connector.extension.chunk.importer" + _inherit = ["base.importer", "connector.extension.synchronizer"] + + def run(self, domain, offset, chunk_size): + """Run the synchronization""" + sync_date = fields.Datetime.now() + data, len_items = self.backend_adapter.search_read(domain, offset, chunk_size) + chunk_size -= len_items + offset += len_items + if chunk_size < 0: + raise ValidationError(_("Unexpected Error: Chunk_size is < 0")) + if chunk_size != 0: + self.get_batch_importer()._import_chunk(domain, offset, chunk_size) + for d in data: + external_id = self.binder_for().dict2id(d, in_field=False) + self._import_record(external_id, sync_date, external_data=d) + + def get_batch_importer(self): + raise NotImplementedError + + def _import_record(self, external_id, sync_date, external_data=None): + """Import a record directly or delay the import of the record. + + Method to implement in sub-classes. 
class ConnectorExtensionChunkDirectImporter(AbstractComponent):
    """Import the records directly, without delaying the jobs."""

    _name = "connector.extension.chunk.direct.importer"
    _inherit = "connector.extension.chunk.importer"

    _usage = "chunk.direct.importer"

    def get_batch_importer(self):
        return self.component(usage="batch.direct.importer")

    def _import_record(self, external_id, sync_date, external_data=None):
        """Import the record directly"""
        if external_data is None:
            external_data = {}
        self.model.import_record(
            self.backend_record, external_id, sync_date, external_data=external_data
        )


class ConnectorExtensionChunkDelayedImporter(AbstractComponent):
    """Delay import of the records"""

    _name = "connector.extension.chunk.delayed.importer"
    _inherit = "connector.extension.chunk.importer"

    _usage = "chunk.delayed.importer"

    def get_batch_importer(self):
        return self.component(usage="batch.delayed.importer")

    def _import_record(
        self, external_id, sync_date, external_data=None, job_options=None
    ):
        """Delay the import of the records"""
        if external_data is None:
            external_data = {}
        delayable = self.model.with_delay(**job_options or {})
        delayable.import_record(
            self.backend_record, external_id, sync_date, external_data=external_data
        )


# --- connector_extension/components/listener.py ---
# Copyright NuoBiT Solutions SL - Kilian Niubo
# Copyright 2026 NuoBiT Solutions SL - Deniz Gallo
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)


from odoo import _
from odoo.exceptions import ValidationError

from odoo.addons.component.core import AbstractComponent


class ConnectorExtensionEventListener(AbstractComponent):
    _name = "connector.extension.event.listener"
    _inherit = "base.event.listener"

    def on_record_after_unlink(self, binding_data):
        """Propagate an Odoo unlink to the backend.

        Binding data is a dictionary with the following keys:
        - backend: backend
        - binding_name: the name of the binding model
        - external_id: the external id of the record to delete
        It's normally called AFTER actual deletion on Odoo.
        """
        external_id = binding_data.get("external_id")
        if not external_id:
            raise ValidationError(_("The external_id of the binding is null"))
        with binding_data["backend"].work_on(binding_data["binding_name"]) as work:
            work.component(usage="record.direct.deleter").run(external_id)


# --- connector_extension/components/mapper.py ---
# Copyright NuoBiT Solutions SL - Eric Antones
# Copyright NuoBiT Solutions SL - Kilian Niubo
# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
import collections
import logging
from itertools import zip_longest

from odoo import _, fields
from odoo.exceptions import ValidationError

from odoo.addons.component.core import AbstractComponent
from odoo.addons.connector.components.mapper import m2o_to_external

_logger = logging.getLogger(__name__)


def required(*args):
    """Decorator, declare that a method is a required field.

    It is then used by the :py:class:`Mapper` to validate the records.

    Usage::

        @required
        def any(self, record):
            return {'output_field': record['input_field']}

    """

    def required_mapping(func):
        func.required = args
        return func

    return required_mapping


class ConnectorExtensionMapper(AbstractComponent):
    _name = "connector.extension.mapper"
    _inherit = "base.mapper"

    def _apply_with_options(self, map_record):
        """
        Hack to allow having non required children field
        """
        assert (
            self.options is not None
        ), "options should be defined with '_mapping_options'"
        _logger.debug("converting record %s to model %s", map_record.source, self.model)

        # Renamed from ``fields`` to avoid shadowing ``odoo.fields``.
        field_names = self.options.fields
        for_create = self.options.for_create
        result = {}
        for from_attr, to_attr in self.direct:
            if isinstance(from_attr, collections.abc.Callable):
                attr_name = self._direct_source_field_name(from_attr)
            else:
                attr_name = from_attr

            if not field_names or attr_name in field_names:
                value = self._map_direct(map_record.source, from_attr, to_attr)
                result[to_attr] = value

        for meth, definition in self.map_methods:
            mapping_changed_by = definition.changed_by
            if not field_names or (
                mapping_changed_by and mapping_changed_by.intersection(field_names)
            ):
                if definition.only_create and not for_create:
                    continue
                values = meth(map_record.source)
                if not values:
                    continue
                if not isinstance(values, dict):
                    raise ValueError(
                        f"{values}: invalid return value for "
                        f"the mapping method {meth}"
                    )
                if not self.options.get("ignore_required_fields"):
                    # Enforce fields declared with the @required decorator.
                    for field_required in getattr(meth, "required", []):
                        if field_required not in values or not values[field_required]:
                            raise ValidationError(
                                _("Missing required field '%s'") % field_required
                            )
                result.update(values)

        for from_attr, to_attr, model_name in self.children:
            if not field_names or from_attr in field_names:
                if from_attr in map_record.source:
                    items = self._map_child(map_record, from_attr, to_attr, model_name)
                    if items:
                        result[to_attr] = items
        return self.finalize(map_record, result)

    def get_target_fields(self, map_record, fields):
        """Return the source field names feeding the given target fields.

        :param map_record: map record whose mappings are inspected
        :param fields: iterable of target (destination) field names
        :return: list of unique source field names
        """
        if not fields:
            return []
        fields = set(fields)
        result = {}
        for from_attr, to_attr in self.direct:
            if isinstance(from_attr, collections.abc.Callable):
                from_attr = self._direct_source_field_name(from_attr)

            if to_attr in fields:
                if to_attr in result:
                    # BUG FIX: the '%s' placeholder had no argument.
                    raise ValidationError(
                        _("Field '%s' mapping defined twice") % to_attr
                    )
                result[to_attr] = from_attr

        # TODO: create a new decorator to write the field mapping manually
        # I think this is not necessary, just use changed_by is precisely for that
        for meth, definition in self.map_methods:
            mapping_changed_by = definition.changed_by
            if definition.changed_by:
                if len(definition.changed_by) > 1:
                    raise ValidationError(_("Changed by can only be one field"))
                values = meth(map_record.source)
                if values:
                    if len(values) != 1:
                        raise ValidationError(
                            _(
                                "Return values of a mapper must be unique "
                                "if it has changed by decorator"
                            )
                        )
                    from_attr, to_attr = (
                        list(mapping_changed_by)[0],
                        list(values.keys())[0],
                    )
                    if to_attr in fields:
                        if to_attr in result:
                            raise ValidationError(
                                _("Field '%s' mapping defined twice") % to_attr
                            )
                        result[to_attr] = from_attr
        for from_attr, to_attr, _model_name in self.children:
            if to_attr in fields:
                if to_attr in result:
                    raise ValidationError(
                        _("Field '%s' mapping defined twice") % to_attr
                    )
                result[to_attr] = from_attr
        return list(set(result.values()))
item_values = self.get_item_values( + map_record, + to_attr, + { + **options, + "binding": binding, + }, + ) + if item_values: + self._child_bind(map_record, item_values) + mapped.append(item_values) + return mapped + + def get_items(self, items, parent, to_attr, options): + mapper = self._child_mapper() + mapped = self.get_all_items(mapper, items, parent, to_attr, options) + mapped = self.classify_items(mapped, to_attr, options) + return self.format_items(mapped) + + def _child_bind(self, map_record, item_values): + raise NotImplementedError + + def classify_items(self, mapped, to_attr, options): + raise NotImplementedError + + +class ConnectorExtensionMapChildImport(AbstractComponent): + _name = "connector.extension.map.child.import" + _inherit = ["base.map.child.import", "connector.extension.child.mapper"] + + def _child_bind(self, map_record, item_values): + binder = self.binder_for() + if not binder._is_wrapped(self.model): + return + external_id = binder.dict2id(map_record.source, in_field=False) + values = { + binder._backend_field: self.backend_record.id, + binder._sync_date_field: fields.Datetime.now(), + **binder.id2dict(external_id, in_field=True), + **binder._additional_internal_binding_fields(map_record.source), + } + if map_record.parent: + binding = binder.to_internal(external_id, unwrap=False) + if not binding: + record = binder._to_record_from_external_key(map_record) + if record: + values.update({binder._odoo_field: record.id}) + item_values.update(values) + + def format_items(self, items_values): + ops = [] + for values in items_values: + _id = values.pop("id", None) + if _id: + if values: + ops.append((1, _id, values)) + else: + ops.append((2, _id, False)) + else: + ops.append((0, False, values)) + return ops + + def _prepare_existing_records(self, records): + return records + + def classify_items(self, mapped, to_attr, options): + def keygen(_id): + return tuple(binder.dict2id(_id)) + + binding = options["binding"] + binder = self.binder_for() 
+ if binding: + existing = {} + for rec in self._prepare_existing_records(options["binding"][to_attr]): + existing.setdefault(keygen(rec), []).append(rec.id) + new = {} + for value in mapped: + new.setdefault(keygen(value), []).append(value) + res = [] + # update ( create,delete ) + for key in set(existing.keys()) & set(new.keys()): + for _id, value in zip_longest(existing[key], new[key]): + # update + if value and _id: + res.append({"id": _id, **value}) + # delete + elif not value: + res.append({"id": _id}) + # create + elif not _id: + res.append(value) + # create + for key in set(new.keys()) - set(existing.keys()): + res += new[key] + # delete + for key in set(existing.keys()) - set(new.keys()): + res += [{"id": x} for x in existing[key]] + return res + return mapped + + +class ConnectorExtensionMapChildExport(AbstractComponent): + _name = "connector.extension.map.child.export" + _inherit = ["base.map.child.export", "connector.extension.child.mapper"] + + def _child_bind(self, map_record, item_values): + # TODO: implement this method + return + # raise NotImplementedError + + def classify_items(self, mapped, to_attr, options): + return mapped + + +# TODO: create a fix on OCA repo and remove this class +class ConnectorExtensionExportMapper(AbstractComponent): + _name = "connector.extension.export.mapper" + _inherit = ["base.export.mapper", "connector.extension.mapper"] + + def _map_direct(self, record, from_attr, to_attr): + """Apply the ``direct`` mappings. 
+ + :param record: record to convert from a source to a target + :param from_attr: name of the source attribute or a callable + :type from_attr: callable | str + :param to_attr: name of the target attribute + :type to_attr: str + """ + if isinstance(from_attr, collections.abc.Callable): + return from_attr(self, record, to_attr) + + value = record[from_attr] + if value is None: # we need to allow fields with value 0 + return False + + # Backward compatibility: when a field is a relation, and a modifier is + # not used, we assume that the relation model is a binding. + # Use an explicit modifier m2o_to_external in the 'direct' mappings to + # change that. + field = self.model._fields[from_attr] + if field.type == "many2one": + mapping_func = m2o_to_external(from_attr) + value = mapping_func(self, record, to_attr) + return value + + def check_external_id(self, external_id, relation): + assert external_id, ( + f"Unexpected error on {relation._name}: " + "The backend id cannot be obtained. " + "At this stage, the backend record should have been already linked via " + "._export_dependencies." + ) + + +class ConnectorExtensionImportMapper(AbstractComponent): + _name = "connector.extension.import.mapper" + _inherit = ["base.import.mapper", "connector.extension.mapper"] + + +class ConnectorExtensionDeleteMapChild(AbstractComponent): + """:py:class:`MapChild` for the Deleters""" + + _name = "connector.extension.base.map.child.deleter" + _inherit = "base.map.child" + _usage = "delete.map.child" + + def _child_mapper(self): + return self.component(usage="import.mapper") + + def format_items(self, items_values): + """Format the values of the items mapped from the child Mappers. + + It can be overridden for instance to add the Odoo + relationships commands ``(6, 0, [IDs])``, ... 
+ + As instance, it can be modified to handle update of existing + items: check if an 'id' has been defined by + :py:meth:`get_item_values` then use the ``(1, ID, {values}``) + command + + :param items_values: list of values for the items to create + :type items_values: list + + """ + return [(0, 0, values) for values in items_values] diff --git a/connector_extension/components/synchronizer.py b/connector_extension/components/synchronizer.py new file mode 100644 index 000000000..bee3338c1 --- /dev/null +++ b/connector_extension/components/synchronizer.py @@ -0,0 +1,14 @@ +# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +from odoo.addons.component.core import AbstractComponent + + +class ConnectorExtensionSynchronizer(AbstractComponent): + _name = "connector.extension.synchronizer" + _inherit = "base.synchronizer" + + #: usage of the component used as backend adapter, + #: customized to use "adapter" instead of "backend.adapter" + _base_backend_adapter_usage = "adapter" + _description = "Connector Extension Base Synchronizer Component" diff --git a/connector_extension/models/__init__.py b/connector_extension/models/__init__.py new file mode 100644 index 000000000..2b2c01cd3 --- /dev/null +++ b/connector_extension/models/__init__.py @@ -0,0 +1,3 @@ +from . import backend +from . import base +from . import binding diff --git a/connector_extension/models/backend/__init__.py b/connector_extension/models/backend/__init__.py new file mode 100644 index 000000000..baacd255d --- /dev/null +++ b/connector_extension/models/backend/__init__.py @@ -0,0 +1 @@ +from . 
import backend diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py new file mode 100644 index 000000000..03b25354a --- /dev/null +++ b/connector_extension/models/backend/backend.py @@ -0,0 +1,152 @@ +# Copyright NuoBiT Solutions SL - Eric Antones +# Copyright NuoBiT Solutions SL - Kilian Niubo +# Copyright 2025 NuoBiT Solutions SL - Deniz Gallo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +import logging + +import pytz + +from odoo import _, api, fields, models +from odoo.exceptions import ValidationError + +_logger = logging.getLogger(__name__) + +_tzs = [ + (tz, tz) + for tz in sorted( + pytz.all_timezones, key=lambda tz: tz if not tz.startswith("Etc/") else "_" + ) +] + + +def _tz_get(self): + return _tzs + + +class ConnectorBackend(models.AbstractModel): + _name = "connector.extension.backend" + _inherit = "connector.backend" + _description = "Connector Backend Extension" + + @api.model + def _select_state(self): + return [ + ("draft", "Draft"), + ("validated", "Validated"), + ] + + name = fields.Char( + required=True, + ) + company_id = fields.Many2one( + comodel_name="res.company", + index=True, + required=True, + default=lambda self: self.env.company, + string="Company", + ) + + lang_ids = fields.Many2many( + comodel_name="res.lang", + column1="backend_id", + column2="lang_id", + required=True, + string="Languages", + ) + + # TODO: REVIEW: Create a template view to be inherited + state = fields.Selection( + selection="_select_state", + default="draft", + ) + active = fields.Boolean( + default=True, + ) + + version = fields.Text(readonly=True) + + tz = fields.Selection( + _tz_get, + string="Timezone", + required=True, + default=lambda self: self._context.get("tz") or self.env.user.tz or "UTC", + help="This field is used to define in which timezone the backend will work.", + ) + + chunk_size = fields.Integer( + default=-1, + required=True, + help="This field is used to define the 
chunk size to import from the backend.", + ) + page_size = fields.Integer( + required=True, + default=10, + help="This field is used in order to define the " + "number of records imported at the same time.", + ) + + sync_offset = fields.Integer( + required=True, + default=0, + help="Minutes to start the synchronization " + "before(negative)/after(positive) the last one", + ) + + enable_call_logging = fields.Boolean( + default=False, + help="Enable logging of calls to the external system", + ) + + def _check_connection(self): + self.ensure_one() + with self.work_on(self._name) as work: + component = work.component(usage="adapter") + self.version = component.get_version() + + def button_check_connection(self): + for rec in self: + rec._check_connection() + rec.state = "validated" + return + + def button_reset_to_draft(self): + self.ensure_one() + self.write({"state": "draft", "version": False}) + + def tz_to_utc(self, datetime_local_naive): + datetime_local = pytz.timezone(self.tz).localize(datetime_local_naive) + datetime_utc = datetime_local.astimezone(pytz.utc) + datetime_utc_naive = datetime_utc.replace(tzinfo=None) + return datetime_utc_naive + + def tz_to_local(self, datetime_utc_naive): + local_tz = pytz.timezone(self.tz) + datetime_utc = pytz.utc.localize(datetime_utc_naive) + datetime_local = datetime_utc.astimezone(local_tz) + datetime_local_naive = datetime_local.replace(tzinfo=None) + return datetime_local_naive + + # Scheduler methods + @api.model + def _get_current_user_company(self): + if self.env.user.id == self.env.ref("base.user_root").id: + raise ValidationError(_("The cron user cannot be admin")) + if self.env.company != self.env.user.company_id: + raise ValidationError( + _( + "The current company must be the same " + "as the default company of the user. " + ) + ) + if self.env.company != self.env.user.company_ids: + raise ValidationError( + _("The current company must be one of the companies of the user. 
") + ) + return self.env.company + + @api.model + def scheduler(self, function_name): + company_id = self._get_current_user_company() + domain = [("company_id", "=", company_id.id)] + func = getattr(self.search(domain), function_name) + return func() diff --git a/connector_extension/models/base.py b/connector_extension/models/base.py new file mode 100644 index 000000000..e18be02dc --- /dev/null +++ b/connector_extension/models/base.py @@ -0,0 +1,53 @@ +# Copyright NuoBiT Solutions SL - Kilian Niubo +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl) + +""" +Base Model +========== + +Extend the 'base' Odoo Model to add Events related features. + + +""" + +from odoo import models + + +class Base(models.AbstractModel): + """The base model, which is implicitly inherited by all models. + + Add an :meth:`_event` method to all Models. This method allows to + trigger events. + + It also notifies the following events: + + * ``on_record_post_unlink(self, bindings_data)`` + + ``on_record_post_unlink`` is notified just *after* the unlink is done. 
+ + """ + + _inherit = "base" + + def _dict_binding_data(self, binding): + with binding.backend_id.work_on(binding._name) as work: + binder = work.component(usage="binder") + external_id = binder.to_external(binding) + return { + "backend": binding.backend_id, + "binding_name": binding._name, + "external_id": external_id, + } + + def unlink(self): + to_remove = [] + binding_field = self.env.context.get("binding_field") + if binding_field: + for record in self: + bindings = record[binding_field] + for binding in bindings: + to_remove.append(self._dict_binding_data(binding)) + result = super().unlink() + for bindings_data in to_remove: + self._event("on_record_after_unlink").notify(bindings_data) + return result diff --git a/connector_extension/models/binding/__init__.py b/connector_extension/models/binding/__init__.py new file mode 100644 index 000000000..0fec82e8a --- /dev/null +++ b/connector_extension/models/binding/__init__.py @@ -0,0 +1 @@ +from . import binding diff --git a/connector_extension/models/binding/binding.py b/connector_extension/models/binding/binding.py new file mode 100644 index 000000000..f44caffcd --- /dev/null +++ b/connector_extension/models/binding/binding.py @@ -0,0 +1,148 @@ +# Copyright NuoBiT Solutions - Eric Antones +# Copyright NuoBiT Solutions - Kilian Niubo +# Copyright 2025 NuoBiT - Deniz Gallo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +from odoo import _, api, fields, models +from odoo.exceptions import ValidationError + + +class ConnectorExtensionExternalBinding(models.AbstractModel): + _name = "connector.extension.external.binding" + _inherit = "external.binding" + _description = "Connector Extension External Binding (abstract)" + + # by default we consider sync_date as the import one + + # BINDER METHODS + def to_external(self, ensure_one=False): + external_ids = [] + for rec in self: + with rec.backend_id.work_on(self._name) as work: + binder = work.component(usage="binder") + external_ids.append( + 
binder.dict2id(rec, in_field=True, raise_on_not_found=True, unwrap=True) + ) + if ensure_one and external_ids: + if len(external_ids) > 1: + raise ValidationError( + _( + "If ensure_one is set only one record" + " can be converted to external id." + ) + ) + return external_ids[0] + return external_ids + + # LAUNCHERS + @api.model + def import_data(self, backend_record, domain=None, delayed=True): + if delayed: + model = self.with_delay() + return model.import_batch( + backend_record, + domain=domain, + delayed=delayed, + ) + + @api.model + def export_data(self, backend_record, domain=None, delayed=True): + """Prepare the batch export records to Channel""" + if delayed: + model = self.with_delay() + return model.export_batch( + backend_record, + domain=domain, + delayed=delayed, + ) + + # BATCH + @api.model + def import_batch(self, backend_record, domain=None, delayed=True, use_data=True): + """Prepare the batch import of records from Backend""" + if not domain: + domain = [] + with backend_record.work_on(self._name) as work: + importer = work.component( + usage=delayed and "batch.delayed.importer" or "batch.direct.importer" + ) + return importer.run(domain, use_data=use_data) + + @api.model + def export_batch(self, backend_record, domain=None, delayed=True): + """Prepare the batch export of records modified on Odoo""" + if not domain: + domain = [] + with backend_record.work_on(self._name) as work: + exporter = work.component( + usage=delayed and "batch.delayed.exporter" or "batch.direct.exporter" + ) + return exporter.run(domain=domain) + + # CHUNKS + @api.model + def import_chunk( + self, + backend_record, + domain, + offset, + chunk_size, + delayed=True, + ): + """Prepare the chunk import of records modified on Backend""" + with backend_record.work_on(self._name) as work: + importer = work.component( + usage=delayed and "chunk.delayed.importer" or "chunk.direct.importer" + # "chunk.direct.importer" + ) + return importer.run(domain, offset, chunk_size) + + # 
RECORDS + @api.model + def import_record(self, backend_record, external_id, sync_date, external_data=None): + """Import record from Backend""" + if not external_data: + external_data = {} + with backend_record.work_on(self._name) as work: + importer = work.component(usage="record.direct.importer") + return importer.run(external_id, sync_date, external_data=external_data) + + @api.model + def export_record(self, backend_record, relation): + """Export Odoo record""" + with backend_record.work_on(self._name) as work: + exporter = work.component(usage="record.direct.exporter") + return exporter.run(relation) + + @api.model + def export_delete_record(self, backend_record, relation): + """Export Odoo record""" + with backend_record.work_on(self._name) as work: + deleter = work.component(usage="record.direct.export.deleter") + return deleter.run(relation) + + # RESYNC: existing binding synchronization + def resync_import(self): + self.env.user.company_id = self.company_id + for record in self: + with record.backend_id.work_on(record._name) as work: + binder = work.component(usage="binder") + external_id = binder.to_external(record) + func = record.import_record + if record.env.context.get("connector_delay"): + func = func.with_delay + func(record.backend_id, external_id, fields.Datetime.now()) + return True + + def resync_export(self): + for record in self: + with record.backend_id.work_on(record._name) as work: + binder = work.component(usage="binder") + relation = binder.unwrap_binding(record).with_context( + resync_export=True + ) + func = record.export_record + if record.env.context.get("connector_delay"): + func = func.with_delay + func(record.backend_id, relation) + return True diff --git a/connector_extension/pyproject.toml b/connector_extension/pyproject.toml new file mode 100644 index 000000000..4231d0ccc --- /dev/null +++ b/connector_extension/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git 
a/connector_extension/readme/CONTRIBUTORS.md b/connector_extension/readme/CONTRIBUTORS.md
new file mode 100644
index 000000000..dd0f9a595
--- /dev/null
+++ b/connector_extension/readme/CONTRIBUTORS.md
@@ -0,0 +1,3 @@
+- [NuoBiT](https://www.nuobit.com):
+  - Kilian Niubo
+  - Eric Antones
diff --git a/connector_extension/readme/DESCRIPTION.md b/connector_extension/readme/DESCRIPTION.md
new file mode 100644
index 000000000..337af0d7c
--- /dev/null
+++ b/connector_extension/readme/DESCRIPTION.md
@@ -0,0 +1,24 @@
+The "connector_extension" module is an add-on for the Odoo ERP system
+that enhances the functionality of the base "connector" module. This
+extension provides additional features, tools, and integrations, making
+it easier for developers to create, manage, and maintain connections
+between Odoo and various third-party systems, APIs, or services.
+
+The module aims to simplify the connector development process by
+providing a robust and flexible framework. The "connector_extension"
+module allows developers to focus on implementing specific business
+logic and requirements, while the extension handles common tasks.
+
+- Extended connector framework: The module extends the base connector
+  framework by providing new classes, methods, and utilities for easier
+  integration with third-party systems.
+- Reusable components: Pre-built components for handling common tasks
+  such as error handling, and data synchronization.
+- Enhanced data mapping: Advanced data mapping capabilities to transform
+  and adapt data between Odoo and external systems seamlessly.
+- Scalability and performance improvements: Optimized for handling large
+  datasets and concurrent connections, ensuring smooth and efficient
+  data synchronization.
+- Linking with existing records: Enables linking with existing records,
+  using alternate keys, which allows for easier data management and
+  reduced redundancy. 
diff --git a/connector_extension/static/description/index.html b/connector_extension/static/description/index.html new file mode 100644 index 000000000..8996411f1 --- /dev/null +++ b/connector_extension/static/description/index.html @@ -0,0 +1,443 @@ + + + + + +Connector Extension + + + +
+

Connector Extension

+ + +

Beta License: LGPL-3 NuoBiT/odoo-addons

+

The “connector_extension” module is an add-on for the Odoo ERP system +that enhances the functionality of the base “connector” module. This +extension provides additional features, tools, and integrations, making +it easier for developers to create, manage, and maintain connections +between Odoo and various third-party systems, APIs, or services.

+

The module aims to simplify the connector development process by +providing a robust and flexible framework. The “connector_extension” +module allows developers to focus on implementing specific business +logic and requirements, while the extension handles common tasks.

+
    +
  • Extended connector framework: The module extends the base connector +framework by providing new classes, methods, and utilities for easier +integration with third-party systems.
  • +
  • Reusable components: Pre-built components for handling common tasks +such as error handling, and data synchronization.
  • +
  • Enhanced data mapping: Advanced data mapping capabilities to transform +and adapt data between Odoo and external systems seamlessly.
  • +
  • Scalability and performance improvements: Optimized for handling large +datasets and concurrent connections, ensuring smooth and efficient +data synchronization.
  • +
  • Linking with existing records: Enables linking with existing records, +using alternate keys, which allows for easier data management and +reduced redundancy.
  • +
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • NuoBiT Solutions SL
  • +
+
+
+

Contributors

+ +
+
+

Maintainers

+

This module is part of the NuoBiT/odoo-addons project on GitHub.

+

You are welcome to contribute.

+
+
+
+ +