From 9151557135e826553d49741eab9282d75a3064ae Mon Sep 17 00:00:00 2001 From: KNVx Date: Thu, 23 Mar 2023 09:21:10 +0100 Subject: [PATCH 01/68] [ADD] connector_extension: New Module --- connector_extension/README.rst | 70 +++ connector_extension/__init__.py | 2 + connector_extension/__manifest__.py | 21 + connector_extension/common/__init__.py | 1 + connector_extension/common/backend.py | 44 ++ connector_extension/components/__init__.py | 8 + connector_extension/components/adapter.py | 228 ++++++++ .../components/adapter_mysql.py | 47 ++ connector_extension/components/adapter_sql.py | 410 +++++++++++++ connector_extension/components/binder.py | 547 ++++++++++++++++++ connector_extension/components/binding.py | 101 ++++ connector_extension/components/exporter.py | 279 +++++++++ connector_extension/components/importer.py | 207 +++++++ connector_extension/components/mapper.py | 287 +++++++++ connector_extension/readme/CONTRIBUTORS.rst | 4 + connector_extension/readme/DESCRIPTION.rst | 10 + .../static/description/index.html | 427 ++++++++++++++ 17 files changed, 2693 insertions(+) create mode 100644 connector_extension/README.rst create mode 100644 connector_extension/__init__.py create mode 100644 connector_extension/__manifest__.py create mode 100644 connector_extension/common/__init__.py create mode 100644 connector_extension/common/backend.py create mode 100644 connector_extension/components/__init__.py create mode 100644 connector_extension/components/adapter.py create mode 100644 connector_extension/components/adapter_mysql.py create mode 100644 connector_extension/components/adapter_sql.py create mode 100644 connector_extension/components/binder.py create mode 100644 connector_extension/components/binding.py create mode 100644 connector_extension/components/exporter.py create mode 100644 connector_extension/components/importer.py create mode 100644 connector_extension/components/mapper.py create mode 100644 connector_extension/readme/CONTRIBUTORS.rst create mode 
100644 connector_extension/readme/DESCRIPTION.rst create mode 100644 connector_extension/static/description/index.html diff --git a/connector_extension/README.rst b/connector_extension/README.rst new file mode 100644 index 000000000..e3ed5083b --- /dev/null +++ b/connector_extension/README.rst @@ -0,0 +1,70 @@ +=================== +Connector Extension +=================== + +.. !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png + :target: https://odoo-community.org/page/development-status + :alt: Beta +.. |badge2| image:: https://img.shields.io/badge/licence-AGPL--3-blue.png + :target: http://www.gnu.org/licenses/agpl-3.0-standalone.html + :alt: License: AGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-nuobit%2Fodoo--addons-lightgray.png?logo=github + :target: https://github.com/nuobit/odoo-addons/tree/16.0/connector_extension + :alt: nuobit/odoo-addons + +|badge1| |badge2| |badge3| + +The "connector_extension" module is an add-on for the Odoo ERP system that enhances the functionality of the base "connector" module. This extension provides additional features, tools, and integrations, making it easier for developers to create, manage, and maintain connections between Odoo and various third-party systems, APIs, or services. + +The module aims to simplify the connector development process by providing a robust and flexible framework. The "connector_extension" module allows developers to focus on implementing specific business logic and requirements, while the extension handles common tasks. + + +* Extended connector framework: The module extends the base connector framework by providing new classes, methods, and utilities for easier integration with third-party systems. 
+* Reusable components: Pre-built components for handling common tasks such as error handling, and data synchronization. +* Enhanced data mapping: Advanced data mapping capabilities to transform and adapt data between Odoo and external systems seamlessly. +* Scalability and performance improvements: Optimized for handling large datasets and concurrent connections, ensuring smooth and efficient data synchronization. +* Linking with existing records: Enables linking with existing records, using alternate keys, which allows for easier data management and reduced redundancy. + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +~~~~~~~ + +* NuoBiT Solutions, S.L. + +Contributors +~~~~~~~~~~~~ + +* `NuoBiT `__: + + * Kilian Niubo + * Eric Antones + +Maintainers +~~~~~~~~~~~ + +This module is part of the `nuobit/odoo-addons `_ project on GitHub. + +You are welcome to contribute. diff --git a/connector_extension/__init__.py b/connector_extension/__init__.py new file mode 100644 index 000000000..565f91978 --- /dev/null +++ b/connector_extension/__init__.py @@ -0,0 +1,2 @@ +from . import common +from . 
# Copyright NuoBiT Solutions - Kilian Niubo
# Copyright NuoBiT Solutions - Eric Antones
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)

# Odoo addon manifest for connector_extension: declares metadata and
# dependencies; the addon ships no views/data files of its own.
{
    "name": "Connector Extension",
    "summary": "This module extends the connector module",
    # Odoo 16.0 series, addon revision 1.0.1.
    "version": "16.0.1.0.1",
    "author": "NuoBiT Solutions, S.L.",
    "license": "AGPL-3",
    "category": "Connector",
    "website": "https://github.com/nuobit/odoo-addons",
    # Python libraries that must be importable on the host for this
    # addon to install (checked by Odoo at install time).
    "external_dependencies": {
        "python": [
            "requests",
            "mysql-connector-python",
        ],
    },
    # Built on top of the OCA connector framework.
    "depends": ["connector"],
    # No XML/CSV records shipped yet.
    "data": [],
}
# Copyright NuoBiT Solutions - Eric Antones
# Copyright NuoBiT Solutions - Kilian Niubo
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging

from odoo import api, fields, models

_logger = logging.getLogger(__name__)


# TODO:REVIEW: GET_VERSION
class ConnectorBackend(models.AbstractModel):
    """Extension of the base connector backend.

    Adds a draft/validated lifecycle, an ``active`` flag and helpers to
    verify the connection against the external system through the
    ``backend.adapter`` component.
    """

    _inherit = "connector.backend"

    # TODO: REVIEW: Create a template view to be inherited
    state = fields.Selection(
        selection="_select_state",
        default="draft",
    )
    active = fields.Boolean(
        default=True,
    )

    @api.model
    def _select_state(self):
        """Selection values available for ``state``."""
        return [
            ("draft", "Draft"),
            ("validated", "Validated"),
        ]

    def _check_connection(self):
        """Probe the external system; concrete backends must implement it."""
        raise NotImplementedError

    def button_check_connection(self):
        """UI action: check each backend's connection and mark it validated."""
        for backend in self:
            backend._check_connection()
            backend.state = "validated"

    def check_connection(self):
        """Store the backend version reported by the adapter component."""
        self.ensure_one()
        with self.work_on("connector.backend") as work:
            adapter = work.component(usage="backend.adapter")
            self.version = adapter.get_version()
# Copyright NuoBiT Solutions - Eric Antones
# Copyright NuoBiT Solutions - Kilian Niubo
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

import datetime

from odoo import _
from odoo.exceptions import ValidationError

from odoo.addons.component.core import AbstractComponent


class BackendAdapter(AbstractComponent):
    """Common helpers shared by all backend adapters.

    Provides parameter preparation, in-memory domain filtering and
    domain/value conversion utilities used by the concrete adapters.
    """

    _inherit = "base.backend.adapter"

    _date_format = "%Y-%m-%d"
    _datetime_format = "%Y-%m-%dT%H:%M:%SZ"

    def _prepare_field_type(self, field_data):
        """Split *field_data* into field names and default values.

        Each item is either a plain field name or a ``(name, default)``
        tuple.

        :return: ``(names, defaults)`` where *defaults* maps the
            tuple-declared fields to their default value.
        """
        default_values = {}
        names = []
        for item in field_data:
            if isinstance(item, tuple):
                names.append(item[0])
                default_values[item[0]] = item[1]
            else:
                names.append(item)
        return names, default_values

    def _prepare_parameters(self, values, mandatory, optional=None):
        """Build the parameter dict to send to the backend.

        :param values: dict with the available values
        :param mandatory: required field names (or ``(name, default)``)
        :param optional: ordered optional field names; once the first
            optional present in *values* is found, every following
            optional is emitted too (using its declared default when
            missing) so positional-style APIs get a gap-free sequence.
        :raises ValidationError: if a mandatory field is missing
        """
        optional = optional or []
        mandatory, mandatory_defaults = self._prepare_field_type(mandatory)
        optional, default_values = self._prepare_field_type(optional)
        default_values.update(mandatory_defaults)

        missing_fields = list(set(mandatory) - set(values))
        if missing_fields:
            raise ValidationError(_("Missing mandatory fields %s") % missing_fields)

        result = {field: values[field] for field in mandatory}
        emit = False
        for field in optional:
            if not emit and field in values:
                emit = True
            if emit:
                result[field] = values.get(field, default_values.get(field))
        return result

    def _prepare_results(self, result):
        """Hook to post-process raw backend results; identity by default."""
        return result

    def _filter(self, values, domain=None):
        """Filter a list of record dicts with an Odoo-style domain.

        Only AND-combined triplets are supported.
        """
        # TODO support for domains with 'or' clauses
        if not domain:
            return values

        def _in_list(value, collection):
            if not isinstance(collection, (tuple, list)):
                raise ValidationError(
                    _("The value %s should be a list or tuple") % collection
                )
            return value in collection

        # Map each operator to the predicate that KEEPS the record.
        # BUGFIX: 'like'/'not like' previously tested the reversed
        # containment (record value inside the pattern); Odoo domain
        # semantics are "field value contains the pattern".
        predicates = {
            "=": lambda a, b: a == b,
            "!=": lambda a, b: a != b,
            ">": lambda a, b: a > b,
            "<": lambda a, b: a < b,
            ">=": lambda a, b: a >= b,
            "<=": lambda a, b: a <= b,
            "like": lambda a, b: b in a,
            "not like": lambda a, b: b not in a,
            "in": _in_list,
            "not in": lambda a, b: not _in_list(a, b),
        }

        values_filtered = []
        for record in values:
            for key, op, expected in domain:
                if key not in record:
                    raise ValidationError(_("Key %s does not exist") % key)
                if op not in predicates:
                    raise NotImplementedError("Operator '%s' not supported" % op)
                if not predicates[op](record[key], expected):
                    break
            else:
                values_filtered.append(record)
        return values_filtered

    def _domain_to_normalized_dict(self, domain):
        """Convert, if possible, standard Odoo domain to a dictionary.

        To do so it is necessary to convert all operators to the
        equal '=' operator; '>'/'<' clauses are widened by one unit
        (one day for dates) to the inclusive form.

        NOTE(review): two range clauses on the same field silently
        overwrite each other (last one wins) — confirm callers only
        ever pass one bound per field.
        """
        res = {}
        for elem in domain:
            if len(elem) != 3:
                raise ValidationError(_("Wrong domain clause format %s") % elem)
            field, op, value = elem
            if op == "=":
                if field in res:
                    raise ValidationError(_("Duplicated field %s") % field)
                # _normalize_value is provided elsewhere in the project
                # (not visible in this file) -- TODO confirm.
                res[field] = self._normalize_value(value)
            elif op == "!=":
                if not isinstance(value, bool):
                    raise ValidationError(
                        _("Not equal operation not supported for non boolean fields")
                    )
                if field in res:
                    raise ValidationError(_("Duplicated field %s") % field)
                res[field] = self._normalize_value(not value)
            elif op == "in":
                if not isinstance(value, (tuple, list)):
                    raise ValidationError(
                        _(
                            "Operator '%(OPERATOR)s' only supports tuples or lists, "
                            "not %(TYPE)s"
                        )
                        % {
                            "OPERATOR": op,
                            "TYPE": type(value),
                        }
                    )
                if field in res:
                    raise ValidationError(_("Duplicated field %s") % field)
                res[field] = self._normalize_value(value)
            elif op in (">", ">=", "<", "<="):
                if not isinstance(value, (datetime.date, datetime.datetime, int)):
                    raise ValidationError(
                        _("Type %(TYPE)s not supported for operator %(OPERATOR)s")
                        % {
                            "TYPE": type(value),
                            "OPERATOR": op,
                        }
                    )
                if op in (">", "<"):
                    # Widen strict comparisons to inclusive ones.
                    adj = 1
                    if isinstance(value, (datetime.date, datetime.datetime)):
                        adj = datetime.timedelta(days=adj)
                    if op == "<":
                        op, value = "<=", value - adj
                    else:
                        op, value = ">=", value + adj
                res[field] = self._normalize_value(value)
            else:
                raise ValidationError(_("Operator %s not supported") % op)

        return res

    def _extract_domain_clauses(self, domain, fields):
        """Split *domain* into the clauses on *fields* and the rest.

        ``in``/``not in`` clauses are never extracted.
        :return: ``(extracted, rest)``
        """
        if not isinstance(fields, (tuple, list)):
            fields = [fields]
        extracted, rest = [], []
        for clause in domain:
            extractable = clause[0] in fields and clause[1] not in ["in", "not in"]
            (extracted if extractable else rest).append(clause)
        return extracted, rest

    def _convert_format(self, elem, mapper, path=""):
        """Recursively apply *mapper* converters to *elem* in place.

        *mapper* maps slash-separated paths to callables; scalar values
        at a mapped path are converted, dict keys at a mapped path are
        renamed through the callable.
        """
        scalar_types = (str, int, float, bool, datetime.date, datetime.datetime)
        if isinstance(elem, dict):
            for key, value in elem.items():
                current_path = "{}/{}".format(path, key)
                if isinstance(value, (tuple, list, dict)):
                    if isinstance(value, dict) and current_path in mapper:
                        # Rename the keys of a mapped sub-dict.
                        renamed = {
                            mapper[current_path](k1): v1 for k1, v1 in value.items()
                        }
                        value = elem[key] = renamed
                    self._convert_format(value, mapper, current_path)
                elif isinstance(value, scalar_types):
                    if current_path in mapper:
                        elem[key] = mapper[current_path](value)
                elif value is None:
                    pass
                else:
                    raise NotImplementedError("Type %s not implemented" % type(value))
        elif isinstance(elem, (tuple, list)):
            for child in elem:
                self._convert_format(child, mapper, path)
        elif isinstance(elem, scalar_types):
            pass
        else:
            raise NotImplementedError("Type %s not implemented" % type(elem))

    def _convert_format_domain(self, domain):
        """Return *domain* with date/datetime values rendered as strings.

        datetime is checked before date on purpose (datetime subclasses
        date).
        """
        res = []
        for field, op, value in domain:
            if isinstance(value, datetime.datetime):
                value = value.strftime(self._datetime_format)
            elif isinstance(value, datetime.date):
                value = value.strftime(self._date_format)
            elif isinstance(value, (int, str, list, tuple, bool)):
                pass
            else:
                raise Exception("Type '%s' not supported" % type(value))
            res.append((field, op, value))
        return res
# Copyright NuoBiT Solutions - Eric Antones
# Copyright NuoBiT Solutions - Kilian Niubo
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging

from odoo import _
from odoo.exceptions import ValidationError

from odoo.addons.component.core import AbstractComponent

_logger = logging.getLogger(__name__)


class BackendMySQLAdapterCRUD(AbstractComponent):
    """MySQL flavour of the generic SQL CRUD adapter.

    Restricts the SQL accepted per operation and, on ``create``, runs
    the trailing ``last_insert_id()`` statement to fetch the new id.
    """

    _name = "base.backend.mysql.adapter.crud"
    _inherit = "base.backend.sql.adapter.crud"

    # TODO:REVIEW: GET_VERSION
    # _sql_version = "SELECT VERSION();"

    def _execute(self, op, cr, sql, params):
        """Validate and run *sql* on cursor *cr*.

        :param op: CRUD operation name ('create', 'read', ...)
        :raises ValidationError: on an empty statement, on more than one
            statement for non-create ops, or on a create whose second
            statement is not ``last_insert_id()``.
        """
        if not sql:
            raise ValidationError(_("Empty SQL statement"))
        statements = sql.split(";")
        if op == "create":
            if len(statements) > 2:
                raise ValidationError(_("Unexpected SQL statement"))
            # IDIOM FIX: 'x not in y' and no pointless .lower() on the
            # already-lowercase literal.
            if len(statements) == 2 and "last_insert_id()" not in statements[1].lower():
                raise ValidationError(
                    _("Only last_insert_id() is allowed in insert statement.")
                )
        elif len(statements) != 1:
            raise ValidationError(
                _("Only one query is allowed on non insert SQL statements.")
            )

        res = super()._execute(op, cr, statements[0], params)
        # BUGFIX: only run the trailing statement when it exists; a
        # single-statement create previously raised IndexError here.
        if op == "create" and len(statements) == 2:
            res = cr.execute(statements[1])
        return res

    # TODO:REVIEW: GET_VERSION
    # def get_version(self):
    #     res = self._execute()
    #     return res[0][0]
# Copyright NuoBiT Solutions - Eric Antones
# Copyright NuoBiT Solutions - Kilian Niubo
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

import datetime
import logging
import random

import mysql.connector as mysql  # pylint: disable=W7936

from odoo import _
from odoo.exceptions import ValidationError

from odoo.addons.component.core import AbstractComponent

_logger = logging.getLogger(__name__)


class BackendSQLAdapterCRUD(AbstractComponent):
    """Generic CRUD adapter over a SQL backend.

    Concrete adapters supply the connection factory (``self.conn()``),
    the statement templates (``_sql_read``, ``_sql_update``,
    ``_sql_insert``, ``_sql_delete``, ``_sql_schema``), the external id
    field list (``self._id``) and optionally ``self.schema``.
    """

    _name = "base.backend.sql.adapter.crud"
    _inherit = "base.backend.adapter.crud"

    _date_format = "%Y-%m-%d"
    _datetime_format = "%Y-%m-%dT%H:%M:%SZ"

    def _escape(self, s):
        """Strip quote characters so *s* cannot break hand-built SQL."""
        return s.replace("'", "").replace('"', "")

    def _check_schema(self):
        """Ensure ``self.schema`` exists on the backend.

        Called before interpolating the schema name into SQL to avoid
        injection through a forged schema name.

        :raises mysql.InternalError: if the schema does not exist
        """
        conn = self.conn()
        cr = conn.cursor()
        # TODO: REVIEW Get the operation (read) automatically from def _exec
        self._execute("read", cr, self._sql_schema, (self.schema,))
        headers = [desc[0] for desc in cr.description]
        rows = [dict(zip(headers, row)) for row in cr]
        cr.close()
        conn.close()
        if not rows:
            raise mysql.InternalError("The schema %s does not exist" % self.schema)

    def _convert_dict(self, data, to_backend=True):
        """Convert datetime values in *data* between UTC and backend TZ."""
        if not isinstance(data, dict):
            raise ValidationError(_("Expected a dictionary, found %s") % data)
        for key, value in data.items():
            if isinstance(value, datetime.datetime):
                if to_backend:
                    convert = self.backend_record.tz_to_local
                else:
                    convert = self.backend_record.tz_to_utc
                data[key] = convert(value)
        return data

    def _execute(self, op, cr, sql, params=None):
        """Low-level statement execution; subclasses may add validation."""
        return cr.execute(sql, params=params)

    def _exec(self, op, *args, **kwargs):
        """Dispatch to the ``_exec_<op>`` implementation."""
        return getattr(self, "_exec_%s" % op)(*args, **kwargs)

    # TODO:REVIEW: GET_VERSION
    # (commented-out _exec_get_version kept out; reintroduce with the
    #  _sql_version template once the get_version API is settled)

    def _exec_read(self, filters=None, fields=None):
        """Run ``_sql_read``, optionally wrapped to project/filter rows.

        :param filters: ``(field, operator, value)`` triplets, combined
            with AND; ``None`` values use ``IS [NOT]`` comparison.
        :param fields: optional projection; the id fields are always
            appended so uniqueness can still be checked.
        :return: list of row dicts
        """
        filters = filters or []
        sql = self._sql_read
        if self.schema:
            # check if schema exists to avoid injection
            self._check_schema()
            sql = sql % dict(schema=self.schema)

        values = []
        if filters or fields:
            # TODO: Is it really necessary?
            sql_l = ["with t as (%s)" % sql]
            fields_l = fields or ["*"]
            if fields and self._id:
                for f in self._id:
                    if f not in fields_l:
                        fields_l.append(f)
            sql_l.append("select %s from t" % (", ".join(fields_l),))

            if filters:
                where = []
                for field, operator, value in filters:
                    if value is None:
                        if operator == "=":
                            operator = "is"
                        elif operator == "!=":
                            operator = "is not"
                        else:
                            raise Exception(
                                "Operator '%s' is not implemented on NULL values"
                                % operator
                            )
                    where.append("%s %s %%s" % (field, operator))
                    values.append(value)
                sql_l.append("where %s" % (" and ".join(where),))

            sql = " ".join(sql_l)

        conn = self.conn()
        cr = conn.cursor()
        self._execute("read", cr, sql, tuple(values))
        headers = [desc[0] for desc in cr.description]
        rows = [dict(zip(headers, row)) for row in cr]
        cr.close()
        conn.close()

        # If the filters pin down a full external id, the result must be
        # unique per id tuple.
        filter_keys_s = {clause[0] for clause in filters}
        # TODO: Modified with getattr
        id_fields = self.binder_for().get_id_fields(in_field=False)
        if id_fields and set(id_fields).issubset(filter_keys_s):
            self._check_uniq(rows, id_fields)

        return rows

    def _check_uniq(self, data, id_fields):
        """Raise if two rows in *data* share the same external id tuple."""
        seen = set()
        for rec in data:
            id_t = tuple(rec[f] for f in id_fields)
            if id_t in seen:
                raise ValidationError(
                    _("Unexpected error: ID duplicated: %(ID_FIELDS)s - %(ID_T)s")
                    % {
                        "ID_FIELDS": id_fields,
                        "ID_T": id_t,
                    }
                )
            seen.add(id_t)

    # exposed methods

    def search_read(self, filters=None):
        """Search records according to some criterias
        and returns a list of ids

        :rtype: list
        """
        _logger.debug("method search_read, sql %s, filters %s", self._sql_read, filters)
        return self._exec("read", filters=filters or [])

    def search(self, filters=None):
        """Return the external id tuples matching *filters*.

        :rtype: list
        """
        _logger.debug("method search, sql %s, filters %s", self._sql_read, filters)
        rows = self.search_read(filters=filters or [])
        return [tuple(row[f] for f in self._id) for row in rows]

    # pylint: disable=W8106
    def read(self, _id, attributes=None):
        """Returns the information of a record

        :rtype: dict
        """
        _logger.debug(
            "method read, sql %s id %s, attributes %s", self._sql_read, _id, attributes
        )
        id_items = self.binder_for().id2dict(_id, in_field=False).items()
        filters = [(key, "=", value) for key, value in id_items]
        rows = self._exec("read", filters=filters)
        if len(rows) > 1:
            raise mysql.IntegrityError(
                "Unexpected error: Returned more the one rows:\n%s" % ("\n".join(rows),)
            )
        return rows[0] if rows else []

    def _check_write_result(self, conn, cr, id_d):
        """Validate an UPDATE affected at most one row; return the count.

        On mysql, an update that changes nothing reports rowcount 0, so
        zero is NOT treated as an error here.
        """
        count = cr.rowcount
        if count > 1:
            conn.rollback()
            raise mysql.IntegrityError(
                "Unexpected error: Returned more the one row with ID: %s" % (id_d,)
            )
        return count

    # pylint: disable=W8106
    def write(self, _id, values_d):
        return self._exec("write", _id, values_d)

    def _exec_write(self, _id, values_d):  # pylint: disable=W8106
        """Update records on the external system"""
        _logger.debug(
            "method write, sql %s id %s, values %s", self._sql_update, _id, values_d
        )
        if not values_d:
            return 0
        params_dict = {}
        if self.schema:
            # check if schema exists to avoid injection
            self._check_schema()
            params_dict["schema"] = self.schema

        # get id fieldnames and values
        id_d = self.binder_for().id2dict(_id, in_field=False)
        # A field may appear both in the SET and the WHERE part; give the
        # SET placeholder a random numeric suffix to avoid the clash.
        qset_map_d = {}
        for field, value in values_d.items():
            if field in id_d:
                while True:
                    alias = "%s%i" % (field, random.randint(0, 999))
                    if alias not in values_d and alias not in id_d:
                        qset_map_d[field] = (alias, value)
                        break
            else:
                qset_map_d[field] = (field, value)

        qset_l = [
            "%(field)s = %%(%(field9)s)s" % dict(field=field, field9=alias)
            for field, (alias, _v) in qset_map_d.items()
        ]
        params_dict["qset"] = ", ".join(qset_l)

        # prepare the sql with base structure
        sql = self._sql_update % params_dict

        # prepare params
        params = dict(id_d)
        for alias, value in qset_map_d.values():
            params[alias] = value
        params = self._convert_dict(params, to_backend=True)

        conn = self.conn()
        cr = conn.cursor()
        self._execute("write", cr, sql, params)
        count = self._check_write_result(conn, cr, id_d)
        conn.commit()
        cr.close()
        conn.close()
        return count

    # pylint: disable=W8106
    def create(self, values_d):
        return self._exec("create", values_d)

    def _exec_create(self, values_d):  # pylint: disable=W8106
        """Create a record on the external system"""
        _logger.debug("method create, model %s, attributes %s", self._name, values_d)
        if not values_d:
            return 0
        params_dict = {}
        if self.schema:
            # check if schema exists to avoid injection
            self._check_schema()
            params_dict["schema"] = self.schema

        # build the sql parts
        fields, params, phvalues = [], [], []
        for field, value in values_d.items():
            fields.append(field)
            params.append(value)
            if value is None or isinstance(
                value, (str, datetime.date, datetime.datetime, int, float)
            ):
                phvalues.append("%s")
            else:
                raise NotImplementedError("Type %s" % type(value))

        # BUGFIX: the returning columns were derived from
        # ``id2dict(id, in_field=False)`` using the *builtin* ``id``;
        # ``zip`` then truncated a composite external key to a single
        # column.  Ask the binder for the external id fields directly.
        retvalues = self.binder_for().get_id_fields(in_field=False)
        params_dict["fields"] = ", ".join(fields)
        params_dict["phvalues"] = ", ".join(phvalues)
        params_dict["retvalues"] = ", ".join(retvalues)

        # prepare the sql with base structure
        sql = self._sql_insert % dict(params_dict)

        # run the insert
        res = []
        try:
            conn = self.conn()
            cr = conn.cursor()
            self._execute("create", cr, sql, tuple(params))
            headers = [desc[0] for desc in cr.description]
            for row in cr:
                res.append(dict(zip(headers, row)))
            conn.commit()
            cr.close()
            conn.close()
        except mysql.IntegrityError as e:
            # Workaround: Because of Microsoft SQL Server
            # removes the spaces on varchars on comparisions
            # where the varchar belongs to a PK or UK.
            # This produces a no existent IntegrityViolation,
            # so we need to make user aware of that in order to solve the issue.
            if e.args[0] == 2627:
                raise ValidationError(
                    _(
                        "%s\nThis can be caused by a Microsoft SQL Server "
                        "missbehaviour where a field belonging to a PK or "
                        "UK cannot have trailing spaces."
                        "If it has any then a fake IntegrityViolation can be thrown. "
                        "Please check that there's no other "
                        "record on the database with the same key "
                        "fields but with/without trailing spaces, "
                        "then fix it and try again."
                    )
                    % (e,)
                ) from e
            raise

        if not res:
            raise Exception(_("Unexpected!! Nothing created: %s") % (values_d,))
        elif len(res) > 1:
            raise Exception(
                "Unexpected!!: Returned more the one row:%s - %s"
                % (
                    res,
                    values_d,
                )
            )
        return res[0]

    def delete(self, _id):
        return self._exec("delete", _id)

    def _exec_delete(self, _id):
        """
        Delete the record with _id
        """
        _logger.debug("method delete, model %s, is %s", self._name, _id)
        sql = self._sql_delete
        if self.schema:
            # check if schema exists to avoid injection
            self._check_schema()
            sql = sql % dict(schema=self.schema)

        # get id fieldnames and values
        params = dict(zip(self._id, _id))
        params = self._convert_dict(params, to_backend=True)

        conn = self.conn()
        cr = conn.cursor()
        self._execute("delete", cr, sql, params)
        count = cr.rowcount
        if count == 0:
            raise Exception(
                _(
                    "Impossible to delete external record with ID '%s': "
                    "Register not found on Backend"
                )
                % (params,)
            )
        elif count > 1:
            conn.rollback()
            raise mysql.IntegrityError(
                "Unexpected error: Returned more the one row with ID: %s" % (params,)
            )
        conn.commit()
        cr.close()
        conn.close()
        return count
+ +""" +import hashlib +import logging +from contextlib import contextmanager + +import psycopg2 + +from odoo import _, fields, models, tools +from odoo.exceptions import ValidationError + +from odoo.addons.component.core import AbstractComponent +from odoo.addons.component.exception import NoComponentError +from odoo.addons.connector.exception import InvalidDataError, RetryableJobError + +_logger = logging.getLogger(__name__) + + +class BinderComposite(AbstractComponent): + """The same as Binder but allowing composite external keys""" + + # TODO: Better name instead of composite + _name = "base.binder.composite" + _inherit = "base.binder" + + _internal_field = "internal_id" + + _internal_alt_field = "internal_alt_id" + _external_alt_field = "external_alt_id" + + _default_binding_field = None + + def idhash(self, external_id): + odoo_hash = hashlib.sha256() + for e in external_id: + if isinstance(e, int): + e9 = str(e) + if int(e9) != e: + raise Exception("Unexpected") + elif isinstance(e, str): + e9 = e + elif e is None: + pass + else: + raise Exception("Unexpected type for a key: type %s" % type(e)) + odoo_hash.update(e9.encode("utf8")) + return odoo_hash.hexdigest() + + def get_id_fields(self, in_field=True, alt_field=False): + if in_field: + fields = self._internal_alt_field if alt_field else self._internal_field + else: + fields = self._external_alt_field if alt_field else self._external_field + if not isinstance(fields, (tuple, list)): + fields = [fields] + fields_l = [] + for f in fields: + if hasattr(self, f): + fields_l.append(getattr(self, f)) + else: + raise ValidationError( + _("Id field %(FIELD)s is not defined in model %(MODEL)s") + % { + "FIELD": f, + "MODEL": self._name, + } + ) + return fields_l + + def id2dict(self, _id, in_field=True, alt_field=False): + """Return a dict with the internal or external fields and their values + :param _id: Values to put on internal or external fields + :param in_field: with True value, _internal_field defined in 
values)) + else: + ops.append((2, _id, False)) + else: + ops.append((0, False, values)) + return ops + + def _prepare_existing_records(self, records): + return records + + def classify_items(self, mapped, to_attr, options): + def keygen(_id): + return tuple(binder.dict2id(_id)) + + binding = options["binding"] + binder = self.binder_for() + if binding: + existing = {} + for rec in self._prepare_existing_records(options["binding"][to_attr]): + existing.setdefault(keygen(rec), []).append(rec.id) + new = {} + for value in mapped: + new.setdefault(keygen(value), []).append(value) + res = [] + # update ( create,delete ) + for key in set(existing.keys()) & set(new.keys()): + for _id, value in zip_longest(existing[key], new[key]): + # update + if value and _id: + res.append({"id": _id, **value}) + # delete + elif not value: + res.append({"id": _id}) + # create + elif not _id: + res.append(value) + # create + for key in set(new.keys()) - set(existing.keys()): + res += new[key] + # delete + for key in set(existing.keys()) - set(new.keys()): + res += [{"id": x} for x in existing[key]] + return res + return mapped + + +class ExportMapChild(AbstractComponent): + _inherit = "base.map.child.export" + + def _child_bind(self, map_record, item_values): + # TODO: implement this method + raise NotImplementedError + + def classify_items(self, mapped, to_attr, options): + return mapped + + +# TODO: create a fix on OCA repo and remove this class +class ExportMapper(AbstractComponent): + _inherit = "base.export.mapper" + + def _map_direct(self, record, from_attr, to_attr): + """Apply the ``direct`` mappings. 
+ + :param record: record to convert from a source to a target + :param from_attr: name of the source attribute or a callable + :type from_attr: callable | str + :param to_attr: name of the target attribute + :type to_attr: str + """ + if isinstance(from_attr, collections.abc.Callable): + return from_attr(self, record, to_attr) + + value = record[from_attr] + if value is None: # we need to allow fields with value 0 + return False + + # Backward compatibility: when a field is a relation, and a modifier is + # not used, we assume that the relation model is a binding. + # Use an explicit modifier m2o_to_external in the 'direct' mappings to + # change that. + field = self.model._fields[from_attr] + if field.type == "many2one": + mapping_func = m2o_to_external(from_attr) + value = mapping_func(self, record, to_attr) + return value + + def check_external_id(self, external_id, relation): + assert external_id, ( + "Unexpected error on %s:" + "The backend id cannot be obtained." + "At this stage, the backend record should have been already linked via " + "._export_dependencies. " % relation._name + ) + + +class DeleteMapChild(AbstractComponent): + """:py:class:`MapChild` for the Deleters""" + + _name = "base.map.child.deleter" + _inherit = "base.map.child" + _usage = "delete.map.child" + + def _child_mapper(self): + return self.component(usage="import.mapper") + + def format_items(self, items_values): + """Format the values of the items mapped from the child Mappers. + + It can be overridden for instance to add the Odoo + relationships commands ``(6, 0, [IDs])``, ... 
+ + As instance, it can be modified to handle update of existing + items: check if an 'id' has been defined by + :py:meth:`get_item_values` then use the ``(1, ID, {values}``) + command + + :param items_values: list of values for the items to create + :type items_values: list + + """ + return [(0, 0, values) for values in items_values] diff --git a/connector_extension/readme/CONTRIBUTORS.rst b/connector_extension/readme/CONTRIBUTORS.rst new file mode 100644 index 000000000..e468d95a0 --- /dev/null +++ b/connector_extension/readme/CONTRIBUTORS.rst @@ -0,0 +1,4 @@ +* `NuoBiT `__: + + * Kilian Niubo + * Eric Antones diff --git a/connector_extension/readme/DESCRIPTION.rst b/connector_extension/readme/DESCRIPTION.rst new file mode 100644 index 000000000..e5d832363 --- /dev/null +++ b/connector_extension/readme/DESCRIPTION.rst @@ -0,0 +1,10 @@ +The "connector_extension" module is an add-on for the Odoo ERP system that enhances the functionality of the base "connector" module. This extension provides additional features, tools, and integrations, making it easier for developers to create, manage, and maintain connections between Odoo and various third-party systems, APIs, or services. + +The module aims to simplify the connector development process by providing a robust and flexible framework. The "connector_extension" module allows developers to focus on implementing specific business logic and requirements, while the extension handles common tasks. + + +* Extended connector framework: The module extends the base connector framework by providing new classes, methods, and utilities for easier integration with third-party systems. +* Reusable components: Pre-built components for handling common tasks such error handling, and data synchronization. +* Enhanced data mapping: Advanced data mapping capabilities to transform and adapt data between Odoo and external systems seamlessly. 
+* Scalability and performance improvements: Optimized for handling large datasets and concurrent connections, ensuring smooth and efficient data synchronization. +* Linking with existing records: Enables linking with existing records, using alternate keys, which allows for easier data management and reduced redundancy. diff --git a/connector_extension/static/description/index.html b/connector_extension/static/description/index.html new file mode 100644 index 000000000..ece8db9f9 --- /dev/null +++ b/connector_extension/static/description/index.html @@ -0,0 +1,427 @@ + + + + + + +Connector Extension + + + +
+

Connector Extension

+ + +

Beta License: AGPL-3 nuobit/odoo-addons

+

The “connector_extension” module is an add-on for the Odoo ERP system that enhances the functionality of the base “connector” module. This extension provides additional features, tools, and integrations, making it easier for developers to create, manage, and maintain connections between Odoo and various third-party systems, APIs, or services.

+

The module aims to simplify the connector development process by providing a robust and flexible framework. The “connector_extension” module allows developers to focus on implementing specific business logic and requirements, while the extension handles common tasks.

+
    +
  • Extended connector framework: The module extends the base connector framework by providing new classes, methods, and utilities for easier integration with third-party systems.
  • +
  • Reusable components: Pre-built components for handling common tasks such as error handling, and data synchronization.
  • +
  • Enhanced data mapping: Advanced data mapping capabilities to transform and adapt data between Odoo and external systems seamlessly.
  • +
  • Scalability and performance improvements: Optimized for handling large datasets and concurrent connections, ensuring smooth and efficient data synchronization.
  • +
  • Linking with existing records: Enables linking with existing records, using alternate keys, which allows for easier data management and reduced redundancy.
  • +
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us smashing it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • NuoBiT Solutions
  • +
  • S.L.
  • +
+
+
+

Contributors

+ +
+
+

Maintainers

+

This module is part of the nuobit/odoo-addons project on GitHub.

+

You are welcome to contribute.

+
+
+
+ + From bd60b1c4ee25973320bec221315a26b696c11447 Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Mon, 1 May 2023 01:23:37 +0200 Subject: [PATCH 02/68] [FIX] connector_extension: pre-commit, company name --- connector_extension/README.rst | 3 +-- connector_extension/__manifest__.py | 2 +- connector_extension/static/description/index.html | 3 +-- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/connector_extension/README.rst b/connector_extension/README.rst index e3ed5083b..889e249eb 100644 --- a/connector_extension/README.rst +++ b/connector_extension/README.rst @@ -51,8 +51,7 @@ Credits Authors ~~~~~~~ -* NuoBiT Solutions -* S.L. +* NuoBiT Solutions SL Contributors ~~~~~~~~~~~~ diff --git a/connector_extension/__manifest__.py b/connector_extension/__manifest__.py index be2f80bd5..185a257ee 100644 --- a/connector_extension/__manifest__.py +++ b/connector_extension/__manifest__.py @@ -6,7 +6,7 @@ "name": "Connector Extension", "summary": "This module extends the connector module", "version": "16.0.1.0.1", - "author": "NuoBiT Solutions, S.L.", + "author": "NuoBiT Solutions SL", "license": "AGPL-3", "category": "Connector", "website": "https://github.com/nuobit/odoo-addons", diff --git a/connector_extension/static/description/index.html b/connector_extension/static/description/index.html index ece8db9f9..00c512f25 100644 --- a/connector_extension/static/description/index.html +++ b/connector_extension/static/description/index.html @@ -402,8 +402,7 @@

Credits

Authors

    -
  • NuoBiT Solutions
  • -
  • S.L.
  • +
  • NuoBiT Solutions SL
From 82afccee80d7101d34412de5d0d1eb7c49bbceff Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Mon, 1 May 2023 16:41:35 +0200 Subject: [PATCH 03/68] [IMP] connector_extension: added generic get version on check connection --- connector_extension/__init__.py | 2 +- connector_extension/common/backend.py | 44 ---------- connector_extension/components/__init__.py | 2 +- connector_extension/components/adapter.py | 28 +++++++ .../components/adapter_mssql.py | 37 +++++++++ .../components/adapter_mysql.py | 10 +-- connector_extension/components/adapter_sql.py | 44 +++++----- connector_extension/components/binder.py | 27 +++--- connector_extension/models/__init__.py | 2 + .../{common => models/backend}/__init__.py | 0 connector_extension/models/backend/backend.py | 82 +++++++++++++++++++ .../models/binding/__init__.py | 1 + .../{components => models/binding}/binding.py | 0 13 files changed, 187 insertions(+), 92 deletions(-) delete mode 100644 connector_extension/common/backend.py create mode 100644 connector_extension/components/adapter_mssql.py create mode 100644 connector_extension/models/__init__.py rename connector_extension/{common => models/backend}/__init__.py (100%) create mode 100644 connector_extension/models/backend/backend.py create mode 100644 connector_extension/models/binding/__init__.py rename connector_extension/{components => models/binding}/binding.py (100%) diff --git a/connector_extension/__init__.py b/connector_extension/__init__.py index 565f91978..f24d3e242 100644 --- a/connector_extension/__init__.py +++ b/connector_extension/__init__.py @@ -1,2 +1,2 @@ -from . import common from . import components +from . 
import models diff --git a/connector_extension/common/backend.py b/connector_extension/common/backend.py deleted file mode 100644 index 6f37d0c51..000000000 --- a/connector_extension/common/backend.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright NuoBiT Solutions - Eric Antones -# Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). -import logging - -from odoo import api, fields, models - -_logger = logging.getLogger(__name__) - - -# TODO:REVIEW: GET_VERSION -class ConnectorBackend(models.AbstractModel): - _inherit = "connector.backend" - - @api.model - def _select_state(self): - return [ - ("draft", "Draft"), - ("validated", "Validated"), - ] - - def _check_connection(self): - raise NotImplementedError - - def button_check_connection(self): - for rec in self: - rec._check_connection() - rec.state = "validated" - return - - # TODO: REVIEW: Create a template view to be inhereted - state = fields.Selection( - selection="_select_state", - default="draft", - ) - active = fields.Boolean( - default=True, - ) - - def check_connection(self): - self.ensure_one() - with self.work_on("connector.backend") as work: - component = work.component(usage="backend.adapter") - self.version = component.get_version() diff --git a/connector_extension/components/__init__.py b/connector_extension/components/__init__.py index fed07636f..efb61ad64 100644 --- a/connector_extension/components/__init__.py +++ b/connector_extension/components/__init__.py @@ -1,8 +1,8 @@ from . import adapter from . import adapter_sql from . import adapter_mysql +from . import adapter_mssql from . import binder -from . import binding from . import exporter from . import importer from . 
import mapper diff --git a/connector_extension/components/adapter.py b/connector_extension/components/adapter.py index a61019eb8..9337e9a79 100644 --- a/connector_extension/components/adapter.py +++ b/connector_extension/components/adapter.py @@ -107,6 +107,34 @@ def _filter(self, values, domain=None): # noqa: C901 values_filtered.append(record) return values_filtered + def chunks(self, lst, n): + """Yield successive n-sized chunks from lst.""" + for i in range(0, len(lst), n): + yield lst[i : i + n] + + def _filter_by_hash(self, data): + indexed_data = {x["Hash"]: x for x in data} + odoo_hashes = set( + self.model.search( + [ + ("backend_id", "=", self.backend_record.id), + ] + ).mapped("veloconnect_hash") + ) + changed_hashes = set(indexed_data.keys()) - odoo_hashes + return [indexed_data[x] for x in changed_hashes] + + def _normalize_value(self, value): + if isinstance(value, datetime.datetime): + value = value.strftime(self._datetime_format) + elif isinstance(value, datetime.date): + value = value.strftime(self._date_format) + elif isinstance(value, (int, str, list, tuple, bool)): + pass + else: + raise ValidationError(_("Type '%s' not supported") % type(value)) + return value + def _domain_to_normalized_dict(self, domain): """Convert, if possible, standard Odoo domain to a dictionary. To do so it is necessary to convert all operators to diff --git a/connector_extension/components/adapter_mssql.py b/connector_extension/components/adapter_mssql.py new file mode 100644 index 000000000..95773499c --- /dev/null +++ b/connector_extension/components/adapter_mssql.py @@ -0,0 +1,37 @@ +# Copyright NuoBiT Solutions - Eric Antones +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+import logging + +from odoo.addons.component.core import AbstractComponent + +_logger = logging.getLogger(__name__) + + +class MSSQLAdapterCRUD(AbstractComponent): + _name = "base.backend.mssql.adapter.crud" + _inherit = "base.backend.sql.adapter.crud" + + _sql_version = "select @@version" + + # def _execute(self, op, cr, sql, params): + # if not sql: + # raise ValidationError(_("Empty SQL statement")) + # sql_l = sql.split(";") + # if op == "create": + # if len(sql_l) > 2: + # raise ValidationError(_("Unexpected SQL statement")) + # if len(sql_l) == 2: + # if not "last_insert_id()".lower() in sql_l[1].lower(): + # raise ValidationError( + # _("Only last_insert_id() is allowed in insert statement.") + # ) + # else: + # if len(sql_l) != 1: + # raise ValidationError( + # _("Only one query is allowed on non insert SQL statements.") + # ) + # + # res = super()._execute(op, cr, sql_l[0], params) + # if op == "create": + # res = cr.execute(sql_l[1]) + # return res diff --git a/connector_extension/components/adapter_mysql.py b/connector_extension/components/adapter_mysql.py index 1dbdedd7f..8a9586660 100644 --- a/connector_extension/components/adapter_mysql.py +++ b/connector_extension/components/adapter_mysql.py @@ -11,12 +11,11 @@ _logger = logging.getLogger(__name__) -class BackendMySQLAdapterCRUD(AbstractComponent): +class MySQLAdapterCRUD(AbstractComponent): _name = "base.backend.mysql.adapter.crud" _inherit = "base.backend.sql.adapter.crud" - # TODO:REVIEW: GET_VERSION - # _sql_version = "SELECT VERSION();" + _sql_version = "select version()" def _execute(self, op, cr, sql, params): if not sql: @@ -40,8 +39,3 @@ def _execute(self, op, cr, sql, params): if op == "create": res = cr.execute(sql_l[1]) return res - - # TODO:REVIEW: GET_VERSION - # def get_version(self): - # res = self._execute() - # return res[0][0] diff --git a/connector_extension/components/adapter_sql.py b/connector_extension/components/adapter_sql.py index d26efd44d..cc8b6bcbf 100644 --- 
a/connector_extension/components/adapter_sql.py +++ b/connector_extension/components/adapter_sql.py @@ -15,13 +15,22 @@ _logger = logging.getLogger(__name__) -class BackendSQLAdapterCRUD(AbstractComponent): +class SQLAdapterCRUD(AbstractComponent): _name = "base.backend.sql.adapter.crud" _inherit = "base.backend.adapter.crud" _date_format = "%Y-%m-%d" _datetime_format = "%Y-%m-%dT%H:%M:%SZ" + def get_version(self): + conn = self.conn() + cr = conn.cursor() + cr.execute(self._sql_version) + version = cr.fetchone()[0] + cr.close() + conn.close() + return version + def _escape(self, s): return s.replace("'", "").replace('"', "") @@ -61,24 +70,7 @@ def _exec(self, op, *args, **kwargs): func = getattr(self, "_exec_%s" % op) return func(*args, **kwargs) - # TODO:REVIEW: GET_VERSION - # def _exec_get_version(self): - # sql = self._sql_version - # if self.schema: - # # check if schema exists to avoid injection - # self._check_schema() - # sql = sql % dict(schema=self.schema) - # conn = self.conn() - # cr = conn.cursor() - # self._execute("read", cr, sql) - # res = cr.fetchone() - # cr.close() - # conn.close() - # if not res: - # raise ValidationError("Unexpected error: - # The get_version should have return something") - # return res[0] - + # read/search def _exec_read(self, filters=None, fields=None): if not filters: filters = [] @@ -153,8 +145,6 @@ def _check_uniq(self, data, id_fields): ) uniq.add(id_t) - # exposed methods - def search_read(self, filters=None): """Search records according to some criterias and returns a list of ids @@ -183,6 +173,7 @@ def search(self, filters=None): return res + # read # pylint: disable=W8106 def read(self, _id, attributes=None): """Returns the information of a record @@ -201,6 +192,11 @@ def read(self, _id, attributes=None): ) return res and res[0] or [] + # write + # pylint: disable=W8106 + def write(self, _id, values_d): + return self._exec("write", _id, values_d) + def _check_write_result(self, conn, cr, id_d): count = cr.rowcount # 
On mysql if record is not modified the rowcount is 0 @@ -219,10 +215,6 @@ def _check_write_result(self, conn, cr, id_d): ) return count - # pylint: disable=W8106 - def write(self, _id, values_d): - return self._exec("write", _id, values_d) - def _exec_write(self, _id, values_d): # pylint: disable=W8106 """Update records on the external system""" _logger.debug( @@ -277,6 +269,7 @@ def _exec_write(self, _id, values_d): # pylint: disable=W8106 return count + # create # pylint: disable=W8106 def create(self, values_d): return self._exec("create", values_d) @@ -367,6 +360,7 @@ def _exec_create(self, values_d): # pylint: disable=W8106 return res[0] + # delete def delete(self, _id): return self._exec("delete", _id) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index f72c6abc4..0e3380fdd 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -64,19 +64,20 @@ def get_id_fields(self, in_field=True, alt_field=False): fields = self._external_alt_field if alt_field else self._external_field if not isinstance(fields, (tuple, list)): fields = [fields] - fields_l = [] - for f in fields: - if hasattr(self, f): - fields_l.append(getattr(self, f)) - else: - raise ValidationError( - _("Id field %(FIELD)s is not defined in model %(MODEL)s") - % { - "FIELD": f, - "MODEL": self._name, - } - ) - return fields_l + return fields + # fields_l = [] + # for f in fields: + # if hasattr(self, f): + # fields_l.append(getattr(self, f)) + # else: + # raise ValidationError( + # _("Id field %(FIELD)s is not defined in model %(MODEL)s") + # % { + # "FIELD": f, + # "MODEL": self._name, + # } + # ) + # return fields_l def id2dict(self, _id, in_field=True, alt_field=False): """Return a dict with the internal or external fields and their values diff --git a/connector_extension/models/__init__.py b/connector_extension/models/__init__.py new file mode 100644 index 000000000..3f5e44055 --- /dev/null +++ 
b/connector_extension/models/__init__.py @@ -0,0 +1,2 @@ +from . import backend +from . import binding diff --git a/connector_extension/common/__init__.py b/connector_extension/models/backend/__init__.py similarity index 100% rename from connector_extension/common/__init__.py rename to connector_extension/models/backend/__init__.py diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py new file mode 100644 index 000000000..a16030a34 --- /dev/null +++ b/connector_extension/models/backend/backend.py @@ -0,0 +1,82 @@ +# Copyright NuoBiT Solutions - Eric Antones +# Copyright NuoBiT Solutions - Kilian Niubo +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +import logging + +import pytz + +from odoo import api, fields, models + +_logger = logging.getLogger(__name__) + +_tzs = [ + (tz, tz) + for tz in sorted( + pytz.all_timezones, key=lambda tz: tz if not tz.startswith("Etc/") else "_" + ) +] + + +def _tz_get(self): + return _tzs + + +class ConnectorBackend(models.AbstractModel): + # _name = "connector.backend.extension" + _inherit = "connector.backend" + _description = "Connector Backend Extension" + + @api.model + def _select_state(self): + return [ + ("draft", "Draft"), + ("validated", "Validated"), + ] + + # TODO: REVIEW: Create a template view to be inherited + state = fields.Selection( + selection="_select_state", + default="draft", + ) + active = fields.Boolean( + default=True, + ) + + version = fields.Text(readonly=True) + + tz = fields.Selection( + _tz_get, + string="Timezone", + required=True, + default=lambda self: self._context.get("tz") or self.env.user.tz or "UTC", + help="This field is used in order to define in which timezone the backend will work.", + ) + + def _check_connection(self): + self.ensure_one() + with self.work_on(self._name) as work: + component = work.component(usage="backend.adapter") + self.version = component.get_version() + + def button_check_connection(self): + for rec 
in self: + rec._check_connection() + rec.state = "validated" + return + + def button_reset_to_draft(self): + self.ensure_one() + self.write({"state": "draft", "version": False}) + + def tz_to_utc(self, datetime_local_naive): + datetime_local = pytz.timezone(self.tz).localize(datetime_local_naive) + datetime_utc = datetime_local.astimezone(pytz.utc) + datetime_utc_naive = datetime_utc.replace(tzinfo=None) + return datetime_utc_naive + + def tz_to_local(self, datetime_utc_naive): + local_tz = pytz.timezone(self.tz) + datetime_utc = pytz.utc.localize(datetime_utc_naive) + datetime_local = datetime_utc.astimezone(local_tz) + datetime_local_naive = datetime_local.replace(tzinfo=None) + return datetime_local_naive diff --git a/connector_extension/models/binding/__init__.py b/connector_extension/models/binding/__init__.py new file mode 100644 index 000000000..0fec82e8a --- /dev/null +++ b/connector_extension/models/binding/__init__.py @@ -0,0 +1 @@ +from . import binding diff --git a/connector_extension/components/binding.py b/connector_extension/models/binding/binding.py similarity index 100% rename from connector_extension/components/binding.py rename to connector_extension/models/binding/binding.py From e0d1b21e086b0ec4262fa39b109881a9df66567c Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Mon, 15 May 2023 10:05:28 +0200 Subject: [PATCH 04/68] [FIX] connector_extension: id fields method returns the fields itself instead of the values --- connector_extension/components/binder.py | 27 ++++++++++++------------ 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index 0e3380fdd..f72c6abc4 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -64,20 +64,19 @@ def get_id_fields(self, in_field=True, alt_field=False): fields = self._external_alt_field if alt_field else self._external_field if not isinstance(fields, (tuple, list)): 
fields = [fields] - return fields - # fields_l = [] - # for f in fields: - # if hasattr(self, f): - # fields_l.append(getattr(self, f)) - # else: - # raise ValidationError( - # _("Id field %(FIELD)s is not defined in model %(MODEL)s") - # % { - # "FIELD": f, - # "MODEL": self._name, - # } - # ) - # return fields_l + fields_l = [] + for f in fields: + if hasattr(self, f): + fields_l.append(getattr(self, f)) + else: + raise ValidationError( + _("Id field %(FIELD)s is not defined in model %(MODEL)s") + % { + "FIELD": f, + "MODEL": self._name, + } + ) + return fields_l def id2dict(self, _id, in_field=True, alt_field=False): """Return a dict with the internal or external fields and their values From 498936b0788f6be4a1fc364a9270f030218ef34c Mon Sep 17 00:00:00 2001 From: KNVx Date: Thu, 11 May 2023 09:17:57 +0200 Subject: [PATCH 05/68] [IMP] connector_extension: improvements --- connector_extension/components/__init__.py | 2 + connector_extension/components/adapter.py | 2 + .../components/adapter_woocommerce.py | 82 ++++++++++++++++ .../components/adapter_wordpress.py | 88 +++++++++++++++++ connector_extension/components/binder.py | 79 +++++++++++---- connector_extension/components/exporter.py | 97 ++++++++++--------- connector_extension/components/importer.py | 9 +- connector_extension/components/mapper.py | 32 +++++- connector_extension/models/backend/backend.py | 6 ++ connector_extension/models/binding/binding.py | 14 +-- 10 files changed, 335 insertions(+), 76 deletions(-) create mode 100644 connector_extension/components/adapter_woocommerce.py create mode 100644 connector_extension/components/adapter_wordpress.py diff --git a/connector_extension/components/__init__.py b/connector_extension/components/__init__.py index efb61ad64..cf55a8d8d 100644 --- a/connector_extension/components/__init__.py +++ b/connector_extension/components/__init__.py @@ -2,6 +2,8 @@ from . import adapter_sql from . import adapter_mysql from . import adapter_mssql +from . 
import adapter_woocommerce +from . import adapter_wordpress from . import binder from . import exporter from . import importer diff --git a/connector_extension/components/adapter.py b/connector_extension/components/adapter.py index 9337e9a79..6916b4402 100644 --- a/connector_extension/components/adapter.py +++ b/connector_extension/components/adapter.py @@ -131,6 +131,8 @@ def _normalize_value(self, value): value = value.strftime(self._date_format) elif isinstance(value, (int, str, list, tuple, bool)): pass + elif value is None: + pass else: raise ValidationError(_("Type '%s' not supported") % type(value)) return value diff --git a/connector_extension/components/adapter_woocommerce.py b/connector_extension/components/adapter_woocommerce.py new file mode 100644 index 000000000..142b1a715 --- /dev/null +++ b/connector_extension/components/adapter_woocommerce.py @@ -0,0 +1,82 @@ +# Copyright NuoBiT Solutions - Eric Antones +# Copyright NuoBiT Solutions - Kilian Niubo +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+ +import logging + +from odoo.exceptions import ValidationError + +from odoo.addons.component.core import AbstractComponent + +_logger = logging.getLogger(__name__) + + +class WooCommerceAdapterCRUD(AbstractComponent): + _name = "base.backend.woocommerce.adapter.crud" + _inherit = "base.backend.adapter.crud" + + # TODO: manage retryable_errors + def _exec(self, op, resource, *args, **kwargs): + func = getattr(self, "_exec_%s" % op) + return func(resource, *args, **kwargs) + + def get_total_items(self, resource, domain=None): + filters_values = self._get_filters_values() + real_domain, common_domain = self._extract_domain_clauses( + domain, filters_values + ) + res = self.wcapi.get( + resource, + params=self._domain_to_normalized_dict(real_domain), + ) + total_items = int(res.headers.get("X-WP-Total")) + return total_items + + def _get_filters_values(self): + return ["per_page", "page"] + + def _exec_get(self, resource, *args, **kwargs): + domain = [] + if "domain" in kwargs: + domain = kwargs.pop("domain") + filters_values = self._get_filters_values() + real_domain, common_domain = self._extract_domain_clauses( + domain, filters_values + ) + res = self.wcapi.get( + resource, + *args, + **kwargs, + params=self._domain_to_normalized_dict(real_domain), + ) + res = res.json() + if isinstance(res, dict): + res = [res] + res = self._filter(res, common_domain) + return res + + def _exec_post(self, resource, *args, **kwargs): + res = self.wcapi.post(resource, *args, **kwargs) + if res.status_code in [400, 401, 403, 404, 500]: + raise ValidationError(res.json().get("message")) + try: + res = res.json() + except Exception as e: + raise ValidationError(e) + return res + + def _exec_put(self, resource, *args, **kwargs): + return self.wcapi.put(resource, *args, **kwargs) + + def _exec_delete(self, resource, *args, **kwargs): + raise NotImplementedError() + + def _exec_options(self, resource, *args, **kwargs): + raise NotImplementedError() + + def get_version(self): + 
system_status = self._exec("get", "system_status") + version = False + if system_status: + version = system_status.get("environment").get("version") + return version diff --git a/connector_extension/components/adapter_wordpress.py b/connector_extension/components/adapter_wordpress.py new file mode 100644 index 000000000..580e05c85 --- /dev/null +++ b/connector_extension/components/adapter_wordpress.py @@ -0,0 +1,88 @@ +# Copyright NuoBiT Solutions - Kilian Niubo +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). + +import logging + +import requests + +from odoo import _ +from odoo.exceptions import ValidationError + +from odoo.addons.component.core import AbstractComponent + +_logger = logging.getLogger(__name__) + + +class WordpressAdapterCRUD(AbstractComponent): + _name = "base.backend.wordpress.adapter.crud" + _inherit = "base.backend.adapter.crud" + + # TODO: manage retryable_errors + def _exec(self, op, resource, *args, **kwargs): + func = getattr(self, "_exec_%s" % op) + return func(resource, *args, **kwargs) + + def _exec_get(self, resource, *args, **kwargs): + url = self.backend_record.url + "/wp-json/wp/v2/" + resource + res = requests.get( + url=url, + auth=( + self.backend_record.consumer_key, + self.backend_record.consumer_secret, + ), + ) + if res.status_code in [400, 401, 403, 404, 500]: + raise ValidationError(res.json().get("message")) + try: + res = res.json() + except Exception as e: + raise ValidationError(e) + return res + + def _exec_post(self, resource, *args, **kwargs): + # TODO: this auth method is working like this because if we call + # the export from the woocommerce backend, + # the credentials are in the wordpress backend. 
Refactor + auth = False + if "wordpress_backend_id" in self.backend_record: + backend = self.backend_record.wordpress_backend_id + auth = (backend.consumer_key, backend.consumer_secret) + data_aux = kwargs.pop("data", {}) + headers = data_aux.pop("headers", {}) + data = data_aux.pop("data", {}) + checksum = False + if data_aux.get("checksum"): + checksum = data_aux.pop("checksum") + url = self.backend_record.url + "/wp-json/wp/v2/" + resource + res = requests.post( + url=url, + headers=headers, + data=data, + auth=auth + or (self.backend_record.consumer_key, self.backend_record.consumer_secret), + ) + if res.status_code in [400, 401, 403, 404, 500]: + raise ValidationError(res.json().get("message")) + try: + res = res.json() + if checksum: + res["checksum"] = checksum + except Exception as e: + raise ValidationError(e) + return res + + def _exec_put(self, resource, *args, **kwargs): + return self.wpapi.put(resource, *args, **kwargs) + + def _exec_delete(self, resource, *args, **kwargs): + raise NotImplementedError() + + def _exec_options(self, resource, *args, **kwargs): + raise NotImplementedError() + + def get_version(self): + settings = self._exec("get", "settings") + if settings.get("title"): + return "Wordpress '%s' connected" % settings.get("title") + else: + raise ValidationError(_("Wordpress not connected")) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index f72c6abc4..b955620a5 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -30,8 +30,7 @@ class BinderComposite(AbstractComponent): """The same as Binder but allowing composite external keys""" - # TODO: Better name instead of composite - _name = "base.binder.composite" + _name = "generic.binder" _inherit = "base.binder" _internal_field = "internal_id" @@ -67,7 +66,11 @@ def get_id_fields(self, in_field=True, alt_field=False): fields_l = [] for f in fields: if hasattr(self, f): - 
fields_l.append(getattr(self, f)) + fields = getattr(self, f) + if isinstance(fields, (tuple, list)): + fields_l.extend(fields) + else: + fields_l.append(fields) else: raise ValidationError( _("Id field %(FIELD)s is not defined in model %(MODEL)s") @@ -84,15 +87,25 @@ def id2dict(self, _id, in_field=True, alt_field=False): :param in_field: with True value, _internal_field defined in binder are used. With this parameter False, _external_field will be used. """ - if not isinstance(_id, (tuple, list)): - _id = [_id] + if _id: + fields = self.get_id_fields(in_field=in_field, alt_field=alt_field) + return dict(zip(fields, _id)) else: - if len(_id) == 1: - raise ValidationError( - _("If the id has only 1 element, it shouldn't be a list ") - ) - fields = self.get_id_fields(in_field=in_field, alt_field=alt_field) - return dict(zip(fields, _id)) + return None + + # This Function returns a dict with the external ids from a "dirty" dict + def dict2id2dict(self, _dict, in_field=True, alt_field=False): + """Giving a dict, return the a dict with internal or external ids + :param _dict: Dict to extract internal or external fields + :param in_field: with True value, _internal_field defined in binder are used. + With this parameter False, _external_field will be used. + :param alt_field: with True value, alternative id fields defined in binder are used. 
+ """ + return self.id2dict( + self.dict2id(_dict, in_field=in_field, alt_field=alt_field), + in_field=in_field, + alt_field=alt_field, + ) def dict2id(self, _dict, in_field=True, alt_field=False): """Giving a dict, return the values of the internal or external fields @@ -104,15 +117,18 @@ def dict2id(self, _dict, in_field=True, alt_field=False): res = [] for f in fields: f_splitted = f.split(".") - val = _dict[f_splitted[0]] + if f_splitted[0] in _dict or _dict.get(f_splitted[0]) is not None: + val = _dict[f_splitted[0]] + else: + return None if len(f_splitted) == 2: if isinstance(val, models.BaseModel): val = val[f_splitted[1]] if len(f_splitted) > 2: raise NotImplementedError(_("Multiple dot notation is not supported")) res.append(val) - if len(res) == 1: - return res[0] + # if len(res) == 1: + # return res[0] return res def is_complete_id(self, _id, in_field=True): @@ -322,6 +338,14 @@ def bind_export(self, external_data, relation): def _additional_external_binding_fields(self, external_data): return {} + def is_id_null(self, _id): + if not isinstance(_id, (list, tuple)): + _id = [_id] + for value in _id: + if value is None: + return True + return False + def _get_internal_record_domain(self, values): return [(k, "=", v) for k, v in values.items()] @@ -439,7 +463,7 @@ def to_binding_from_external_key(self, map_record, sync_date): else: values[self._odoo_field] = record.id self.bind_import(map_record.source, values, sync_date, for_create=True) - importer = self.component(usage="direct.record.importer") + importer = self.component(usage="record.direct.importer") binding = importer._create(values) _logger.debug("%d linked from Backend", binding) return binding @@ -481,9 +505,16 @@ def to_binding_from_internal_key(self, relation): % (ext_alt_id,) ) id_values = mapper_external_data.values( - for_create=True, fields=id_fields, binding=self.model + for_create=True, + fields=id_fields, + binding=self.model, + ignore_required_fields=True, ) record = 
self._get_external_record_alt(id_values) + # TODO: check if we can put this in a hook + external_alt_id = self.dict2id(id_values, in_field=False, alt_field=True) + if self.is_id_null(external_alt_id): + return self.model if record: if len(record) > 1: raise InvalidDataError( @@ -511,7 +542,7 @@ def to_binding_from_internal_key(self, relation): binding_ext_fields = mapper_internal_data._mapper.get_target_fields( mapper_internal_data, fields=self.model._fields ) - importer = self.component(usage="direct.record.importer") + importer = self.component(usage="record.direct.importer") importer.run( external_id, external_data=record, @@ -540,6 +571,20 @@ def unwrap_binding(self, binding): odoo_object_ids = [binding] return self.model.browse(odoo_object_ids) + def check_external_id(self, external_id, relation): + assert external_id, ( + "Unexpected error on %s:" + "The backend id cannot be obtained." + "At this stage, the backend record should have been already linked via " + "._export_dependencies. 
" % relation._name + ) + + def get_external_dict_ids(self, relation, check_external_id=True): + external_id = self.to_external(relation, wrap=False) + if check_external_id: + self.check_external_id(external_id, relation) + return self.id2dict(external_id, in_field=False) + # TODO: naming the methods more intuitively # TODO: unify both methods, they have a lot of common code diff --git a/connector_extension/components/exporter.py b/connector_extension/components/exporter.py index 8cbe3db15..6b8daa26d 100644 --- a/connector_extension/components/exporter.py +++ b/connector_extension/components/exporter.py @@ -8,7 +8,7 @@ from odoo import _, fields from odoo.addons.component.core import AbstractComponent -from odoo.addons.connector.exception import IDMissingInBackend, RetryableJobError +from odoo.addons.connector.exception import RetryableJobError _logger = logging.getLogger(__name__) @@ -18,35 +18,36 @@ class GenericDirectExporter(AbstractComponent): _name = "generic.record.direct.exporter" _inherit = "base.exporter" + _usage = "record.direct.exporter" - def __init__(self, working_context): - super().__init__(working_context) - self.binding = None - self.external_id = None + # def __init__(self, working_context): + # super().__init__(working_context) + # self.binding = None + # self.external_id = None - def _should_import(self): + def _should_import(self, binding): return False - def _delay_import(self): - """Schedule an import of the record. - - Adapt in the sub-classes when the model is not imported - using ``import_record``. 
- """ - # force is True because the sync_date will be more recent - # so the import would be skipped - assert self.external_id - self.binding.with_delay().import_record( - self.backend_record, self.external_id, force=True - ) - - def _mapper_options(self): - return {"binding": self.binding} - - def _force_binding_creation(self, relation): - if not self.binding: - self.binding = self.binder.wrap_record(relation, force=True) + # def _delay_import(self, binding): + # """Schedule an import of the record. + # + # Adapt in the sub-classes when the model is not imported + # using ``import_record``. + # """ + # # force is True because the sync_date will be more recent + # # so the import would be skipped + # assert self.external_id + # binding.with_delay().import_record( + # self.backend_record, self.external_id, force=True + # ) + + def _mapper_options(self, binding): + return {"binding": binding} + + # def _force_binding_creation(self, relation): + # if not self.binding: + # self.binding = self.binder.wrap_record(relation, force=True) def run(self, relation, internal_fields=None): """Run the synchronization @@ -56,29 +57,29 @@ def run(self, relation, internal_fields=None): now_fmt = fields.Datetime.now() result = None # get binding from real record - self.binding = self.binder.wrap_record(relation) + binding = self.binder_for().wrap_record(relation) # if not binding, try to link to existing external record with # the same alternate key and create/update binding - if not self.binding: - self.binding = ( - self.binder.to_binding_from_internal_key(relation) or self.binding + if not binding: + binding = ( + self.binder_for().to_binding_from_internal_key(relation) or binding ) - try: - should_import = self._should_import() - except IDMissingInBackend: - # self.external_id = None - should_import = False - if should_import: - self._delay_import() - - if not self.binding: + # try: + # should_import = self._should_import(binding) + # except IDMissingInBackend: + # # 
self.external_id = None + # should_import = False + # if should_import: + # self._delay_import(binding) + + if not binding: internal_fields = None # should be created with all the fields + # TODO: pongo el relation porque si no tiene binding no podemos hacer comprobaciones + if self._has_to_skip(binding, relation): + return _("Nothing to export") - if self._has_to_skip(): - result = _("Nothing to export") - - # export the missing linked resources + # export the missing linked resources self._export_dependencies(relation) # prevent other jobs to export the same record @@ -88,23 +89,23 @@ def run(self, relation, internal_fields=None): map_record = self.mapper.map_record(relation) # passing info to the mapper - opts = self._mapper_options() - if self.binding: + opts = self._mapper_options(binding) + if binding: values = self._update_data(map_record, fields=internal_fields, **opts) if values: - external_id = self.binder_for().dict2id(self.binding, in_field=True) + external_id = self.binder_for().dict2id(binding, in_field=True) result = self._update(external_id, values) else: values = self._create_data(map_record, fields=internal_fields, **opts) if values: external_data = self._create(values) - self.binding = self.binder.bind_export(external_data, relation) + binding = self.binder_for().bind_export(external_data, relation) if not values: result = _("Nothing to export") if not result: result = _("Record exported with ID %s on Backend.") % "external_id" self._after_export() - self.binding[self.binder._sync_date_field] = now_fmt + binding[self.binder_for()._sync_date_field] = now_fmt return result def _after_export(self): @@ -142,7 +143,7 @@ def _lock(self, record): % (self.model._name, record.id) ) from e - def _has_to_skip(self): + def _has_to_skip(self, binding, relation): """Return True if the export can be skipped""" return False diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index 5ee50f6fd..defd33702 100644 --- 
a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -22,7 +22,7 @@ class GenericDirectImporter(AbstractComponent): _name = "generic.record.direct.importer" _inherit = "base.importer" - _usage = "direct.record.importer" + _usage = "record.direct.importer" @contextmanager def _retry_unique_violation(self): @@ -57,7 +57,7 @@ def _import_dependency( self, external_id, binding_model, - sync_date, + # sync_date, external_data=None, importer=None, adapter=None, @@ -157,7 +157,10 @@ def run(self, external_id, sync_date, external_data=None, external_fields=None): ) # import the missing linked resources - self._import_dependencies(external_data, sync_date) + self._import_dependencies( + external_data, + # sync_date + ) # map_data # this one knows how to convert backend data to odoo data diff --git a/connector_extension/components/mapper.py b/connector_extension/components/mapper.py index ca388a3b9..2425ee55b 100644 --- a/connector_extension/components/mapper.py +++ b/connector_extension/components/mapper.py @@ -14,6 +14,28 @@ _logger = logging.getLogger(__name__) +def required(*args): + """Decorator, declare that a method is a required field. + + It is then used by the :py:class:`Mapper` to validate the records. 
+ + Usage:: + + @required + def any(self, record): + return {'output_field': record['input_field']} + + """ + + def required_mapping(func): + func.required = args + return func + + return required_mapping + # func.is_required = True + # return func + + class Mapper(AbstractComponent): _inherit = "base.mapper" @@ -54,6 +76,12 @@ def _apply_with_options(self, map_record): "%s: invalid return value for the " "mapping method %s" % (values, meth) ) + if not self.options.get("ignore_required_fields"): + for field_required in getattr(meth, "required", []): + if field_required not in values or not values[field_required]: + raise ValidationError( + _("Missing required field '%s'") % field_required + ) result.update(values) for from_attr, to_attr, model_name in self.children: @@ -103,7 +131,6 @@ def get_target_fields(self, map_record, fields): if to_attr in result: raise ValidationError(_("Field '%s' mapping defined twice")) result[to_attr] = from_attr - return list(set(result.values())) @@ -214,7 +241,8 @@ class ExportMapChild(AbstractComponent): def _child_bind(self, map_record, item_values): # TODO: implement this method - raise NotImplementedError + return + # raise NotImplementedError def classify_items(self, mapped, to_attr, options): return mapped diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py index a16030a34..31c185d6a 100644 --- a/connector_extension/models/backend/backend.py +++ b/connector_extension/models/backend/backend.py @@ -52,6 +52,12 @@ def _select_state(self): help="This field is used in order to define in which timezone the backend will work.", ) + chunk_size = fields.Integer( + string="Chunk Size", + default=-1, + help="This field is used in order to define the chunk size for the backend.", + ) + def _check_connection(self): self.ensure_one() with self.work_on(self._name) as work: diff --git a/connector_extension/models/binding/binding.py b/connector_extension/models/binding/binding.py index 
af410c999..f539cd40d 100644 --- a/connector_extension/models/binding/binding.py +++ b/connector_extension/models/binding/binding.py @@ -26,7 +26,8 @@ def import_batch(self, backend_record, domain=None, delayed=True): domain = [] with backend_record.work_on(self._name) as work: importer = work.component( - usage=delayed and "batch.delayed.importer" or "batch.direct.importer" + usage="batch.direct.importer" + # usage=delayed and "batch.delayed.importer" or "batch.direct.importer" ) return importer.run(domain) @@ -37,10 +38,10 @@ def export_batch(self, backend_record, domain=None, delayed=True): domain = [] with backend_record.work_on(self._name) as work: exporter = work.component( - # usage="batch.direct.exporter" - usage=delayed - and "batch.delayed.exporter" - or "batch.direct.exporter" + usage="batch.direct.exporter" + # usage=delayed + # and "batch.delayed.exporter" + # or "batch.direct.exporter" ) return exporter.run(domain=domain) @@ -72,7 +73,8 @@ def import_chunk( """Prepare the chunk import of records modified on Backend""" with backend_record.work_on(self._name) as work: importer = work.component( - usage=delayed and "delayed.chunk.importer" or "direct.chunk.importer" + # usage=delayed and "delayed.chunk.importer" or "direct.chunk.importer" + "chunk.direct.importer" ) return importer.run(domain, offset, chunk_size) From 59dd9e2fbfa5aebf8869da0fa3c0a208e9216512 Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Tue, 20 Jun 2023 14:42:47 +0200 Subject: [PATCH 06/68] [IMP] connector_extension: split into specific connectors --- connector_extension/__manifest__.py | 7 ------- connector_extension/components/__init__.py | 5 ----- connector_extension/components/importer.py | 2 +- 3 files changed, 1 insertion(+), 13 deletions(-) diff --git a/connector_extension/__manifest__.py b/connector_extension/__manifest__.py index 185a257ee..56ed024fd 100644 --- a/connector_extension/__manifest__.py +++ b/connector_extension/__manifest__.py @@ -10,12 +10,5 @@ "license": 
"AGPL-3", "category": "Connector", "website": "https://github.com/nuobit/odoo-addons", - "external_dependencies": { - "python": [ - "requests", - "mysql-connector-python", - ], - }, "depends": ["connector"], - "data": [], } diff --git a/connector_extension/components/__init__.py b/connector_extension/components/__init__.py index cf55a8d8d..bd5b1ccd6 100644 --- a/connector_extension/components/__init__.py +++ b/connector_extension/components/__init__.py @@ -1,9 +1,4 @@ from . import adapter -from . import adapter_sql -from . import adapter_mysql -from . import adapter_mssql -from . import adapter_woocommerce -from . import adapter_wordpress from . import binder from . import exporter from . import importer diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index defd33702..b726627e8 100644 --- a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -57,7 +57,7 @@ def _import_dependency( self, external_id, binding_model, - # sync_date, + sync_date, external_data=None, importer=None, adapter=None, From d1de5ecf814a66722849e42641bf211fb71bdf3e Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Tue, 20 Jun 2023 14:43:27 +0200 Subject: [PATCH 07/68] [ADD] connector_extension_sql: new module --- connector_extension/components/adapter_sql.py | 404 ------------------ 1 file changed, 404 deletions(-) delete mode 100644 connector_extension/components/adapter_sql.py diff --git a/connector_extension/components/adapter_sql.py b/connector_extension/components/adapter_sql.py deleted file mode 100644 index cc8b6bcbf..000000000 --- a/connector_extension/components/adapter_sql.py +++ /dev/null @@ -1,404 +0,0 @@ -# Copyright NuoBiT Solutions - Eric Antones -# Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
-import datetime -import logging -import random - -import mysql.connector as mysql # pylint: disable=W7936 - -from odoo import _ -from odoo.exceptions import ValidationError - -from odoo.addons.component.core import AbstractComponent - -_logger = logging.getLogger(__name__) - - -class SQLAdapterCRUD(AbstractComponent): - _name = "base.backend.sql.adapter.crud" - _inherit = "base.backend.adapter.crud" - - _date_format = "%Y-%m-%d" - _datetime_format = "%Y-%m-%dT%H:%M:%SZ" - - def get_version(self): - conn = self.conn() - cr = conn.cursor() - cr.execute(self._sql_version) - version = cr.fetchone()[0] - cr.close() - conn.close() - return version - - def _escape(self, s): - return s.replace("'", "").replace('"', "") - - def _check_schema(self): - conn = self.conn() - cr = conn.cursor() - # self._execute(cr, sql, params) - # TODO: REVIEW Get the operation (read) automatically from def _exec - self._execute("read", cr, self._sql_schema, (self.schema,)) - # cr.execute(self._sql_schema, (self.schema,)) - headers = [desc[0] for desc in cr.description] - res = [] - for row in cr: - res.append(dict(zip(headers, row))) - cr.close() - conn.close() - # schema_exists = self._exec_sql(self._sql_schema, (self.schema,)) - if not res: - raise mysql.InternalError("The schema %s does not exist" % self.schema) - - def _convert_dict(self, data, to_backend=True): - if not isinstance(data, dict): - raise ValidationError(_("Expected a dictionary, found %s") % data) - for k, v in data.items(): - if isinstance(v, datetime.datetime): - if to_backend: - func = self.backend_record.tz_to_local - else: - func = self.backend_record.tz_to_utc - data[k] = func(v) - return data - - def _execute(self, op, cr, sql, params=None): - return cr.execute(sql, params=params) - - def _exec(self, op, *args, **kwargs): - func = getattr(self, "_exec_%s" % op) - return func(*args, **kwargs) - - # read/search - def _exec_read(self, filters=None, fields=None): - if not filters: - filters = [] - sql = self._sql_read - 
if self.schema: - # check if schema exists to avoid injection - self._check_schema() - sql = sql % dict(schema=self.schema) - - values = [] - if filters or fields: - # TODO: Is it really necessary? - sql_l = ["with t as (%s)" % sql] - fields_l = fields or ["*"] - if fields: - if self._id: - for f in self._id: - if f not in fields_l: - fields_l.append(f) - sql_l.append("select %s from t" % (", ".join(fields_l),)) - - if filters: - where = [] - for k, operator, v in filters: - if v is None: - if operator == "=": - operator = "is" - elif operator == "!=": - operator = "is not" - else: - raise Exception( - "Operator '%s' is not implemented on NULL values" - % operator - ) - where.append("%s %s %%s" % (k, operator)) - values.append(v) - sql_l.append("where %s" % (" and ".join(where),)) - - sql = " ".join(sql_l) - - # res = self._exec_sql(sql, tuple(values)) - conn = self.conn() - cr = conn.cursor() - self._execute("read", cr, sql, tuple(values)) - # cr.execute(sql, tuple(values)) - headers = [desc[0] for desc in cr.description] - res = [] - for row in cr: - res.append(dict(zip(headers, row))) - cr.close() - conn.close() - - filter_keys_s = {e[0] for e in filters} - # TODO: Modified with getattr - id_fields = self.binder_for().get_id_fields(in_field=False) - if id_fields and set(id_fields).issubset(filter_keys_s): - self._check_uniq(res, id_fields) - - return res - - def _check_uniq(self, data, id_fields): - uniq = set() - for rec in data: - id_t = tuple([rec[f] for f in id_fields]) - if id_t in uniq: - raise ValidationError( - _("Unexpected error: ID duplicated: %(ID_FIELDS)s - %(ID_T)s") - % { - "ID_FIELDS": id_fields, - "ID_T": id_t, - } - ) - uniq.add(id_t) - - def search_read(self, filters=None): - """Search records according to some criterias - and returns a list of ids - - :rtype: list - """ - _logger.debug("method search_read, sql %s, filters %s", self._sql_read, filters) - if not filters: - filters = [] - res = self._exec("read", filters=filters) - - return res 
- - def search(self, filters=None): - """Search records according to some criterias - and returns a list of ids - - :rtype: list - """ - _logger.debug("method search, sql %s, filters %s", self._sql_read, filters) - if not filters: - filters = [] - res = self.search_read(filters=filters) - - res = [tuple([x[f] for f in self._id]) for x in res] - - return res - - # read - # pylint: disable=W8106 - def read(self, _id, attributes=None): - """Returns the information of a record - - :rtype: dict - """ - _logger.debug( - "method read, sql %s id %s, attributes %s", self._sql_read, _id, attributes - ) - id_list = list(self.binder_for().id2dict(_id, in_field=False).items()) - filters = [(key, "=", value) for key, value in id_list] - res = self._exec("read", filters=filters) - if len(res) > 1: - raise mysql.IntegrityError( - "Unexpected error: Returned more the one rows:\n%s" % ("\n".join(res),) - ) - return res and res[0] or [] - - # write - # pylint: disable=W8106 - def write(self, _id, values_d): - return self._exec("write", _id, values_d) - - def _check_write_result(self, conn, cr, id_d): - count = cr.rowcount - # On mysql if record is not modified the rowcount is 0 - # if count == 0: - # raise Exception( - # _( - # "Impossible to update external record with ID '%s': " - # "Register not found on Backend" - # ) - # % (id_d,) - # ) - if count > 1: - conn.rollback() - raise mysql.IntegrityError( - "Unexpected error: Returned more the one row with ID: %s" % (id_d,) - ) - return count - - def _exec_write(self, _id, values_d): # pylint: disable=W8106 - """Update records on the external system""" - _logger.debug( - "method write, sql %s id %s, values %s", self._sql_update, _id, values_d - ) - params_dict = {} - if not values_d: - return 0 - if self.schema: - # check if schema exists to avoid injection - self._check_schema() - params_dict["schema"] = self.schema - - # get id fieldnames and values - id_d = self.binder_for().id2dict(_id, in_field=False) - # fix same field on set 
and on where, change set fields - qset_map_d = {} - for k, v in values_d.items(): - if k in id_d: - while True: - k9 = "%s%i" % (k, random.randint(0, 999)) - if k9 not in values_d and k9 not in id_d: - qset_map_d[k] = (k9, v) - break - else: - qset_map_d[k] = (k, v) - - # get the set data - qset_l = [] - for k, (k9, _v) in qset_map_d.items(): - qset_l.append("%(field)s = %%(%(field9)s)s" % dict(field=k, field9=k9)) - qset = "%s" % (", ".join(qset_l),) - params_dict["qset"] = qset - - # prepare the sql with base strucrture - sql = self._sql_update % params_dict - - # prepare params - params = dict(id_d) - for k9, v in qset_map_d.values(): - params[k9] = v - params = self._convert_dict(params, to_backend=True) - - conn = self.conn() - cr = conn.cursor() - self._execute("write", cr, sql, params) - # cr.execute(sql, params) # pylint: disable=E8103 - count = self._check_write_result(conn, cr, id_d) - conn.commit() - cr.close() - conn.close() - - return count - - # create - # pylint: disable=W8106 - def create(self, values_d): - return self._exec("create", values_d) - - def _exec_create(self, values_d): # pylint: disable=W8106 - """Create a record on the external system""" - _logger.debug("method create, model %s, attributes %s", self._name, values_d) - - params_dict = {} - if not values_d: - return 0 - if self.schema: - # check if schema exists to avoid injection - self._check_schema() - params_dict["schema"] = self.schema - - # build the sql parts - fields, params, phvalues = [], [], [] - for k, v in values_d.items(): - fields.append(k) - params.append(v) - if v is None or isinstance(v, (str, datetime.date, datetime.datetime)): - phvalues.append("%s") - elif isinstance(v, (int, float)): - phvalues.append("%s") - else: - raise NotImplementedError("Type %s" % type(v)) - - # build retvalues - id_list = list(self.binder_for().id2dict(id, in_field=False).keys()) - retvalues = id_list - params_dict["fields"] = ", ".join(fields) - params_dict["phvalues"] = ", ".join(phvalues) 
- params_dict["retvalues"] = ", ".join(retvalues) - - # prepare the sql with base structure - sql = self._sql_insert % dict(params_dict) - - # executem la insercio - res = [] - try: - conn = self.conn() - cr = conn.cursor() - # self._execute(cr, sql, params) - self._execute("create", cr, sql, tuple(params)) - # cr.execute(sql, tuple(params)) - headers = [desc[0] for desc in cr.description] - for row in cr: - res.append(dict(zip(headers, row))) - conn.commit() - cr.close() - conn.close() - - # res = self._exec_sql(sql, tuple(params), commit=True) - except mysql.IntegrityError as e: - # Workaround: Because of Microsoft SQL Server - # removes the spaces on varchars on comparisions - # where the varchar belongs to a PK or UK. - # This produces a no existent IntegrityViolation, - # so we need to make user aware of that in order to solve the issue. - if e.args[0] == 2627: - raise ValidationError( - _( - "%s\nThis can be caused by a Microsoft SQL Server " - "missbehaviour where a field belonging to a PK or " - "UK cannot have trailing spaces." - "If it has any then a fake IntegrityViolation can be thrown. " - "Please check that there's no other " - "record on the database with the same key " - "fields but with/without trailing spaces, " - "then fix it and try again." - ) - % (e,) - ) from e - - raise - - if not res: - raise Exception(_("Unexpected!! 
Nothing created: %s") % (values_d,)) - elif len(res) > 1: - raise Exception( - "Unexpected!!: Returned more the one row:%s - %s" - % ( - res, - values_d, - ) - ) - - return res[0] - - # delete - def delete(self, _id): - return self._exec("delete", _id) - - def _exec_delete(self, _id): - """ - Delete the record with _id - """ - _logger.debug("method delete, model %s, is %s", self._name, _id) - sql = self._sql_delete - if self.schema: - # check if schema exists to avoid injection - self._check_schema() - sql = sql % dict(schema=self.schema) - - # get id fieldnames and values - params = dict(zip(self._id, _id)) - params = self._convert_dict(params, to_backend=True) - - conn = self.conn() - cr = conn.cursor() - self._execute("delete", cr, sql, params) - # cr.execute(sql, params) # pylint: disable=E8103 - count = cr.rowcount - if count == 0: - raise Exception( - _( - "Impossible to delete external record with ID '%s': " - "Register not found on Backend" - ) - % (params,) - ) - elif count > 1: - conn.rollback() - raise mysql.IntegrityError( - "Unexpected error: Returned more the one row with ID: %s" % (params,) - ) - conn.commit() - cr.close() - conn.close() - - return count From 2aeca941a77b3274ab35b0b0a119c3fe406a8d79 Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Tue, 20 Jun 2023 14:43:20 +0200 Subject: [PATCH 08/68] [ADD] connector_extension_mysql: new module --- .../components/adapter_mysql.py | 41 ------------------- 1 file changed, 41 deletions(-) delete mode 100644 connector_extension/components/adapter_mysql.py diff --git a/connector_extension/components/adapter_mysql.py b/connector_extension/components/adapter_mysql.py deleted file mode 100644 index 8a9586660..000000000 --- a/connector_extension/components/adapter_mysql.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright NuoBiT Solutions - Eric Antones -# Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
-import logging - -from odoo import _ -from odoo.exceptions import ValidationError - -from odoo.addons.component.core import AbstractComponent - -_logger = logging.getLogger(__name__) - - -class MySQLAdapterCRUD(AbstractComponent): - _name = "base.backend.mysql.adapter.crud" - _inherit = "base.backend.sql.adapter.crud" - - _sql_version = "select version()" - - def _execute(self, op, cr, sql, params): - if not sql: - raise ValidationError(_("Empty SQL statement")) - sql_l = sql.split(";") - if op == "create": - if len(sql_l) > 2: - raise ValidationError(_("Unexpected SQL statement")) - if len(sql_l) == 2: - if not "last_insert_id()".lower() in sql_l[1].lower(): - raise ValidationError( - _("Only last_insert_id() is allowed in insert statement.") - ) - else: - if len(sql_l) != 1: - raise ValidationError( - _("Only one query is allowed on non insert SQL statements.") - ) - - res = super()._execute(op, cr, sql_l[0], params) - if op == "create": - res = cr.execute(sql_l[1]) - return res From 7c544042f29de14903549d6c26bd54ab2fc0dd11 Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Tue, 20 Jun 2023 14:43:11 +0200 Subject: [PATCH 09/68] [ADD] connector_extension_mssql: new module --- .../components/adapter_mssql.py | 37 ------------------- 1 file changed, 37 deletions(-) delete mode 100644 connector_extension/components/adapter_mssql.py diff --git a/connector_extension/components/adapter_mssql.py b/connector_extension/components/adapter_mssql.py deleted file mode 100644 index 95773499c..000000000 --- a/connector_extension/components/adapter_mssql.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright NuoBiT Solutions - Eric Antones -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
-import logging - -from odoo.addons.component.core import AbstractComponent - -_logger = logging.getLogger(__name__) - - -class MSSQLAdapterCRUD(AbstractComponent): - _name = "base.backend.mssql.adapter.crud" - _inherit = "base.backend.sql.adapter.crud" - - _sql_version = "select @@version" - - # def _execute(self, op, cr, sql, params): - # if not sql: - # raise ValidationError(_("Empty SQL statement")) - # sql_l = sql.split(";") - # if op == "create": - # if len(sql_l) > 2: - # raise ValidationError(_("Unexpected SQL statement")) - # if len(sql_l) == 2: - # if not "last_insert_id()".lower() in sql_l[1].lower(): - # raise ValidationError( - # _("Only last_insert_id() is allowed in insert statement.") - # ) - # else: - # if len(sql_l) != 1: - # raise ValidationError( - # _("Only one query is allowed on non insert SQL statements.") - # ) - # - # res = super()._execute(op, cr, sql_l[0], params) - # if op == "create": - # res = cr.execute(sql_l[1]) - # return res From 6d464f0df1e860cd44d311feb2f6a743cb3d69ad Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Thu, 22 Jun 2023 19:32:09 +0200 Subject: [PATCH 10/68] [IMP] connector_extension: pre-commit stuff --- connector_extension/models/backend/backend.py | 1 - 1 file changed, 1 deletion(-) diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py index 31c185d6a..4996ddfd0 100644 --- a/connector_extension/models/backend/backend.py +++ b/connector_extension/models/backend/backend.py @@ -53,7 +53,6 @@ def _select_state(self): ) chunk_size = fields.Integer( - string="Chunk Size", default=-1, help="This field is used in order to define the chunk size for the backend.", ) From 3a39b85596ce4c15c01982501e51cd834c6ee866 Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Tue, 27 Jun 2023 10:13:17 +0200 Subject: [PATCH 11/68] [FIX] connector_extension: sync_date parameter not found --- connector_extension/components/importer.py | 5 +---- 1 file changed, 1 insertion(+), 4 
deletions(-) diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index b726627e8..53cdf6bf3 100644 --- a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -157,10 +157,7 @@ def run(self, external_id, sync_date, external_data=None, external_fields=None): ) # import the missing linked resources - self._import_dependencies( - external_data, - # sync_date - ) + self._import_dependencies(external_data, sync_date) # map_data # this one knows how to convert backend data to odoo data From 8894dc56bd4ff2705b50cf1b0e1cca4ccbde568a Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 26 Jun 2023 10:58:03 +0200 Subject: [PATCH 12/68] [FIX] connector_extension: unwrap_binding return a binding instead of a normal record --- connector_extension/components/binder.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index b955620a5..3a1dc0ec9 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -565,11 +565,15 @@ def to_binding_from_internal_key(self, relation): return self.model def unwrap_binding(self, binding): - if isinstance(binding, models.BaseModel): - odoo_object_ids = binding.mapped(lambda x: x[self._odoo_field].id) - else: - odoo_object_ids = [binding] - return self.model.browse(odoo_object_ids) + if not isinstance(binding, models.BaseModel): + if isinstance(binding, (tuple, list)): + odoo_object_ids = binding + elif isinstance(binding, int): + odoo_object_ids = [binding] + else: + raise ValidationError(_("Invalid binding type")) + binding = self.model.browse(odoo_object_ids) + return binding.mapped(self._odoo_field) def check_external_id(self, external_id, relation): assert external_id, ( From c9e013dc765856c4b9b49c118ec80a5f16fbd677 Mon Sep 17 00:00:00 2001 From: KNVx Date: Thu, 6 Jul 2023 11:55:39 +0200 Subject: [PATCH 
13/68] [FIX] connector_extension: binding is nor returned on run when it's created --- connector_extension/components/importer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index 53cdf6bf3..1f82b3167 100644 --- a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -199,7 +199,7 @@ def run(self, external_id, sync_date, external_data=None, external_fields=None): for_create=True, fields=external_fields, **opts ) binder.bind_import(external_data, values, sync_date, for_create=True) - self._create(values) + binding = self._create(values) _logger.debug("%d created from Backend %s", binding, external_id) # last update From 7c898cb3410081dc74463b90dd7b5180031f4069 Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Sun, 9 Jul 2023 22:36:24 +0200 Subject: [PATCH 14/68] [FIX] connector_extension: batch delayed disabled, enable again --- connector_extension/models/binding/binding.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/connector_extension/models/binding/binding.py b/connector_extension/models/binding/binding.py index f539cd40d..8d912e3f7 100644 --- a/connector_extension/models/binding/binding.py +++ b/connector_extension/models/binding/binding.py @@ -26,8 +26,7 @@ def import_batch(self, backend_record, domain=None, delayed=True): domain = [] with backend_record.work_on(self._name) as work: importer = work.component( - usage="batch.direct.importer" - # usage=delayed and "batch.delayed.importer" or "batch.direct.importer" + usage=delayed and "batch.delayed.importer" or "batch.direct.importer" ) return importer.run(domain) @@ -38,10 +37,7 @@ def export_batch(self, backend_record, domain=None, delayed=True): domain = [] with backend_record.work_on(self._name) as work: exporter = work.component( - usage="batch.direct.exporter" - # usage=delayed - # and "batch.delayed.exporter" - # or 
"batch.direct.exporter" + usage=delayed and "batch.delayed.exporter" or "batch.direct.exporter" ) return exporter.run(domain=domain) From 5a096fcbbbcd0a42fe962bcc5af907e4676c8e87 Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 10 Jul 2023 12:33:16 +0200 Subject: [PATCH 15/68] [FIX] connector_extension: names of components are wrong. delayed.chunk.importer and direct.chunk.importer are used instead of chunk.delayed.importer and chunk.direct.importer + chunk delayed disabled, enable again --- connector_extension/models/binding/binding.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/connector_extension/models/binding/binding.py b/connector_extension/models/binding/binding.py index 8d912e3f7..8c9e29379 100644 --- a/connector_extension/models/binding/binding.py +++ b/connector_extension/models/binding/binding.py @@ -69,8 +69,10 @@ def import_chunk( """Prepare the chunk import of records modified on Backend""" with backend_record.work_on(self._name) as work: importer = work.component( - # usage=delayed and "delayed.chunk.importer" or "direct.chunk.importer" - "chunk.direct.importer" + usage=delayed + and "chunk.delayed.importer" + or "chunk.direct.importer" + # "chunk.direct.importer" ) return importer.run(domain, offset, chunk_size) From 28c884af0b976df3f3154817287b0ffa01a51dbc Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Wed, 12 Jul 2023 10:37:35 +0200 Subject: [PATCH 16/68] [IMP] connector_extension: added sync offset because sometimes in some backends some registers are not imported due to erratic backend behaviour --- connector_extension/models/backend/backend.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py index 4996ddfd0..8d69f08ee 100644 --- a/connector_extension/models/backend/backend.py +++ b/connector_extension/models/backend/backend.py @@ -57,6 +57,13 @@ def _select_state(self): help="This field is used in order to define 
the chunk size for the backend.", ) + sync_offset = fields.Integer( + required=True, + default=0, + help="Minutes to start the synchronization " + "before(negative)/after(positive) the last one", + ) + def _check_connection(self): self.ensure_one() with self.work_on(self._name) as work: From 501292ef484f6d6695765ea682fe5555dbd7cdab Mon Sep 17 00:00:00 2001 From: KNVx Date: Wed, 19 Jul 2023 12:49:47 +0200 Subject: [PATCH 17/68] [IMP] connector_extension: removed useless code (__init__ _delay_import, _should_import and _force_binding_creation) --- connector_extension/components/exporter.py | 33 ---------------------- 1 file changed, 33 deletions(-) diff --git a/connector_extension/components/exporter.py b/connector_extension/components/exporter.py index 6b8daa26d..04b803549 100644 --- a/connector_extension/components/exporter.py +++ b/connector_extension/components/exporter.py @@ -21,34 +21,9 @@ class GenericDirectExporter(AbstractComponent): _usage = "record.direct.exporter" - # def __init__(self, working_context): - # super().__init__(working_context) - # self.binding = None - # self.external_id = None - - def _should_import(self, binding): - return False - - # def _delay_import(self, binding): - # """Schedule an import of the record. - # - # Adapt in the sub-classes when the model is not imported - # using ``import_record``. 
- # """ - # # force is True because the sync_date will be more recent - # # so the import would be skipped - # assert self.external_id - # binding.with_delay().import_record( - # self.backend_record, self.external_id, force=True - # ) - def _mapper_options(self, binding): return {"binding": binding} - # def _force_binding_creation(self, relation): - # if not self.binding: - # self.binding = self.binder.wrap_record(relation, force=True) - def run(self, relation, internal_fields=None): """Run the synchronization @@ -65,17 +40,9 @@ def run(self, relation, internal_fields=None): binding = ( self.binder_for().to_binding_from_internal_key(relation) or binding ) - # try: - # should_import = self._should_import(binding) - # except IDMissingInBackend: - # # self.external_id = None - # should_import = False - # if should_import: - # self._delay_import(binding) if not binding: internal_fields = None # should be created with all the fields - # TODO: pongo el relation porque si no tiene binding no podemos hacer comprobaciones if self._has_to_skip(binding, relation): return _("Nothing to export") From 846a7dbed3184a6c6b691a39e5b12da74334ef6e Mon Sep 17 00:00:00 2001 From: KNVx Date: Wed, 19 Jul 2023 13:10:35 +0200 Subject: [PATCH 18/68] [IMP] connector_extension: Refactor get_external_dict_ids to remove function check_external_id --- connector_extension/components/binder.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index 3a1dc0ec9..de861116f 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -1,5 +1,6 @@ # Copyright 2013-2017 Camptocamp SA # Copyright NuoBiT Solutions - Eric Antones +# Copyright NuoBiT Solutions - Kilian Niubo # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) """ @@ -127,8 +128,6 @@ def dict2id(self, _dict, in_field=True, alt_field=False): if len(f_splitted) > 2: 
raise NotImplementedError(_("Multiple dot notation is not supported")) res.append(val) - # if len(res) == 1: - # return res[0] return res def is_complete_id(self, _id, in_field=True): @@ -575,18 +574,15 @@ def unwrap_binding(self, binding): binding = self.model.browse(odoo_object_ids) return binding.mapped(self._odoo_field) - def check_external_id(self, external_id, relation): - assert external_id, ( - "Unexpected error on %s:" - "The backend id cannot be obtained." - "At this stage, the backend record should have been already linked via " - "._export_dependencies. " % relation._name - ) - def get_external_dict_ids(self, relation, check_external_id=True): external_id = self.to_external(relation, wrap=False) if check_external_id: - self.check_external_id(external_id, relation) + assert external_id, ( + "Unexpected error on %s:" + "The backend id cannot be obtained." + "At this stage, the backend record should have been already linked via " + "._export_dependencies. " % relation._name + ) return self.id2dict(external_id, in_field=False) From 706b130a9079c47a9360d03493841199c5ae855a Mon Sep 17 00:00:00 2001 From: KNVx Date: Thu, 20 Jul 2023 16:35:04 +0200 Subject: [PATCH 19/68] [FIX] connector_extension: filter by hash is mapping 'veloconnect_hash' --- connector_extension/components/adapter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/connector_extension/components/adapter.py b/connector_extension/components/adapter.py index 6916b4402..65ae27625 100644 --- a/connector_extension/components/adapter.py +++ b/connector_extension/components/adapter.py @@ -112,14 +112,14 @@ def chunks(self, lst, n): for i in range(0, len(lst), n): yield lst[i : i + n] - def _filter_by_hash(self, data): + def _filter_by_hash(self, data, hash_field): indexed_data = {x["Hash"]: x for x in data} odoo_hashes = set( self.model.search( [ ("backend_id", "=", self.backend_record.id), ] - ).mapped("veloconnect_hash") + ).mapped(hash_field) ) changed_hashes = 
set(indexed_data.keys()) - odoo_hashes return [indexed_data[x] for x in changed_hashes] From 8f955b2e4e7d56aad156a79084a962ff584a7946 Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 31 Jul 2023 14:21:51 +0200 Subject: [PATCH 20/68] [IMP] connector_extension: Included page_size on backend --- connector_extension/models/backend/backend.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py index 8d69f08ee..df80a98d4 100644 --- a/connector_extension/models/backend/backend.py +++ b/connector_extension/models/backend/backend.py @@ -49,12 +49,18 @@ def _select_state(self): string="Timezone", required=True, default=lambda self: self._context.get("tz") or self.env.user.tz or "UTC", - help="This field is used in order to define in which timezone the backend will work.", + help="This field is used to define in which timezone the backend will work.", ) chunk_size = fields.Integer( default=-1, - help="This field is used in order to define the chunk size for the backend.", + help="This field is used to define the chunk size to import from the backend.", + ) + page_size = fields.Integer( + string="Page Size", + default=-1, + help="This field is used in order to define the " + "number of records imported at the same time.", ) sync_offset = fields.Integer( From 4817295e7a3d836b08a5a0ce11a8ee78b7c40f8e Mon Sep 17 00:00:00 2001 From: KNVx Date: Wed, 26 Jul 2023 09:01:04 +0200 Subject: [PATCH 21/68] [IMP] connector_extension: included parameter unwrap in dict2id to return id without list --- connector_extension/components/binder.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index de861116f..2d7f4be01 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -108,7 +108,7 @@ def dict2id2dict(self, _dict, 
in_field=True, alt_field=False): alt_field=alt_field, ) - def dict2id(self, _dict, in_field=True, alt_field=False): + def dict2id(self, _dict, in_field=True, alt_field=False, unwrap=False): """Giving a dict, return the values of the internal or external fields :param _dict: Dict (usually binder) to extract internal or external fields :param in_field: with True value, _internal_field defined in binder are used. @@ -128,6 +128,11 @@ def dict2id(self, _dict, in_field=True, alt_field=False): if len(f_splitted) > 2: raise NotImplementedError(_("Multiple dot notation is not supported")) res.append(val) + if unwrap: + if len(res) == 1: + return res[0] + else: + raise ValidationError(_("It's not possible to unwrap a composite id")) return res def is_complete_id(self, _id, in_field=True): From a06678fb8999f551d5dc3c7d8ca05c0d320e0418 Mon Sep 17 00:00:00 2001 From: KNVx Date: Thu, 20 Jul 2023 16:34:11 +0200 Subject: [PATCH 22/68] [IMP] connector_extension: tools are included in common dir --- connector_extension/common/tools.py | 121 ++++++++++++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 connector_extension/common/tools.py diff --git a/connector_extension/common/tools.py b/connector_extension/common/tools.py new file mode 100644 index 000000000..5ab52ed5b --- /dev/null +++ b/connector_extension/common/tools.py @@ -0,0 +1,121 @@ +# Copyright NuoBiT Solutions - Eric Antones +# Copyright NuoBiT Solutions - Kilian Niubo +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl) +import datetime +import hashlib +import unicodedata + +from odoo import _ +from odoo.exceptions import ValidationError + + +def list2hash(_list): + _hash = hashlib.sha256() + for e in _list: + if isinstance(e, int): + e9 = str(e) + elif isinstance(e, str): + e9 = e + elif isinstance(e, float): + e9 = str(e) + elif e is None: + e9 = "" + else: + raise Exception("Unexpected type for a key: type %s" % type(e)) + _hash.update(e9.encode("utf8")) + return _hash.hexdigest() + + 
+def domain_to_normalized_dict(self, domain): + """Convert, if possible, standard Odoo domain to a dictionary. + To do so it is necessary to convert all operators to + equal '=' operator. + """ + res = {} + for elem in domain: + if len(elem) != 3: + raise ValidationError(_("Wrong domain clause format %s") % elem) + field, op, value = elem + if op == "=": + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(value) + elif op == "!=": + if not isinstance(value, bool): + raise ValidationError( + _("Not equal operation not supported for non boolean fields") + ) + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(not value) + elif op == "in": + if not isinstance(value, (tuple, list)): + raise ValidationError( + _( + "Operator '%(OPERATOR)s' only supports tuples or lists, not %(TYPES)s" + ) + % { + "OPERATOR": op, + "TYPES": type(value), + } + ) + if field in res: + raise ValidationError(_("Duplicated field %s") % field) + res[field] = self._normalize_value(value) + elif op in (">", ">=", "<", "<="): + if not isinstance(value, (datetime.date, datetime.datetime, int)): + raise ValidationError( + _("Type {} not supported for operator {}").format(type(value), op) + ) + if op in (">", "<"): + adj = 1 + if isinstance(value, (datetime.date, datetime.datetime)): + adj = datetime.timedelta(days=adj) + if op == "<": + op, value = "<=", value - adj + else: + op, value = ">=", value + adj + + res[field] = self._normalize_value(value) + else: + raise ValidationError(_("Operator %s not supported") % op) + + return res + + +def convert_item_to_json(item, ct, namespace): + jitem = {} + for path, func, key, multi in ct: + if key in jitem: + raise ValidationError(_("Key %s already exists") % key) + value = item.xpath(path, namespaces=namespace) + if not value: + jitem[key] = None + else: + if multi: + jitem[key] = func(value) + else: + if len(value) > 1: + raise 
ValidationError(_("Multiple values found for '%s'") % path) + else: + jitem[key] = func(value[0]) + return jitem + + +def convert_to_json(data, ct, namespace): + res = [] + for d in data: + res.append(convert_item_to_json(d, ct, namespace)) + return res + + +def slugify(value): + if not value: + return None + return ( + unicodedata.normalize("NFKD", value) + .encode("ascii", "ignore") + .decode("ascii") + .lower() + .replace(" ", "") + ) From 19e8304120f65e43bba61570d96b37f14a0d47e3 Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 24 Jul 2023 14:35:17 +0200 Subject: [PATCH 23/68] [IMP] connector_extension: Included trim_domain on tools. This function takes a domain and return the same domain without spaces. --- connector_extension/common/tools.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/connector_extension/common/tools.py b/connector_extension/common/tools.py index 5ab52ed5b..29bbb0440 100644 --- a/connector_extension/common/tools.py +++ b/connector_extension/common/tools.py @@ -119,3 +119,21 @@ def slugify(value): .lower() .replace(" ", "") ) + + +def trim_domain(domain): + trimmed_domain = [] + for d in domain: + if isinstance(d, (list, tuple)): + if len(d) == 3 and isinstance(d[2], str): + trimmed_domain.append((d[0], d[1], d[2].strip())) + elif len(d) == 3 and isinstance(d[2], (list, tuple)): + trimmed_value = [ + value.strip() if isinstance(value, str) else value for value in d[2] + ] + trimmed_domain.append((d[0], d[1], trimmed_value)) + else: + trimmed_domain.append(d) + else: + raise Exception("Unexpected domain format: %s" % d) + return trimmed_domain From 4936151113d5736e5df0cf6efd99eada9c6b85b8 Mon Sep 17 00:00:00 2001 From: KNVx Date: Tue, 1 Aug 2023 09:29:07 +0200 Subject: [PATCH 24/68] [IMP] connector_extension: hooks created on importer on _create and _update to validate data. 
--- connector_extension/components/importer.py | 36 +++++++++++++++++++--- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index 1f82b3167..510dedf36 100644 --- a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -130,10 +130,6 @@ def _must_skip(self, binding): def _mapper_options(self, binding, sync_date): return {"binding": binding, "sync_date": sync_date} - def _create(self, values): - """Create the Internal record""" - return self.model.with_context(connector_no_export=True).create(values) - def run(self, external_id, sync_date, external_data=None, external_fields=None): if not external_data: external_data = {} @@ -191,7 +187,7 @@ def run(self, external_id, sync_date, external_data=None, external_fields=None): # if exists, we update it values = internal_data.values(fields=external_fields, **opts) binder.bind_import(external_data, values, sync_date) - binding.with_context(connector_no_export=True).write(values) + self._update(binding, values) _logger.debug("%d updated from Backend %s", binding, external_id) else: # or we create it @@ -205,3 +201,33 @@ def run(self, external_id, sync_date, external_data=None, external_fields=None): # last update self._after_import(binding) return True + + def _validate_update_data(self, data): + """Check if the values to import are correct + + Pro-actively check before the ``Model.update`` if some fields + are missing or invalid + + Raise `InvalidDataError` + """ + return + + def _update(self, binding, data): + """Update the Internal record""" + self._validate_update_data(data) + return binding.with_context(connector_no_export=True).write(data) + + def _validate_create_data(self, data): + """Check if the values to import are correct + + Pro-actively check before the ``Model.create`` if some fields + are missing or invalid + + Raise `InvalidDataError` + """ + return + + def 
_create(self, data): + """Create the Internal record""" + self._validate_create_data(data) + return self.model.with_context(connector_no_export=True).create(data) From d268926e2c9b128a4021578bc3bfe8224dc55f3e Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Mon, 2 Oct 2023 12:16:01 +0200 Subject: [PATCH 25/68] [IMP] connector_extension: pre-commit stuff --- connector_extension/components/adapter_woocommerce.py | 2 +- connector_extension/components/adapter_wordpress.py | 6 ++++-- connector_extension/models/backend/backend.py | 1 - 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/connector_extension/components/adapter_woocommerce.py b/connector_extension/components/adapter_woocommerce.py index 142b1a715..4a72bd221 100644 --- a/connector_extension/components/adapter_woocommerce.py +++ b/connector_extension/components/adapter_woocommerce.py @@ -62,7 +62,7 @@ def _exec_post(self, resource, *args, **kwargs): try: res = res.json() except Exception as e: - raise ValidationError(e) + raise ValidationError(e) from e return res def _exec_put(self, resource, *args, **kwargs): diff --git a/connector_extension/components/adapter_wordpress.py b/connector_extension/components/adapter_wordpress.py index 580e05c85..bba3f2565 100644 --- a/connector_extension/components/adapter_wordpress.py +++ b/connector_extension/components/adapter_wordpress.py @@ -30,13 +30,14 @@ def _exec_get(self, resource, *args, **kwargs): self.backend_record.consumer_key, self.backend_record.consumer_secret, ), + timeout=120, ) if res.status_code in [400, 401, 403, 404, 500]: raise ValidationError(res.json().get("message")) try: res = res.json() except Exception as e: - raise ValidationError(e) + raise ValidationError(e) from e return res def _exec_post(self, resource, *args, **kwargs): @@ -60,6 +61,7 @@ def _exec_post(self, resource, *args, **kwargs): data=data, auth=auth or (self.backend_record.consumer_key, self.backend_record.consumer_secret), + timeout=120, ) if res.status_code in [400, 
401, 403, 404, 500]: raise ValidationError(res.json().get("message")) @@ -68,7 +70,7 @@ def _exec_post(self, resource, *args, **kwargs): if checksum: res["checksum"] = checksum except Exception as e: - raise ValidationError(e) + raise ValidationError(e) from e return res def _exec_put(self, resource, *args, **kwargs): diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py index df80a98d4..2d1f83c37 100644 --- a/connector_extension/models/backend/backend.py +++ b/connector_extension/models/backend/backend.py @@ -57,7 +57,6 @@ def _select_state(self): help="This field is used to define the chunk size to import from the backend.", ) page_size = fields.Integer( - string="Page Size", default=-1, help="This field is used in order to define the " "number of records imported at the same time.", From cacc2f9f61e369389afb4f4a94b71b26c58f0347 Mon Sep 17 00:00:00 2001 From: KNVx Date: Wed, 19 Jul 2023 12:50:11 +0200 Subject: [PATCH 26/68] [IMP] connector_extension: Generic Batch Exporters created on exporter --- connector_extension/components/exporter.py | 53 ++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/connector_extension/components/exporter.py b/connector_extension/components/exporter.py index 04b803549..807d38e2b 100644 --- a/connector_extension/components/exporter.py +++ b/connector_extension/components/exporter.py @@ -245,3 +245,56 @@ def _update(self, external_id, data): # special check on data before export self._validate_update_data(data) return self.backend_adapter.write(external_id, data) + + +class GenericBatchExporter(AbstractComponent): + """Generic Synchronizer for importing data from backend to Odoo""" + + _name = "generic.batch.exporter" + _inherit = "base.exporter" + + _usage = "batch.direct.importer" + + def run(self, domain=None): + if not domain: + domain = [] + # Run the batch synchronization + relation_model = self.binder_for(self.model._name).unwrap_model() + for relation in ( + 
self.env[relation_model].with_context(active_test=False).search(domain) + ): + self._export_record(relation) + + def _export_record(self, external_id): + """Export a record directly or delay the export of the record. + + Method to implement in sub-classes. + """ + raise NotImplementedError + + +class BatchDirectExporter(AbstractComponent): + """Import the records directly, without delaying the jobs.""" + + _name = "generic.batch.direct.exporter" + _inherit = "generic.batch.exporter" + + _usage = "batch.direct.exporter" + + def _export_record(self, relation): + """export the record directly""" + self.model.export_record(self.backend_record, relation) + + +class BatchDelayedExporter(AbstractComponent): + """Delay import of the records""" + + _name = "generic.batch.delayed.exporter" + _inherit = "generic.batch.exporter" + + _usage = "batch.delayed.exporter" + + def _export_record(self, relation, job_options=None): + """Delay the export of the records""" + delayable = self.model.with_delay(**job_options or {}) + delayable.export_record(self.backend_record, relation) From e06f072a1e8f07a1feb78c693181326710727f37 Mon Sep 17 00:00:00 2001 From: KNVx Date: Tue, 1 Aug 2023 09:15:31 +0200 Subject: [PATCH 27/68] [IMP] connector_extension: adapter, backend, binding and mapper create a new abstract model to be inhereted --- connector_extension/components/adapter.py | 5 ++-- connector_extension/components/mapper.py | 26 +++++++++++++------ connector_extension/models/backend/backend.py | 2 +- connector_extension/models/binding/binding.py | 4 +-- 4 files changed, 24 insertions(+), 13 deletions(-) diff --git a/connector_extension/components/adapter.py b/connector_extension/components/adapter.py index 65ae27625..27033f563 100644 --- a/connector_extension/components/adapter.py +++ b/connector_extension/components/adapter.py @@ -10,8 +10,9 @@ from odoo.addons.component.core import AbstractComponent -class BackendAdapter(AbstractComponent): - _inherit = "base.backend.adapter" +class 
ConnectorExtensionAdapterCRUD(AbstractComponent): + _name = "connector.extension.adapter.crud" + _inherit = "base.backend.adapter.crud" _date_format = "%Y-%m-%d" _datetime_format = "%Y-%m-%dT%H:%M:%SZ" diff --git a/connector_extension/components/mapper.py b/connector_extension/components/mapper.py index 2425ee55b..ed7dac3b8 100644 --- a/connector_extension/components/mapper.py +++ b/connector_extension/components/mapper.py @@ -36,7 +36,8 @@ def required_mapping(func): # return func -class Mapper(AbstractComponent): +class ConnectorExtensionMapper(AbstractComponent): + _name = "connector.extension.mapper" _inherit = "base.mapper" def _apply_with_options(self, map_record): @@ -134,7 +135,8 @@ def get_target_fields(self, map_record, fields): return list(set(result.values())) -class BaseChildMapper(AbstractComponent): +class ConnectorExtensionChildMapper(AbstractComponent): + _name = "connector.extension.child.mapper" _inherit = "base.map.child" def get_all_items(self, mapper, items, parent, to_attr, options): @@ -162,8 +164,9 @@ def classify_items(self, mapped, to_attr, options): raise NotImplementedError -class ImportMapChild(AbstractComponent): - _inherit = "base.map.child.import" +class ConnectorExtensionMapChildImport(AbstractComponent): + _name = "connector.extension.map.child.import" + _inherit = ["base.map.child.import", "connector.extension.child.mapper"] def _child_bind(self, map_record, item_values): binder = self.binder_for() @@ -236,8 +239,9 @@ def keygen(_id): return mapped -class ExportMapChild(AbstractComponent): - _inherit = "base.map.child.export" +class ConnectorExtensionMapChildExport(AbstractComponent): + _name = "connector.extension.map.child.export" + _inherit = ["base.map.child.export", "connector.extension.child.mapper"] def _child_bind(self, map_record, item_values): # TODO: implement this method @@ -249,8 +253,9 @@ def classify_items(self, mapped, to_attr, options): # TODO: create a fix on OCA repo and remove this class -class 
ExportMapper(AbstractComponent): - _inherit = "base.export.mapper" +class ConnectorExtensionExportMapper(AbstractComponent): + _name = "connector.extension.export.mapper" + _inherit = ["base.export.mapper", "connector.extension.mapper"] def _map_direct(self, record, from_attr, to_attr): """Apply the ``direct`` mappings. @@ -287,6 +292,11 @@ def check_external_id(self, external_id, relation): ) +class ConnectorExtensionImportMapper(AbstractComponent): + _name = "connector.extension.import.mapper" + _inherit = ["base.import.mapper", "connector.extension.mapper"] + + class DeleteMapChild(AbstractComponent): """:py:class:`MapChild` for the Deleters""" diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py index 2d1f83c37..735e44fef 100644 --- a/connector_extension/models/backend/backend.py +++ b/connector_extension/models/backend/backend.py @@ -22,7 +22,7 @@ def _tz_get(self): class ConnectorBackend(models.AbstractModel): - # _name = "connector.backend.extension" + _name = "connector.extension.backend" _inherit = "connector.backend" _description = "Connector Backend Extension" diff --git a/connector_extension/models/binding/binding.py b/connector_extension/models/binding/binding.py index 8c9e29379..37a84dad3 100644 --- a/connector_extension/models/binding/binding.py +++ b/connector_extension/models/binding/binding.py @@ -5,9 +5,9 @@ from odoo import api, fields, models -class ExternalBinding(models.AbstractModel): +class ConnectorExtensionExternalBinding(models.AbstractModel): + _name = "connector.extension.external.binding" _inherit = "external.binding" - # by default we consider sync_date as the import one @api.model From 9ddea8c1dd38557ae8ab1e1f6618a67322faa840 Mon Sep 17 00:00:00 2001 From: KNVx Date: Tue, 1 Aug 2023 09:51:24 +0200 Subject: [PATCH 28/68] [IMP] connector_extension: Created generic batch, chunk and record on importer --- connector_extension/components/importer.py | 176 ++++++++++++++++++++- 1 file 
changed, 175 insertions(+), 1 deletion(-) diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index 510dedf36..73ad2a8e9 100644 --- a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -7,7 +7,8 @@ import psycopg2 -from odoo import _ +from odoo import _, fields +from odoo.exceptions import ValidationError from odoo.addons.component.core import AbstractComponent from odoo.addons.connector.exception import IDMissingInBackend @@ -231,3 +232,176 @@ def _create(self, data): """Create the Internal record""" self._validate_create_data(data) return self.model.with_context(connector_no_export=True).create(data) + + +class GenericBatchImporter(AbstractComponent): + """Generic Synchronizer for importing data from backend to Odoo""" + + _name = "generic.batch.importer" + _inherit = "base.importer" + + _usage = "batch.direct.importer" + + def run(self, domain=None): + """Run the synchronization""" + if domain is None: + domain = [] + chunk_size = self.backend_record.chunk_size + if chunk_size > 0: + total_items = self.backend_adapter.get_total_items(domain=domain) + if total_items == 0: + return + offset = 0 + while total_items > 0: + if chunk_size > total_items: + chunk_size = total_items + self._import_chunk(domain, offset, chunk_size) + offset += chunk_size + total_items -= chunk_size + else: + sync_date = fields.Datetime.now() + data, len_items = self.backend_adapter.search_read(domain) + for d in data: + external_id = self.binder_for().dict2id(d, in_field=False) + self._import_record(external_id, sync_date, external_data=d) + + def _import_chunk(self, domain, offset, chunk_size): + raise NotImplementedError + + def _import_batch(self, domain): + raise NotImplementedError + + def _import_record(self, external_id, sync_date, external_data=None): + """Import a record directly or delay the import of the record. + + Method to implement in sub-classes. 
+ """ + raise NotImplementedError + + +class BatchDirectImporter(AbstractComponent): + """Import the records directly, without delaying the jobs.""" + + _name = "generic.batch.direct.importer" + _inherit = "generic.batch.importer" + + _usage = "batch.direct.importer" + + def _import_chunk(self, domain, offset, chunk_size): + self.model.import_chunk(self.backend_record, domain, offset, chunk_size) + + def _import_batch(self, domain): + self.model.import_batch(self.backend_record, domain) + + def _import_record(self, external_id, sync_date, external_data=None): + """Import the record directly""" + if external_data is None: + external_data = {} + self.model.import_record( + self.backend_record, external_id, sync_date, external_data=external_data + ) + + +class BatchDelayedImporter(AbstractComponent): + """Delay import of the records""" + + _name = "generic.batch.delayed.importer" + _inherit = "generic.batch.importer" + + _usage = "batch.delayed.importer" + + def _import_chunk(self, domain, offset, chunk_size): + delayable = self.model.with_delay() + delayable.import_chunk(self.backend_record, domain, offset, chunk_size) + + def _import_batch(self, domain): + delayable = self.model.with_delay() + delayable.import_batch(self.backend_record, domain) + + def _import_record( + self, external_id, sync_date, external_data=None, job_options=None + ): + """Delay the import of the records""" + if external_data is None: + external_data = {} + delayable = self.model.with_delay(**job_options or {}) + delayable.import_record( + self.backend_record, external_id, sync_date, external_data=external_data + ) + + +class GenericChunkImporter(AbstractComponent): + """The role of a ChunkImporter is to search for a list of + items to import, then it can either import them directly or delay + the import of each item separately. 
+ """ + + _name = "generic.chunk.importer" + _inherit = "base.importer" + + def run(self, domain, offset, chunk_size): + """Run the synchronization""" + sync_date = fields.Datetime.now() + data, len_items = self.backend_adapter.search_read(domain, offset, chunk_size) + chunk_size -= len_items + offset += len_items + if chunk_size < 0: + raise ValidationError(_("Unexpected Error: Chunk_size is < 0")) + if chunk_size != 0: + self.get_batch_importer()._import_chunk(domain, offset, chunk_size) + for d in data: + external_id = self.binder_for().dict2id(d, in_field=False) + self._import_record(external_id, sync_date, external_data=d) + + def get_batch_importer(self): + raise NotImplementedError + + def _import_record(self, external_id, sync_date, external_data=None): + """Import a record directly or delay the import of the record. + + Method to implement in sub-classes. + """ + raise NotImplementedError + + +class ChunkDirectImporter(AbstractComponent): + """Import the records directly, without delaying the jobs.""" + + _name = "generic.chunk.direct.importer" + _inherit = "generic.chunk.importer" + + _usage = "chunk.direct.importer" + + def get_batch_importer(self): + return self.component(usage="batch.direct.importer") + + def _import_record(self, external_id, sync_date, external_data=None): + """Import the record directly""" + if external_data is None: + external_data = {} + self.model.import_record( + self.backend_record, external_id, sync_date, external_data=external_data + ) + + +class ChunkDelayedImporter(AbstractComponent): + """Delay import of the records""" + + _name = "generic.chunk.delayed.importer" + _inherit = "generic.chunk.importer" + + _usage = "chunk.delayed.importer" + + def get_batch_importer(self): + return self.component(usage="batch.delayed.importer") + + def _import_record( + self, external_id, sync_date, external_data=None, job_options=None + ): + """Delay the import of the records""" + if external_data is None: + external_data = {} + delayable = 
self.model.with_delay(**job_options or {}) + delayable.import_record( + self.backend_record, external_id, sync_date, external_data=external_data + ) From 39c03f7c026eb106cb79a8349ac40072baa296d4 Mon Sep 17 00:00:00 2001 From: KNVx Date: Wed, 9 Aug 2023 14:18:39 +0200 Subject: [PATCH 29/68] [IMP] Connector_extension: Included hook on _lock to be inhereted --- connector_extension/components/exporter.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/connector_extension/components/exporter.py b/connector_extension/components/exporter.py index 807d38e2b..b01b07d4a 100644 --- a/connector_extension/components/exporter.py +++ b/connector_extension/components/exporter.py @@ -78,6 +78,9 @@ def run(self, relation, internal_fields=None): def _after_export(self): """Can do several actions after exporting a record on the backend""" + def _get_sql_lock(self, record): + return "SELECT id FROM %s WHERE ID = %%s FOR UPDATE NOWAIT" % record._table + def _lock(self, record): """Lock the binding record. @@ -94,7 +97,7 @@ def _lock(self, record): on the binding record it has to export. 
""" - sql = "SELECT id FROM %s WHERE ID = %%s FOR UPDATE NOWAIT" % record._table + sql = self._get_sql_lock(record) try: self.env.cr.execute(sql, (record.id,), log_exceptions=False) except psycopg2.OperationalError as e: From 281f5635961dac5f8a3cfcfe63351b66834ddbce Mon Sep 17 00:00:00 2001 From: KNVx Date: Wed, 9 Aug 2023 14:19:14 +0200 Subject: [PATCH 30/68] [IMP] Connector_extension: commit is done before binding creation --- connector_extension/components/binder.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index 2d7f4be01..6d3c2431c 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -18,6 +18,7 @@ import psycopg2 +import odoo from odoo import _, fields, models, tools from odoo.exceptions import ValidationError @@ -228,7 +229,6 @@ def wrap_binding(self, relation, binding_field=None, binding_extra_vals=None): .sudo() .create(_bind_values) ) - if not tools.config["test_enable"]: self.env.cr.commit() # pylint: disable=invalid-commit else: @@ -329,7 +329,7 @@ def bind_export(self, external_data, relation): external_id = self.dict2id(external_data, in_field=False) with self._retry_unique_violation(): - return self.model.with_context(connector_no_export=True).create( + binding = self.model.with_context(connector_no_export=True).create( { self._backend_field: self.backend_record.id, self._odoo_field: relation_id, @@ -338,6 +338,14 @@ def bind_export(self, external_data, relation): **self._additional_external_binding_fields(external_data), } ) + # Eager commit to avoid having 2 jobs + # exporting at the same time. The constraint + # will pop if an other job already created + # the same binding. It will be caught and + # raise a RetryableJobError. 
+ if not odoo.tools.config["test_enable"]: + self.env.cr.commit() # pylint: disable=E8102 + return binding def _additional_external_binding_fields(self, external_data): return {} From 581c19868b5fe8f61d0d3e7400f2e285bd04550a Mon Sep 17 00:00:00 2001 From: KNVx Date: Wed, 11 Oct 2023 17:23:20 +0200 Subject: [PATCH 31/68] [IMP] connector_extension: License is updated to LPGL-3.0. --- connector_extension/README.rst | 6 +++--- connector_extension/__manifest__.py | 6 +++--- connector_extension/common/tools.py | 2 +- connector_extension/components/adapter.py | 2 +- connector_extension/components/binder.py | 2 +- connector_extension/components/exporter.py | 5 +++-- connector_extension/components/importer.py | 2 +- connector_extension/components/mapper.py | 2 +- connector_extension/models/backend/backend.py | 2 +- connector_extension/models/binding/binding.py | 2 +- connector_extension/static/description/index.html | 2 +- 11 files changed, 17 insertions(+), 16 deletions(-) diff --git a/connector_extension/README.rst b/connector_extension/README.rst index 889e249eb..d72dee574 100644 --- a/connector_extension/README.rst +++ b/connector_extension/README.rst @@ -10,9 +10,9 @@ Connector Extension .. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png :target: https://odoo-community.org/page/development-status :alt: Beta -.. |badge2| image:: https://img.shields.io/badge/licence-AGPL--3-blue.png - :target: http://www.gnu.org/licenses/agpl-3.0-standalone.html - :alt: License: AGPL-3 +.. |badge2| image:: https://img.shields.io/badge/licence-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 .. 
|badge3| image:: https://img.shields.io/badge/github-nuobit%2Fodoo--addons-lightgray.png?logo=github :target: https://github.com/nuobit/odoo-addons/tree/16.0/connector_extension :alt: nuobit/odoo-addons diff --git a/connector_extension/__manifest__.py b/connector_extension/__manifest__.py index 56ed024fd..f4ede4d27 100644 --- a/connector_extension/__manifest__.py +++ b/connector_extension/__manifest__.py @@ -1,13 +1,13 @@ # Copyright NuoBiT Solutions - Kilian Niubo # Copyright NuoBiT Solutions - Eric Antones -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl) +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) { "name": "Connector Extension", "summary": "This module extends the connector module", - "version": "16.0.1.0.1", + "version": "16.0.1.0.2", "author": "NuoBiT Solutions SL", - "license": "AGPL-3", + "license": "LGPL-3", "category": "Connector", "website": "https://github.com/nuobit/odoo-addons", "depends": ["connector"], diff --git a/connector_extension/common/tools.py b/connector_extension/common/tools.py index 29bbb0440..c8759190b 100644 --- a/connector_extension/common/tools.py +++ b/connector_extension/common/tools.py @@ -1,6 +1,6 @@ # Copyright NuoBiT Solutions - Eric Antones # Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl) +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) import datetime import hashlib import unicodedata diff --git a/connector_extension/components/adapter.py b/connector_extension/components/adapter.py index 27033f563..e236d54d1 100644 --- a/connector_extension/components/adapter.py +++ b/connector_extension/components/adapter.py @@ -1,6 +1,6 @@ # Copyright NuoBiT Solutions - Eric Antones # Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
+# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) import datetime diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index 6d3c2431c..d9a449779 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -1,7 +1,7 @@ # Copyright 2013-2017 Camptocamp SA # Copyright NuoBiT Solutions - Eric Antones # Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html) +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) """ Binders diff --git a/connector_extension/components/exporter.py b/connector_extension/components/exporter.py index b01b07d4a..54fed56cb 100644 --- a/connector_extension/components/exporter.py +++ b/connector_extension/components/exporter.py @@ -1,5 +1,6 @@ -# Copyright 2021 Eric Antones -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +# Copyright NuoBiT Solutions - Eric Antones +# Copyright NuoBiT Solutions - Kilian Niubo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) import logging from contextlib import contextmanager diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index 73ad2a8e9..dfd13875a 100644 --- a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -1,6 +1,6 @@ # Copyright NuoBiT Solutions - Eric Antones # Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl) +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) import logging from contextlib import contextmanager diff --git a/connector_extension/components/mapper.py b/connector_extension/components/mapper.py index ed7dac3b8..5f4c054d1 100644 --- a/connector_extension/components/mapper.py +++ b/connector_extension/components/mapper.py @@ -1,6 +1,6 @@ # Copyright NuoBiT Solutions - Eric Antones # Copyright NuoBiT Solutions - 
Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) import collections import logging from itertools import zip_longest diff --git a/connector_extension/models/backend/backend.py b/connector_extension/models/backend/backend.py index 735e44fef..b3f243dcd 100644 --- a/connector_extension/models/backend/backend.py +++ b/connector_extension/models/backend/backend.py @@ -1,6 +1,6 @@ # Copyright NuoBiT Solutions - Eric Antones # Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) import logging import pytz diff --git a/connector_extension/models/binding/binding.py b/connector_extension/models/binding/binding.py index 37a84dad3..bd6c4e702 100644 --- a/connector_extension/models/binding/binding.py +++ b/connector_extension/models/binding/binding.py @@ -1,6 +1,6 @@ # Copyright NuoBiT Solutions - Eric Antones # Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) from odoo import api, fields, models diff --git a/connector_extension/static/description/index.html b/connector_extension/static/description/index.html index 00c512f25..087b1b080 100644 --- a/connector_extension/static/description/index.html +++ b/connector_extension/static/description/index.html @@ -367,7 +367,7 @@

Connector Extension

!! This file is generated by oca-gen-addon-readme !! !! changes will be overwritten. !! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! --> -

Beta License: AGPL-3 nuobit/odoo-addons

+

Beta License: LGPL-3 nuobit/odoo-addons

The “connector_extension” module is an add-on for the Odoo ERP system that enhances the functionality of the base “connector” module. This extension provides additional features, tools, and integrations, making it easier for developers to create, manage, and maintain connections between Odoo and various third-party systems, APIs, or services.

The module aims to simplify the connector development process by providing a robust and flexible framework. The “connector_extension” module allows developers to focus on implementing specific business logic and requirements, while the extension handles common tasks.

    From 08ed1cdf523e5491cdc9bff592789565b0259462 Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 16 Oct 2023 12:11:39 +0200 Subject: [PATCH 32/68] [IMP] connector_extension: removed adapters woocommerce and wordpress from connector_extension --- .../components/adapter_woocommerce.py | 82 ----------------- .../components/adapter_wordpress.py | 90 ------------------- 2 files changed, 172 deletions(-) delete mode 100644 connector_extension/components/adapter_woocommerce.py delete mode 100644 connector_extension/components/adapter_wordpress.py diff --git a/connector_extension/components/adapter_woocommerce.py b/connector_extension/components/adapter_woocommerce.py deleted file mode 100644 index 4a72bd221..000000000 --- a/connector_extension/components/adapter_woocommerce.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright NuoBiT Solutions - Eric Antones -# Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). - -import logging - -from odoo.exceptions import ValidationError - -from odoo.addons.component.core import AbstractComponent - -_logger = logging.getLogger(__name__) - - -class WooCommerceAdapterCRUD(AbstractComponent): - _name = "base.backend.woocommerce.adapter.crud" - _inherit = "base.backend.adapter.crud" - - # TODO: manage retryable_errors - def _exec(self, op, resource, *args, **kwargs): - func = getattr(self, "_exec_%s" % op) - return func(resource, *args, **kwargs) - - def get_total_items(self, resource, domain=None): - filters_values = self._get_filters_values() - real_domain, common_domain = self._extract_domain_clauses( - domain, filters_values - ) - res = self.wcapi.get( - resource, - params=self._domain_to_normalized_dict(real_domain), - ) - total_items = int(res.headers.get("X-WP-Total")) - return total_items - - def _get_filters_values(self): - return ["per_page", "page"] - - def _exec_get(self, resource, *args, **kwargs): - domain = [] - if "domain" in kwargs: - domain = kwargs.pop("domain") - 
filters_values = self._get_filters_values() - real_domain, common_domain = self._extract_domain_clauses( - domain, filters_values - ) - res = self.wcapi.get( - resource, - *args, - **kwargs, - params=self._domain_to_normalized_dict(real_domain), - ) - res = res.json() - if isinstance(res, dict): - res = [res] - res = self._filter(res, common_domain) - return res - - def _exec_post(self, resource, *args, **kwargs): - res = self.wcapi.post(resource, *args, **kwargs) - if res.status_code in [400, 401, 403, 404, 500]: - raise ValidationError(res.json().get("message")) - try: - res = res.json() - except Exception as e: - raise ValidationError(e) from e - return res - - def _exec_put(self, resource, *args, **kwargs): - return self.wcapi.put(resource, *args, **kwargs) - - def _exec_delete(self, resource, *args, **kwargs): - raise NotImplementedError() - - def _exec_options(self, resource, *args, **kwargs): - raise NotImplementedError() - - def get_version(self): - system_status = self._exec("get", "system_status") - version = False - if system_status: - version = system_status.get("environment").get("version") - return version diff --git a/connector_extension/components/adapter_wordpress.py b/connector_extension/components/adapter_wordpress.py deleted file mode 100644 index bba3f2565..000000000 --- a/connector_extension/components/adapter_wordpress.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright NuoBiT Solutions - Kilian Niubo -# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
- -import logging - -import requests - -from odoo import _ -from odoo.exceptions import ValidationError - -from odoo.addons.component.core import AbstractComponent - -_logger = logging.getLogger(__name__) - - -class WordpressAdapterCRUD(AbstractComponent): - _name = "base.backend.wordpress.adapter.crud" - _inherit = "base.backend.adapter.crud" - - # TODO: manage retryable_errors - def _exec(self, op, resource, *args, **kwargs): - func = getattr(self, "_exec_%s" % op) - return func(resource, *args, **kwargs) - - def _exec_get(self, resource, *args, **kwargs): - url = self.backend_record.url + "/wp-json/wp/v2/" + resource - res = requests.get( - url=url, - auth=( - self.backend_record.consumer_key, - self.backend_record.consumer_secret, - ), - timeout=120, - ) - if res.status_code in [400, 401, 403, 404, 500]: - raise ValidationError(res.json().get("message")) - try: - res = res.json() - except Exception as e: - raise ValidationError(e) from e - return res - - def _exec_post(self, resource, *args, **kwargs): - # TODO: this auth method is working like this because if we call - # the export from the woocommerce backend, - # the credentials are in the wordpress backend. 
Refactor - auth = False - if "wordpress_backend_id" in self.backend_record: - backend = self.backend_record.wordpress_backend_id - auth = (backend.consumer_key, backend.consumer_secret) - data_aux = kwargs.pop("data", {}) - headers = data_aux.pop("headers", {}) - data = data_aux.pop("data", {}) - checksum = False - if data_aux.get("checksum"): - checksum = data_aux.pop("checksum") - url = self.backend_record.url + "/wp-json/wp/v2/" + resource - res = requests.post( - url=url, - headers=headers, - data=data, - auth=auth - or (self.backend_record.consumer_key, self.backend_record.consumer_secret), - timeout=120, - ) - if res.status_code in [400, 401, 403, 404, 500]: - raise ValidationError(res.json().get("message")) - try: - res = res.json() - if checksum: - res["checksum"] = checksum - except Exception as e: - raise ValidationError(e) from e - return res - - def _exec_put(self, resource, *args, **kwargs): - return self.wpapi.put(resource, *args, **kwargs) - - def _exec_delete(self, resource, *args, **kwargs): - raise NotImplementedError() - - def _exec_options(self, resource, *args, **kwargs): - raise NotImplementedError() - - def get_version(self): - settings = self._exec("get", "settings") - if settings.get("title"): - return "Wordpress '%s' connected" % settings.get("title") - else: - raise ValidationError(_("Wordpress not connected")) From b62502828d8bd3db104c17f140eec94880df444b Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 16 Oct 2023 11:50:24 +0200 Subject: [PATCH 33/68] [IMP] connector_extension: wrap_binding function on binder has been removed --- connector_extension/components/binder.py | 47 +----------------------- 1 file changed, 2 insertions(+), 45 deletions(-) diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index d9a449779..85c85d4a6 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -19,7 +19,7 @@ import psycopg2 import odoo -from odoo import _, 
fields, models, tools +from odoo import _, fields, models from odoo.exceptions import ValidationError from odoo.addons.component.core import AbstractComponent @@ -201,47 +201,6 @@ def _find_binding(self, relation, binding_extra_vals=None): binding.ensure_one() return binding - def wrap_binding(self, relation, binding_field=None, binding_extra_vals=None): - if not binding_extra_vals: - binding_extra_vals = {} - if not relation: - return - - if binding_field is None: - if not self._default_binding_field: - raise Exception( - "_binding_field defined on synchronizer class is mandatory" - ) - binding_field = self._default_binding_field - - wrap = relation._name != self.model._name - if wrap and hasattr(relation, binding_field): - binding = self._find_binding(relation, binding_extra_vals) - if not binding: - _bind_values = { - self._odoo_field: relation.id, - self._backend_field: self.backend_record.id, - } - _bind_values.update(binding_extra_vals) - with self._retry_unique_violation(): - binding = ( - self.model.with_context(connector_no_export=True) - .sudo() - .create(_bind_values) - ) - if not tools.config["test_enable"]: - self.env.cr.commit() # pylint: disable=invalid-commit - else: - binding = relation - - if not self._is_binding(binding): - raise Exception( - "Expected binding '%s' and found regular model '%s'" - % (self.model._name, relation._name) - ) - - return binding - def to_internal(self, external_id, unwrap=False): """Give the Odoo recordset for an external ID @@ -388,8 +347,6 @@ def wrap_record(self, relation): """Give the real record :param relation: Odoo real record for which we want to get its binding - :param force: if this is True and not binding found it creates an - empty binding :return: binding corresponding to the real record or empty recordset if the record has no binding """ @@ -411,7 +368,7 @@ def wrap_record(self, relation): "The object '%s' is already wrapped, it's already a binding object. 
" "You can only wrap Odoo objects" ) - % (relation) + % relation ) binding = self.model.with_context(active_test=False).search( From c6f8eb4e415d602a5bb73d112f90d5b015da61e9 Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 16 Oct 2023 11:52:57 +0200 Subject: [PATCH 34/68] [IMP] connector_extension: _usage in batch exporter and batch importer has been renamed --- connector_extension/components/exporter.py | 2 +- connector_extension/components/importer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/connector_extension/components/exporter.py b/connector_extension/components/exporter.py index 54fed56cb..74c1ddb41 100644 --- a/connector_extension/components/exporter.py +++ b/connector_extension/components/exporter.py @@ -257,7 +257,7 @@ class GenericBatchExporter(AbstractComponent): _name = "generic.batch.exporter" _inherit = "base.exporter" - _usage = "batch.direct.importer" + _usage = "batch.exporter" def run(self, domain=None): if not domain: diff --git a/connector_extension/components/importer.py b/connector_extension/components/importer.py index dfd13875a..c805fd6e1 100644 --- a/connector_extension/components/importer.py +++ b/connector_extension/components/importer.py @@ -240,7 +240,7 @@ class GenericBatchImporter(AbstractComponent): _name = "generic.batch.importer" _inherit = "base.importer" - _usage = "batch.direct.importer" + _usage = "batch.importer" def run(self, domain=None): """Run the synchronization""" From eb4e0ecfcb4d8e311899a9ae9f4cc74fb623d2f2 Mon Sep 17 00:00:00 2001 From: KNVx Date: Mon, 16 Oct 2023 12:01:47 +0200 Subject: [PATCH 35/68] [IMP] connector_extension: run of exporter refactor. 
New decorator atomic created to lock relation to do the _run logic in a different transaction --- connector_extension/common/database.py | 86 ++++++++++++++ connector_extension/components/__init__.py | 1 + connector_extension/components/binder.py | 36 +++--- connector_extension/components/core.py | 53 +++++++++ connector_extension/components/exporter.py | 124 ++++++++++++--------- 5 files changed, 229 insertions(+), 71 deletions(-) create mode 100644 connector_extension/common/database.py create mode 100644 connector_extension/components/core.py diff --git a/connector_extension/common/database.py b/connector_extension/common/database.py new file mode 100644 index 000000000..135e9147b --- /dev/null +++ b/connector_extension/common/database.py @@ -0,0 +1,86 @@ +# Copyright NuoBiT Solutions - Kilian Niubo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +import hashlib +import logging +import struct + +_logger = logging.getLogger(__name__) + + +def get_int_lock(lock): + hasher = hashlib.sha1(str(lock).encode()) + # pg_lock accepts an int8 so we build an hash composed with + # contextual information and we throw away some bits + return struct.unpack("q", hasher.digest()[:8]) + + +def session_pg_try_advisory_lock(env, lock): + """Try to acquire a Postgres session advisory lock. + + The function tries to acquire a lock, returns a boolean indicating + if it could be obtained or not. An acquired lock is released at the + advisory unlock. + + A typical use is to acquire a lock at the beginning of an importer + to prevent 2 jobs to do the same import at the same time. Since the + record doesn't exist yet, we can't put a lock on a record, so we put + an advisory lock. 
+ + Example: + - Job 1 imports Partner A + - Job 2 imports Partner B + - Partner A has a category X which happens not to exist yet + - Partner B has a category X which happens not to exist yet + - Job 1 import category X as a dependency + - Job 2 import category X as a dependency + + Since both jobs are executed concurrently, they both create a record + for category X so we have duplicated records. With this lock: + + - Job 1 imports Partner A, it acquires a lock for this partner + - Job 2 imports Partner B, it acquires a lock for this partner + - Partner A has a category X which happens not to exist yet + - Partner B has a category X which happens not to exist yet + - Job 1 import category X as a dependency, it acquires a lock for + this category + - Job 2 import category X as a dependency, try to acquire a lock + but can't, Job 2 is retried later, and when it is retried, it + sees the category X created by Job 1. + + The lock is acquired until the end of the transaction. + + Usage example: + + :: + + lock_name = 'import_record({}, {}, {}, {})'.format( + self.backend_record._name, + self.backend_record.id, + self.model._name, + self.external_id, + ) + if pg_try_advisory_lock(lock_name): + # do sync + else: + raise RetryableJobError('Could not acquire advisory lock', + seconds=2, + ignore_retry=True) + + :param env: the Odoo Environment + :param lock: The lock name. Can be anything convertible to a + string. It needs to represents what should not be synchronized + concurrently so usually the string will contain at least: the + action, the backend type, the backend id, the model name, the + external id + :return True/False whether lock was acquired. 
+ """ + int_lock = get_int_lock(lock) + env.cr.execute("SELECT pg_try_advisory_lock(%s);", (int_lock)) + return env.cr.fetchone()[0] + + +def session_pg_advisory_unlock(env, lock): + int_lock = get_int_lock(lock) + env.cr.execute("SELECT pg_advisory_unlock(%s);", (int_lock)) + return env.cr.fetchone()[0] diff --git a/connector_extension/components/__init__.py b/connector_extension/components/__init__.py index bd5b1ccd6..befc988bf 100644 --- a/connector_extension/components/__init__.py +++ b/connector_extension/components/__init__.py @@ -1,5 +1,6 @@ from . import adapter from . import binder +from . import core from . import exporter from . import importer from . import mapper diff --git a/connector_extension/components/binder.py b/connector_extension/components/binder.py index 85c85d4a6..6b31c85d9 100644 --- a/connector_extension/components/binder.py +++ b/connector_extension/components/binder.py @@ -287,24 +287,24 @@ def bind_export(self, external_data, relation): relation_id = relation external_id = self.dict2id(external_data, in_field=False) - with self._retry_unique_violation(): - binding = self.model.with_context(connector_no_export=True).create( - { - self._backend_field: self.backend_record.id, - self._odoo_field: relation_id, - self._sync_date_field: fields.Datetime.now(), - **self.id2dict(external_id, in_field=True), - **self._additional_external_binding_fields(external_data), - } - ) - # Eager commit to avoid having 2 jobs - # exporting at the same time. The constraint - # will pop if an other job already created - # the same binding. It will be caught and - # raise a RetryableJobError. 
- if not odoo.tools.config["test_enable"]: - self.env.cr.commit() # pylint: disable=E8102 - return binding + binding = self.model.with_context(connector_no_export=True).create( + { + self._backend_field: self.backend_record.id, + self._odoo_field: relation_id, + self._sync_date_field: fields.Datetime.now(), + **self.id2dict(external_id, in_field=True), + **self._additional_external_binding_fields(external_data), + } + ) + + # Eager commit to avoid having 2 jobs + # exporting at the same time. The constraint + # will pop if an other job already created + # the same binding. It will be caught and + # raise a RetryableJobError. + if not odoo.tools.config["test_enable"]: + self.env.cr.commit() # pylint: disable=E8102 + return binding def _additional_external_binding_fields(self, external_data): return {} diff --git a/connector_extension/components/core.py b/connector_extension/components/core.py new file mode 100644 index 000000000..df251d948 --- /dev/null +++ b/connector_extension/components/core.py @@ -0,0 +1,53 @@ +# Copyright NuoBiT Solutions - Kilian Niubo +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +from odoo.addons.component.core import AbstractComponent +from odoo.addons.queue_job.exception import RetryableJobError + +from ..common.database import session_pg_advisory_unlock, session_pg_try_advisory_lock + + +class BaseConnectorComponent(AbstractComponent): + _inherit = "base.connector" + + def session_advisory_lock_or_retry(self, lock, retry_seconds=1): + """Acquire a Postgres session advisory lock or retry job + + When the lock cannot be acquired, it raises a + :exc:`odoo.addons.queue_job.exception.RetryableJobError` so the job + is retried after n ``retry_seconds``. + + Usage example: + + .. 
code-block:: python + + lock_name = 'import_record({}, {}, {}, {})'.format( + self.backend_record._name, + self.backend_record.id, + self.model._name, + self.external_id, + ) + self.session_advisory_lock_or_retry(lock_name, retry_seconds=2) + + See :func:`odoo.addons.connector.connector.session_pg_try_advisory_lock` for + details. + + :param lock: The lock name. Can be anything convertible to a + string. It needs to represent what should not be synchronized + concurrently, usually the string will contain at least: the + action, the backend name, the backend id, the model name, the + external id + :param retry_seconds: number of seconds after which a job should + be retried when the lock cannot be acquired. + """ + if not session_pg_try_advisory_lock(self.env, lock): + raise RetryableJobError( + "Could not acquire advisory lock", + seconds=retry_seconds, + ignore_retry=True, + ) + + def session_pg_advisory_unlock( + self, + lock, + ): + return session_pg_advisory_unlock(self.env, lock) diff --git a/connector_extension/components/exporter.py b/connector_extension/components/exporter.py index 74c1ddb41..c7e6071e0 100644 --- a/connector_extension/components/exporter.py +++ b/connector_extension/components/exporter.py @@ -6,7 +6,8 @@ import psycopg2 -from odoo import _, fields +import odoo +from odoo import _, api, fields from odoo.addons.component.core import AbstractComponent from odoo.addons.connector.exception import RetryableJobError @@ -25,56 +26,82 @@ class GenericDirectExporter(AbstractComponent): def _mapper_options(self, binding): return {"binding": binding} - def run(self, relation, internal_fields=None): - """Run the synchronization - - :param binding: binding record to export - """ - now_fmt = fields.Datetime.now() + def _get_lock_name(self, relation): + lock_name = "export_record({}, {}, {}, {})".format( + self.backend_record._name, + self.backend_record.id, + relation._name, + relation.id, + ) + return lock_name + + def atomic(func): # noqa: B902 + def 
wrapper(self, now_fmt, relation, always, internal_fields): + lock_name = self._get_lock_name(relation) + self.session_advisory_lock_or_retry(lock_name) + try: + with odoo.registry(self.env.cr.dbname).cursor() as new_cr: + new_env = api.Environment(new_cr, self.env.uid, self.env.context) + new_backend_record = new_env[self.backend_record._name].browse( + self.backend_record.id + ) + with new_backend_record.work_on(self.model._name) as work: + new_self = work.component(self._usage) + result = func( + new_self, now_fmt, relation, always, internal_fields + ) + finally: + self.session_pg_advisory_unlock(lock_name) + return result + + return wrapper + + @atomic + def _run(self, now_fmt, relation, always, internal_fields): result = None - # get binding from real record binding = self.binder_for().wrap_record(relation) - # if not binding, try to link to existing external record with - # the same alternate key and create/update binding if not binding: binding = ( self.binder_for().to_binding_from_internal_key(relation) or binding ) - if not binding: internal_fields = None # should be created with all the fields - if self._has_to_skip(binding, relation): - return _("Nothing to export") - - # export the missing linked resources - self._export_dependencies(relation) - - # prevent other jobs to export the same record - # will be released on commit (or rollback) - self._lock(relation) - map_record = self.mapper.map_record(relation) # passing info to the mapper opts = self._mapper_options(binding) - if binding: - values = self._update_data(map_record, fields=internal_fields, **opts) - if values: - external_id = self.binder_for().dict2id(binding, in_field=True) - result = self._update(external_id, values) - else: - values = self._create_data(map_record, fields=internal_fields, **opts) - if values: - external_data = self._create(values) - binding = self.binder_for().bind_export(external_data, relation) - if not values: - result = _("Nothing to export") - if not result: - result = 
_("Record exported with ID %s on Backend.") % "external_id" - self._after_export() - binding[self.binder_for()._sync_date_field] = now_fmt - return result + if always or not binding: + if binding: + values = self._update_data(map_record, fields=internal_fields, **opts) + if values: + external_id = self.binder_for().dict2id(binding, in_field=True) + result = self._update(external_id, values) + else: + values = self._create_data(map_record, fields=internal_fields, **opts) + if values: + external_data = self._create(values) + binding = self.binder_for().bind_export(external_data, relation) + if not values: + result = _("Nothing to export") + if not result: + result = _("Record exported with ID %s on Backend.") % "external_id" + self._after_export() + binding[self.binder_for()._sync_date_field] = now_fmt + return result + + def run(self, relation, always=True, internal_fields=None): + """Run the synchronization + + :param binding: binding record to export + """ + now_fmt = fields.Datetime.now() + if self._has_to_skip(relation): + return _("Nothing to export") + self._export_dependencies(relation) + return self._run( + now_fmt, relation, always=always, internal_fields=internal_fields + ) def _after_export(self): """Can do several actions after exporting a record on the backend""" @@ -111,10 +138,12 @@ def _lock(self, record): raise RetryableJobError( "A concurrent job is already exporting the same record " "(%s with id %s). The job will be retried later." 
- % (self.model._name, record.id) + % (self.model._name, record.id), + seconds=5, + ignore_retry=True, ) from e - def _has_to_skip(self, binding, relation): + def _has_to_skip(self, relation): """Return True if the export can be skipped""" return False @@ -192,19 +221,8 @@ def _export_dependency( pass extra values for this binding :type binding_extra_vals: dict """ - if not relation: - return - - binding = None - if not always: - rel_binder = self.binder_for(binding_model) - binding = rel_binder.wrap_record(relation) - if not binding: - binding = rel_binder.to_binding_from_internal_key(relation) - - if always or not binding: - exporter = self.component(usage=component_usage, model_name=binding_model) - exporter.run(relation) + exporter = self.component(usage=component_usage, model_name=binding_model) + exporter.run(relation, always=always) def _export_dependencies(self, relation): """Export the dependencies for the record""" From ab5c84208eaa9dd8d11cde410be481d7560503c0 Mon Sep 17 00:00:00 2001 From: KNVx Date: Thu, 19 Oct 2023 16:44:32 +0200 Subject: [PATCH 36/68] [IMP] connector_extension: _description created on connector extension external binding --- connector_extension/models/binding/binding.py | 1 + 1 file changed, 1 insertion(+) diff --git a/connector_extension/models/binding/binding.py b/connector_extension/models/binding/binding.py index bd6c4e702..3405ff3f4 100644 --- a/connector_extension/models/binding/binding.py +++ b/connector_extension/models/binding/binding.py @@ -8,6 +8,7 @@ class ConnectorExtensionExternalBinding(models.AbstractModel): _name = "connector.extension.external.binding" _inherit = "external.binding" + _description = "Connector Extension External Binding (abstract)" # by default we consider sync_date as the import one @api.model From 22bc73c44edc7b9fbc7d298838fa5fb22cc07a66 Mon Sep 17 00:00:00 2001 From: Eric Antones Date: Tue, 31 Oct 2023 19:03:43 +0100 Subject: [PATCH 37/68] [REF] connector_extension: Update copier template --- 
connector_extension/README.rst | 9 +++-- .../static/description/index.html | 34 ++++++++++--------- 2 files changed, 24 insertions(+), 19 deletions(-) diff --git a/connector_extension/README.rst b/connector_extension/README.rst index d72dee574..195bd8068 100644 --- a/connector_extension/README.rst +++ b/connector_extension/README.rst @@ -2,10 +2,13 @@ Connector Extension =================== -.. !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! !! This file is generated by oca-gen-addon-readme !! !! changes will be overwritten. !! !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:39b64c10f88d1224bc1e05fc967b018cce54a165a66cc8fd65dbf1c03f196c2d + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! .. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png :target: https://odoo-community.org/page/development-status @@ -17,7 +20,7 @@ Connector Extension :target: https://github.com/nuobit/odoo-addons/tree/16.0/connector_extension :alt: nuobit/odoo-addons -|badge1| |badge2| |badge3| +|badge1| |badge2| |badge3| The "connector_extension" module is an add-on for the Odoo ERP system that enhances the functionality of the base "connector" module. This extension provides additional features, tools, and integrations, making it easier for developers to create, manage, and maintain connections between Odoo and various third-party systems, APIs, or services. @@ -40,7 +43,7 @@ Bug Tracker Bugs are tracked on `GitHub Issues `_. In case of trouble, please check there if your issue has already been reported. -If you spotted it first, help us smashing it by providing a detailed and welcomed +If you spotted it first, help us to smash it by providing a detailed and welcomed `feedback `_. Do not contact contributors directly about support or help with technical issues. 
diff --git a/connector_extension/static/description/index.html b/connector_extension/static/description/index.html index 087b1b080..07bf56a1c 100644 --- a/connector_extension/static/description/index.html +++ b/connector_extension/static/description/index.html @@ -1,20 +1,20 @@ - + - + Connector Extension