# HG changeset patch
# User Vincent Hatakeyama <vincent.hatakeyama@xcg-consulting.fr>
# Date 1591717167 -7200
#      Tue Jun 09 17:39:27 2020 +0200
# Node ID 00330764ffcf8b607e734ad7b54287bbf9e0760c
# Parent  311514c74d0741912ea441c7a3148b30994a8a9c
✨ import SQL

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,11 +1,12 @@
-stages:
-- test
-- build
-- push
-
 include:
 - file: python3-lint.gitlab-ci.yaml
   project: xcg/ci-templates
+- file: docker-build.gitlab-ci.yaml
+  project: xcg/ci-templates
+
+variables:
+  TAG_LATEST: branch/default
+  DOCKER_IMAGE: xcgd/odoo_scripts
 
 import_jsonrpc_odoo11_test:
   stage: test
@@ -30,14 +31,7 @@
   - import_jsonrpc -v --host localhost --password admin -d test_setup --protocol jsonrpc -p 8069 --directory tests/import
   - import_jsonrpc -v --host localhost --password admin -d test_setup --protocol jsonrpc -p 8069 --file tests/context_import/res.company.csv --context test=value
   - import_jsonrpc -v --host localhost --password admin -d test_setup --protocol jsonrpc -p 8069 --directory tests/import_emptyyaml --delimiter ";"
-
-docker:
-  stage: build
-  image: docker.orus.io/cc/docker
-  variables:
-    DOCKER_IMAGE: xcgd/odoo_scripts
-  script:
-  - docker-build
+  - import_sql -vv --host postgres --user odoo --database test_setup --directory tests/import_sql
 
 build-documentation:
   stage: build
@@ -53,7 +47,7 @@
   - for language in en fr ; do LANGUAGE=$language BUILDDIRSUFFIX=/$(hg identify --debug --branch) make html ; done
 
 push-documentation:
-  stage: push
+  stage: deploy
   image:
     name: minio/mc:RELEASE.2020-01-13T22-49-03Z
     entrypoint: ["/bin/busybox"]
diff --git a/Dockerfile b/Dockerfile
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,8 @@
 FROM python:3-alpine
 ADD . /usr/src/odoo_scripts
 RUN set -x ;\
-    apk add --no-cache --update zsh rsync && \
-    pip3 install --no-cache /usr/src/odoo_scripts && \
+    apk add --no-cache --update zsh rsync postgresql-libs && \
+    apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev && \
+    python3 -m pip install /usr/src/odoo_scripts --no-cache-dir && \
+    apk --purge del .build-deps && \
     rm -rf /usr/src/odoo_scripts
diff --git a/NEWS.rst b/NEWS.rst
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -7,6 +7,7 @@
 
 Add odoo 13.
 Add ODOO_ADDONS_PATH env var to start.
+Add import_sql script.
 Add option to bind odoo sources in the image
 
 3.0
diff --git a/README.rst b/README.rst
--- a/README.rst
+++ b/README.rst
@@ -162,11 +162,26 @@
 
 - selection fields must use the technical value, not the displayed value or one of its translation.
 - many2one and many2many fields must use the xmlid of the field they are referencing, with the column name in the form field_name/id or field_name:id.
-- one2many fields in the same file are not supported for now,
+- one2many fields in the same file are not supported for now.
 - date and datetime must be in ISO format.
 - context does not include base.import usual import key, so they need to be added as argument or in the configuration file.
 
-import_jsonrpc tries to find an `import.yaml` file alongside any imported file. It will use the delimiter indicated in the file if it finds one and will add the context key to the context used in the jsonrpc calls.
+import_jsonrpc tries to find an ``import.yaml`` file alongside any imported file. It will use the delimiter indicated in the file if it finds one and will add the context key to the context used in the jsonrpc calls.
+
+import_sql
+----------
+
+Import CSV files into an Odoo database. The import is done directly as SQL insert and update commands.
+
+The format is similar to the one used with the import module, with many more restrictions:
+
+- selection fields must use the technical value, not the displayed value or one of its translation.
+- many2one and many2many fields can not be used.
+- one2many fields in the same file are not supported.
+- date and datetime must be in ISO format.
+
+When inserting, the create_uid/write_uid are not set.
+create_date and write_date are updated as needed, and so is the date_update in ``ir.model.data``.
 
 setup.cfg
 =========
@@ -209,6 +224,7 @@
   conf2reST --help | ~/src/zsh-completion-generator/help2comp.py conf2reST > ~/.local/share/zsh/completion/_conf2reST
   import_base_import --help | ~/src/zsh-completion-generator/help2comp.py import_base_import > ~/.local/share/zsh/completion/_import_base_import
   import_jsonrpc --help | ~/src/zsh-completion-generator/help2comp.py import_jsonrpc > ~/.local/share/zsh/completion/_import_jsonrpc
+  import_sql --help | ~/src/zsh-completion-generator/help2comp.py import_sql > ~/.local/share/zsh/completion/_import_sql
 
 
 Alternatives: genzshcomp 
diff --git a/odoo_scripts/__init__.py b/odoo_scripts/__init__.py
--- a/odoo_scripts/__init__.py
+++ b/odoo_scripts/__init__.py
@@ -1,3 +1,2 @@
 # expose some useful functions
-from .logging_utils import logging_from_verbose  # noqa: F401
 from .odoo import odoo_connect_parser, odoo_login  # noqa: F401
diff --git a/odoo_scripts/import_base_import.py b/odoo_scripts/import_base_import.py
--- a/odoo_scripts/import_base_import.py
+++ b/odoo_scripts/import_base_import.py
@@ -7,12 +7,16 @@
 import logging
 import sys
 from datetime import datetime
-from typing import List, Tuple
+from typing import Dict, List
 
 from requests_toolbelt import MultipartEncoder
 
-from . import logging_from_verbose, odoo_connect_parser, odoo_login
-from .importing import add_importing_file_parsing, extract_info_from_parsed
+from . import odoo_connect_parser, odoo_login
+from .importing import (
+    add_importing_file_parsing,
+    extract_model_lang_from_parsed,
+)
+from .parsing import logging_from_verbose
 
 _logger = logging.getLogger(__name__)
 
@@ -29,7 +33,7 @@
     host: str,
     port: int,
     timeout: int,
-    model_filenames: List[Tuple[str, str, str]],
+    model_filenames: List[Dict[str, str]],
     update_parameter: bool = True,
 ) -> int:
     o, session_id = odoo_login(
@@ -62,7 +66,10 @@
         "headers": True,
     }
     _logger.info("%d files to import", len(model_filenames))
-    for model, csv_file, lang in model_filenames:
+    for element in model_filenames:
+        model = element["model"]
+        csv_file = element["filename"]
+        lang = element["lang"]
         _logger.info(
             "Importing - %s in model %s (lang %s)", csv_file, model, lang
         )
@@ -162,7 +169,7 @@
         protocol=nmspc.protocol,
         timeout=nmspc.timeout,
         database=nmspc.database,
-        model_filenames=extract_info_from_parsed(nmspc),
+        model_filenames=extract_model_lang_from_parsed(nmspc),
     )
 
 
diff --git a/odoo_scripts/import_jsonrpc.py b/odoo_scripts/import_jsonrpc.py
--- a/odoo_scripts/import_jsonrpc.py
+++ b/odoo_scripts/import_jsonrpc.py
@@ -12,8 +12,12 @@
 from dateutil.parser import isoparse
 from odoorpc.error import RPCError
 
-from . import logging_from_verbose, odoo_connect_parser, odoo_login
-from .importing import add_importing_file_parsing, extract_info_from_parsed
+from .importing import (
+    add_importing_file_parsing,
+    extract_model_lang_from_parsed,
+)
+from .odoo import odoo_connect_parser, odoo_login
+from .parsing import logging_from_verbose
 
 _logger = logging.getLogger(__name__)
 
@@ -30,7 +34,7 @@
     host: str,
     port: int,
     timeout: int,
-    model_filenames: List[Tuple[str, str, str]],
+    model_filenames: List[Dict[str, str]],
     delimiter: str = ",",
     extra_context: Dict[str, str] = dict(),
 ) -> int:
@@ -114,7 +118,10 @@
     # this dictionary is used to store the import.yaml loaded data to avoid
     # accessing the disk too frequently
     import_yaml_cache = dict()
-    for model, csv_file, lang in model_filenames:
+    for element in model_filenames:
+        model = element["model"]
+        csv_file = element["filename"]
+        lang = element["lang"]
         _logger.info(
             "Importing - %s in model %s (lang %s)", csv_file, model, lang
         )
@@ -323,7 +330,9 @@
         protocol=nmspc.protocol,
         timeout=nmspc.timeout,
         database=nmspc.database,
-        model_filenames=extract_info_from_parsed(nmspc, r"(.*/)?import\.yaml"),
+        model_filenames=extract_model_lang_from_parsed(
+            nmspc, r"(.*/)?import\.yaml"
+        ),
         delimiter=nmspc.delimiter,
         extra_context=extra_context,
     )
diff --git a/odoo_scripts/import_sql.py b/odoo_scripts/import_sql.py
new file mode 100644
--- /dev/null
+++ b/odoo_scripts/import_sql.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python3
+"""Imports CSV files into an Odoo database using SQL.
+"""
+# Version 3.0
+import csv
+import logging
+import sys
+from typing import List, Tuple
+
+from .importing import (
+    add_importing_file_parsing,
+    extract_model_table_from_parsed,
+)
+from .postgres import postgres_apply, postgres_connect_parser
+
+_logger = logging.getLogger(__name__)
+
+__version__ = "1.0.0"
+__date__ = "2020-06-08"
+__updated__ = "2020-06-08"
+
+
+def _execute(cursor, query, parameters):
+    _logger.debug("Query %s, parameters %s", query, parameters)
+    cursor.execute(query, parameters)
+
+
+def sql_import(
+    connection, table_filenames: List[Tuple[str, str]], delimiter: str
+) -> None:
+    # quick check to fail quicker
+    with connection.cursor() as cur:
+        for element in table_filenames:
+            model = element["model"]
+            csv_file = element["filename"]
+            if "table" in element and element["table"]:
+                table = element["table"]
+            else:
+                # do Odoo model to table conversion
+                # (same as _build_model_attributes)
+                table = model.replace(".", "_")
+            _logger.info(
+                "Importing - %s in %s (table %s)", csv_file, model, table
+            )
+            with open(csv_file, "r") as file:
+                reader = csv.DictReader(
+                    file, delimiter=delimiter, quotechar='"'
+                )
+                headers_check = False
+                created = written = 0
+                for row in reader:
+                    # for each file read the headers
+                    if not headers_check:
+                        _logger.debug("Checking headers")
+                        many2x_columns = list()
+                        for column_name in row.keys():
+                            if column_name.endswith(
+                                "/id"
+                            ) or column_name.endswith(":id"):
+                                many2x_columns.append(column_name)
+                        if many2x_columns:
+                            msg = (
+                                "Importing many2many or many2one is not "
+                                "supported: %s" % (", ".join(many2x_columns))
+                            )
+                            _logger.fatal(msg)
+                            raise Exception(msg)
+                        headers_check = True
+                    if "id" in row:
+                        xmlid = row.pop("id")
+                        if "." in xmlid:
+                            module, name = xmlid.split(".", 1)
+                        else:
+                            module = ""
+                            name = xmlid
+                        # look up if the given id exists
+                        _execute(
+                            cur,
+                            "SELECT id, res_id "
+                            "FROM ir_model_data where model=%s "
+                            "AND module=%s AND name=%s",
+                            (model, module, name),
+                        )
+                        imd_data = cur.fetchone()
+                        if imd_data:
+                            imd_id, res_id = imd_data
+                            _logger.debug(
+                                "Found id %d for xmlid %s", res_id, xmlid
+                            )
+                            # if the id exists, do an update
+                            query = (
+                                "UPDATE {table} "
+                                "SET {data_placeholders} "
+                                "WHERE id=%s"
+                            ).format(
+                                table=table,
+                                data_placeholders=",".join(
+                                    ["write_date=(now() at time zone 'UTC')"]
+                                    + ["{}=%s".format(col) for col in row]
+                                ),
+                            )
+                            _execute(cur, query, list(row.values()) + [res_id])
+                            # update date_update in ir_model_data
+                            _execute(
+                                cur,
+                                "UPDATE ir_model_data "
+                                "SET date_update=(now() at time zone 'UTC') "
+                                "WHERE id=%s",
+                                (imd_id,),
+                            )
+                            written += 1
+                        else:
+                            # otherwise do an insert and update the columns
+                            query = (
+                                "INSERT INTO {table} ({columns}) "
+                                "VALUES ({data_placeholders}) "
+                                "RETURNING id"
+                            ).format(
+                                table=table,
+                                columns=",".join(
+                                    ["create_date", "write_date"]
+                                    + list(row.keys())
+                                ),
+                                data_placeholders=",".join(
+                                    ["(now() at time zone 'UTC')"] * 2
+                                    + ["%s"] * len(row)
+                                ),
+                            )
+                            _execute(cur, query, tuple(row.values()))
+                            # and insert into ir_model_data
+                            db_id = cur.fetchone()[0]
+                            _execute(
+                                cur,
+                                "INSERT INTO ir_model_data (module, name, "
+                                "model, res_id, date_init, date_update) "
+                                "VALUES (%s, %s, %s, %s, "
+                                "(now() at time zone 'UTC'), "
+                                "(now() at time zone 'UTC'))",
+                                (module, name, model, db_id),
+                            )
+                            created += 1
+                    else:
+                        # no xml id, just insert lines into the table
+                        query = (
+                            "INSERT INTO {table} ({columns}) "
+                            "VALUES ({data_placeholders})"
+                        ).format(
+                            table=table,
+                            columns=",".join(
+                                ["create_date", "write_date"]
+                                + list(row.keys())
+                            ),
+                            data_placeholders=",".join(
+                                ["(now() at time zone 'UTC')"] * 2
+                                + ["%s"] * len(row)
+                            ),
+                        )
+                        _execute(cur, query, tuple(row.values()))
+                        created += 1
+                    if (created + written) % 100 == 0:
+                        _logger.info(
+                            "%s progress: created %d, wrote %d",
+                            csv_file,
+                            created,
+                            written,
+                        )
+                _logger.info(
+                    "%s: created %d, wrote %d", csv_file, created, written
+                )
+
+        connection.commit()
+        _logger.info("Commited.")
+
+
+def main(argv=None):  # IGNORE:C0111
+    """Parse arguments and launch conversion
+    """
+    program_version = __version__
+    program_build_date = str(__updated__)
+    program_version_message = "%%(prog)s %s (%s)" % (
+        program_version,
+        program_build_date,
+    )
+    program_shortdesc = __doc__.split(".")[0]
+    program_license = """%s
+
+  Created by Vincent Hatakeyama on %s.
+  Copyright 2020 XCG Consulting. All rights reserved.
+
+  Licensed under the MIT License
+
+  Distributed on an "AS IS" basis without warranties
+  or conditions of any kind, either express or implied.
+
+USAGE
+""" % (
+        program_shortdesc,
+        str(__date__),
+    )
+    parser = postgres_connect_parser(program_license, program_version_message)
+    add_importing_file_parsing(parser)
+    parser.add_argument(
+        "--delimiter", help="CSV delimiter [default: %(default)s]", default=","
+    )
+
+    nmspc = parser.parse_args(argv)
+    conn = postgres_apply(nmspc)
+
+    sql_import(
+        connection=conn,
+        table_filenames=extract_model_table_from_parsed(nmspc),
+        delimiter=nmspc.delimiter,
+    )
+    conn.close()
+
+
+if __name__ == "__main__":
+    return_code = main(sys.argv[1:])
+    if return_code:
+        exit(return_code)
diff --git a/odoo_scripts/importing.py b/odoo_scripts/importing.py
--- a/odoo_scripts/importing.py
+++ b/odoo_scripts/importing.py
@@ -4,7 +4,7 @@
 import logging
 import os
 import re
-from typing import List, Tuple
+from typing import Dict, List
 
 _logger = logging.getLogger(__name__)
 
@@ -17,38 +17,71 @@
     )
 
 
+def extract_model_lang_from_parsed(
+    namespace: argparse.Namespace, expected_filenames: str = None
+) -> List[Dict[str, str]]:
+    pattern = re.compile(
+        r"(?P<filename>"
+        r"(?P<path>.*/)?"
+        r"(?:[\d\s\w_-]*\s)?"
+        r"(?P<model>[\w.]+?)"
+        r"(?:@(?P<lang>\w{2,3}(?:@latin|_\w{2})))?"
+        r".csv"
+        r")"
+    )
+    expected_filenames_pattern = (
+        re.compile(expected_filenames)
+        if expected_filenames is not None
+        else None
+    )
+    return extract_info_from_parsed(
+        namespace, pattern, expected_filenames_pattern
+    )
+
+
+def extract_model_table_from_parsed(
+    namespace: argparse.Namespace
+) -> List[Dict[str, str]]:
+    pattern = re.compile(
+        r"(?P<filename>"
+        r"(?P<path>.*/)?"
+        r"(?:[\d\s\w_-]*\s)?"
+        r"(?P<model>[\w.]+?)"
+        r"(?:@(?P<table>\w+))?"
+        r".csv"
+        r")"
+    )
+    expected_filenames_pattern = None
+    return extract_info_from_parsed(
+        namespace, pattern, expected_filenames_pattern
+    )
+
+
 def extract_info_from_parsed(
-    nmspc: argparse.Namespace, expected_filenames: str = None
-) -> List[Tuple[str, str, str]]:
+    nmspc: argparse.Namespace, pattern, expected_filenames_pattern=None
+) -> List[Dict[str, str]]:
     """
     :param nmspc: result of parser.parse_args
     :param expected_filenames: a pattern to use in re.compile
     :return: list of (model, filename, lang)
     """
     return extract_info_from_filename(
-        nmspc.file, nmspc.directory, expected_filenames
+        nmspc.file, nmspc.directory, pattern, expected_filenames_pattern
     )
 
 
 def extract_info_from_filename(
-    files, directories, expected_filenames: str = None
-) -> List[Tuple[str, str, str]]:
+    files, directories, pattern, expected_filenames_pattern=None
+) -> List[Dict[str, str]]:
     """from filename or directories, extract model and lang
 
     :param files:
     :param directories:
-    :param expected_filenames: a pattern to use in re.compile
-    :return: list of (model, filename, lang)
+    :param pattern: a re.compile pattern to match the file against
+    :param expected_filenames_pattern: a re.compile pattern
+    :return: list of dict with the group data in it
     """
-    model_filenames: List[Tuple[str, str, str]] = list()
-    # files need to be named according to the following regexp
-    pattern = re.compile(
-        r"(?P<path>.*/)?"
-        r"(?:[\d\s\w_-]*\s)?"
-        r"(?P<model>[\w.]+?)"
-        r"(?:@(?P<lang>\w{2,3}(?:@latin|_\w{2})))?"
-        r".csv"
-    )
+    model_filenames: List[Dict[str, str]] = list()
     file_list = list()
     if files:
         file_list.extend(files)
@@ -56,18 +89,12 @@
         for directory in directories:
             for root, subdirectories, files in os.walk(directory):
                 file_list.extend(os.path.join(root, f) for f in sorted(files))
-    expected_filenames_pattern = (
-        re.compile(expected_filenames)
-        if expected_filenames is not None
-        else None
-    )
+
     for filename in file_list:
         _logger.debug("%s to import", filename)
         result = pattern.fullmatch(filename)
         if result:
-            model = result.group("model")
-            lang = result.group("lang")
-            model_filenames.append((model, filename, lang))
+            model_filenames.append(result.groupdict())
         else:
             if (
                 expected_filenames_pattern
diff --git a/odoo_scripts/odoo.py b/odoo_scripts/odoo.py
--- a/odoo_scripts/odoo.py
+++ b/odoo_scripts/odoo.py
@@ -1,9 +1,12 @@
+"""Function to ease connection to an odoo server
+"""
 import argparse
 import logging
+from typing import Optional
 
 import odoorpc
 
-from .logging_utils import add_verbosity_to_parser
+from .parsing import basic_parser
 
 _logger = logging.getLogger(__name__)
 
@@ -44,13 +47,10 @@
     return o, session_id
 
 
-def odoo_connect_parser(description, version) -> argparse.ArgumentParser:
-    parser = argparse.ArgumentParser(
-        description=description,
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-    )
-    parser.add_argument("-V", "--version", action="version", version=version)
-    add_verbosity_to_parser(parser)
+def odoo_connect_parser(
+    description: Optional[str], version: str
+) -> argparse.ArgumentParser:
+    parser = basic_parser(description, version)
     parser.add_argument(
         "--login", help="Odoo user [default: %(default)s]", default="admin"
     )
diff --git a/odoo_scripts/logging_utils.py b/odoo_scripts/parsing.py
rename from odoo_scripts/logging_utils.py
rename to odoo_scripts/parsing.py
--- a/odoo_scripts/logging_utils.py
+++ b/odoo_scripts/parsing.py
@@ -1,5 +1,8 @@
+"""Function to help create and parse options.
+"""
 import argparse
 import logging
+from typing import Optional
 
 
 def add_verbosity_to_parser(parser: argparse.ArgumentParser):
@@ -24,3 +27,24 @@
         level=level,
         format="%(asctime)s %(levelname)8s [%(lineno)3d]: %(message)s",
     )
+
+
+def basic_parser(
+    description: Optional[str], version: str
+) -> argparse.ArgumentParser:
+    """Create a basic parser
+    """
+    parser = argparse.ArgumentParser(
+        description=description,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+    parser.add_argument("-V", "--version", action="version", version=version)
+    add_verbosity_to_parser(parser)
+
+    return parser
+
+
+def apply(namespace: argparse.Namespace):
+    """Use the basic parser options
+    """
+    logging_from_verbose(namespace)
diff --git a/odoo_scripts/postgres.py b/odoo_scripts/postgres.py
new file mode 100644
--- /dev/null
+++ b/odoo_scripts/postgres.py
@@ -0,0 +1,44 @@
+"""Function to ease connection to a postgres database
+"""
+import argparse
+import logging
+from typing import Optional
+
+import psycopg2
+
+from .parsing import apply, basic_parser
+
+_logger = logging.getLogger(__name__)
+
+
+def postgres_connect_parser(
+    description: Optional[str], version: str
+) -> argparse.ArgumentParser:
+    """Create a parser for a postgres connection
+    """
+    parser = basic_parser(description, version)
+    parser.add_argument("--database", help="the database name")
+    parser.add_argument("--user", help="user name used to authenticate")
+    parser.add_argument("--password", help="password used to authenticate")
+    parser.add_argument(
+        "--host",
+        help="database host address (defaults to UNIX socket if not provided)",
+    )
+    parser.add_argument(
+        "--port",
+        help="connection port number (defaults to 5432 if not provided)",
+        type=int,
+    )
+    return parser
+
+
+def postgres_apply(namespace: argparse.Namespace):
+    """Use postgres_connect_parser result to create a postgres connection
+    :return: a psycopg2 connection
+    """
+    apply(namespace)
+    connect_args = dict()
+    for argument in ("database", "user", "password", "host", "port"):
+        if getattr(namespace, argument):
+            connect_args[argument] = getattr(namespace, argument)
+    return psycopg2.connect(**connect_args)
diff --git a/requirements b/requirements
--- a/requirements
+++ b/requirements
@@ -2,4 +2,4 @@
 PyYAML
 odoorpc==0.7.0
 requests_toolbelt==0.8.0
-
+python-dateutil>=2.7.0
diff --git a/setup.cfg b/setup.cfg
--- a/setup.cfg
+++ b/setup.cfg
@@ -20,4 +20,10 @@
 
 [bumpversion:file:do_tests.py]
 
+[bumpversion:file:import_base_import.py]
+
+[bumpversion:file:import_jsonrpc.py]
+
+[bumpversion:file:import_sql.py]
+
 [bumpversion:file:setup.py]
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -16,11 +16,13 @@
         "requests_toolbelt==0.8.0",
         "python-dateutil>=2.7.0",
         # How to indicate to install python3-docker?
+        "psycopg2",  # used by import_sql
     ],
     entry_points={
         "console_scripts": [
             "import_base_import=odoo_scripts.import_base_import:main",
             "import_jsonrpc=odoo_scripts.import_jsonrpc:main",
+            "import_sql=odoo_scripts.import_sql:main",
             "docker_dev_start=odoo_scripts.docker_dev_start:main",
             "do_tests=odoo_scripts.do_tests:main",
             "docker_build=odoo_scripts.docker_build:main",
diff --git a/start b/start
--- a/start
+++ b/start
@@ -120,7 +120,6 @@
     then
         addons_path=""
     else
-        # TODO ne pas faire ça si ODOO_ADDONS_PATH est mis à vide ou a une valeur
         if [ -n "${ODOO_ADDONS_PATH+set}" ];
         then
             if [ -n "${ODOO_ADDONS_PATH}" ];
diff --git a/tests/import_sql/ir.rule.csv b/tests/import_sql/ir.rule.csv
new file mode 100644
--- /dev/null
+++ b/tests/import_sql/ir.rule.csv
@@ -0,0 +1,2 @@
+id,active
+base.res_users_log_rule,False
diff --git a/tests/import_sql/res.config.csv b/tests/import_sql/res.config.csv
new file mode 100644
--- /dev/null
+++ b/tests/import_sql/res.config.csv
@@ -0,0 +1,2 @@
+write_uid
+1
diff --git a/tests/import_sql/with-id res.config.csv b/tests/import_sql/with-id res.config.csv
new file mode 100644
--- /dev/null
+++ b/tests/import_sql/with-id res.config.csv	
@@ -0,0 +1,2 @@
+id
+test_import_sql_with_id