Compare commits
52 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e2d0794379 | |||
| ad80bc8113 | |||
| d7d0768a9c | |||
| ae282ab468 | |||
| bfc45bd0f0 | |||
| 51f76c72e2 | |||
| 42474371db | |||
| dec145ada5 | |||
| 30f119ffc7 | |||
| b1fdf488ad | |||
| 61deaad251 | |||
| c873cc462e | |||
| ead51bcd5a | |||
| 2ca7842e4f | |||
| 4cb3832213 | |||
| e397890098 | |||
| 6ee008e169 | |||
| c6d1822f3b | |||
| e037aece6a | |||
| 8c5918b9fb | |||
| 4f80085254 | |||
| 7e3e892002 | |||
| 19574ea4a0 | |||
| 1e7722de91 | |||
| fc250a433c | |||
| c38cd2c179 | |||
| 8c3948ff33 | |||
| e494bdd2b9 | |||
| 1aa70eba8b | |||
| 33ac0e008e | |||
| 45dabce956 | |||
| 2b16c5573e | |||
|
|
6e9ff28016 | ||
|
|
8f147e7445 | ||
|
|
c4a0b038e8 | ||
|
|
6c94b13b12 | ||
|
|
65bbc95ae2 | ||
|
|
4f68a2f360 | ||
| 0a1d9229b4 | |||
|
|
435497eca3 | ||
|
|
9fabecf406 | ||
|
|
b3e4e91df8 | ||
|
|
ba8f57ddc1 | ||
|
|
328f8d9952 | ||
|
|
a73896b75d | ||
|
|
15b2cb07ba | ||
|
|
7ee551d446 | ||
|
|
d14b005fd2 | ||
|
|
f43a066341 | ||
|
|
84a8beaf46 | ||
|
|
746522368e | ||
|
|
94d8c3d6df |
83 changed files with 7704 additions and 486 deletions
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -1,4 +1,6 @@
|
|||
*~
|
||||
*.pyc
|
||||
.coverage
|
||||
.tox/
|
||||
dist/
|
||||
docs/_build/
|
||||
|
|
|
|||
4
.pylintrc
Normal file
4
.pylintrc
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
# -*- mode: conf; -*-
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
disable=fixme
|
||||
79
CHANGELOG.md
79
CHANGELOG.md
|
|
@ -5,6 +5,85 @@ All notable changes to WuttaSync will be documented in this file.
|
|||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## v0.6.0 (2026-03-17)
|
||||
|
||||
### Feat
|
||||
|
||||
- add concept of (non-)default importers for handler
|
||||
- add support for Wutta <-> Wutta import/export
|
||||
|
||||
## v0.5.1 (2026-02-13)
|
||||
|
||||
### Fix
|
||||
|
||||
- fix source/target data order for import diff warning email
|
||||
- fix importer `get_keys()` logic to honor class attribute
|
||||
- add date type coercion logic for CSV importer
|
||||
|
||||
## v0.5.0 (2026-01-03)
|
||||
|
||||
### Feat
|
||||
|
||||
- add support for `wutta export-csv` command
|
||||
|
||||
### Fix
|
||||
|
||||
- add `actioner` property for ImportHandler
|
||||
|
||||
## v0.4.0 (2025-12-31)
|
||||
|
||||
### Feat
|
||||
|
||||
- add support for `--comment` CLI param, to set versioning comment
|
||||
- add support for `--runas` CLI param, to set versioning authorship
|
||||
|
||||
### Fix
|
||||
|
||||
- make pylint happy
|
||||
- accept either `--recip` or `--recips` param for import commands
|
||||
|
||||
## v0.3.0 (2025-12-20)
|
||||
|
||||
### Feat
|
||||
|
||||
- add `warnings` mode for import/export handlers, commands
|
||||
- add the `import-versions` command, handler logic
|
||||
|
||||
### Fix
|
||||
|
||||
- run all models when none specified, for import/export commands
|
||||
- allow passing just `key` to ImportCommandHandler
|
||||
- add `--comment` param for `import-versions` command
|
||||
- add basic data type coercion for CSV -> SQLAlchemy import
|
||||
- refactor some more for tests + pylint
|
||||
- refactor per pylint; add to tox
|
||||
- format all code with black
|
||||
- tweak logging when deleting object
|
||||
- add logging when deleting target object
|
||||
|
||||
## v0.2.1 (2025-06-29)
|
||||
|
||||
### Fix
|
||||
|
||||
- avoid empty keys for importer
|
||||
- do not assign simple/supported fields in Importer constructor
|
||||
- make `--input-path` optional for import/export commands
|
||||
|
||||
## v0.2.0 (2024-12-07)
|
||||
|
||||
### Feat
|
||||
|
||||
- add `wutta import-csv` command
|
||||
|
||||
### Fix
|
||||
|
||||
- expose `ToWuttaHandler`, `ToWutta` in `wuttasync.importing` namespace
|
||||
- implement deletion logic; add cli params for max changes
|
||||
- add `--key` (or `--keys`) param for import/export commands
|
||||
- add `--list-models` option for import/export commands
|
||||
- require latest wuttjamaican
|
||||
- add `--fields` and `--exclude` params for import/export cli
|
||||
|
||||
## v0.1.0 (2024-12-05)
|
||||
|
||||
### Feat
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
|
||||
# WuttaSync
|
||||
|
||||
Wutta framework for data import/export and real-time sync
|
||||
Wutta Framework for data import/export and real-time sync
|
||||
|
||||
See docs at https://rattailproject.org/docs/wuttasync/
|
||||
See docs at https://docs.wuttaproject.org/wuttasync/
|
||||
|
|
|
|||
0
docs/_static/.keepme
vendored
Normal file
0
docs/_static/.keepme
vendored
Normal file
6
docs/api/wuttasync.app.rst
Normal file
6
docs/api/wuttasync.app.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.app``
|
||||
=================
|
||||
|
||||
.. automodule:: wuttasync.app
|
||||
:members:
|
||||
6
docs/api/wuttasync.cli.base.rst
Normal file
6
docs/api/wuttasync.cli.base.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.cli.base``
|
||||
======================
|
||||
|
||||
.. automodule:: wuttasync.cli.base
|
||||
:members:
|
||||
6
docs/api/wuttasync.cli.export_csv.rst
Normal file
6
docs/api/wuttasync.cli.export_csv.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.cli.export_csv``
|
||||
============================
|
||||
|
||||
.. automodule:: wuttasync.cli.export_csv
|
||||
:members:
|
||||
6
docs/api/wuttasync.cli.export_wutta.rst
Normal file
6
docs/api/wuttasync.cli.export_wutta.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.cli.export_wutta``
|
||||
==============================
|
||||
|
||||
.. automodule:: wuttasync.cli.export_wutta
|
||||
:members:
|
||||
6
docs/api/wuttasync.cli.import_csv.rst
Normal file
6
docs/api/wuttasync.cli.import_csv.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.cli.import_csv``
|
||||
============================
|
||||
|
||||
.. automodule:: wuttasync.cli.import_csv
|
||||
:members:
|
||||
6
docs/api/wuttasync.cli.import_versions.rst
Normal file
6
docs/api/wuttasync.cli.import_versions.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.cli.import_versions``
|
||||
=================================
|
||||
|
||||
.. automodule:: wuttasync.cli.import_versions
|
||||
:members:
|
||||
6
docs/api/wuttasync.cli.import_wutta.rst
Normal file
6
docs/api/wuttasync.cli.import_wutta.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.cli.import_wutta``
|
||||
==============================
|
||||
|
||||
.. automodule:: wuttasync.cli.import_wutta
|
||||
:members:
|
||||
6
docs/api/wuttasync.cli.rst
Normal file
6
docs/api/wuttasync.cli.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.cli``
|
||||
=================
|
||||
|
||||
.. automodule:: wuttasync.cli
|
||||
:members:
|
||||
6
docs/api/wuttasync.conf.rst
Normal file
6
docs/api/wuttasync.conf.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.conf``
|
||||
==================
|
||||
|
||||
.. automodule:: wuttasync.conf
|
||||
:members:
|
||||
6
docs/api/wuttasync.emails.rst
Normal file
6
docs/api/wuttasync.emails.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.emails``
|
||||
====================
|
||||
|
||||
.. automodule:: wuttasync.emails
|
||||
:members:
|
||||
6
docs/api/wuttasync.exporting.base.rst
Normal file
6
docs/api/wuttasync.exporting.base.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.exporting.base``
|
||||
============================
|
||||
|
||||
.. automodule:: wuttasync.exporting.base
|
||||
:members:
|
||||
6
docs/api/wuttasync.exporting.csv.rst
Normal file
6
docs/api/wuttasync.exporting.csv.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.exporting.csv``
|
||||
===========================
|
||||
|
||||
.. automodule:: wuttasync.exporting.csv
|
||||
:members:
|
||||
6
docs/api/wuttasync.exporting.handlers.rst
Normal file
6
docs/api/wuttasync.exporting.handlers.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.exporting.handlers``
|
||||
================================
|
||||
|
||||
.. automodule:: wuttasync.exporting.handlers
|
||||
:members:
|
||||
6
docs/api/wuttasync.exporting.rst
Normal file
6
docs/api/wuttasync.exporting.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.exporting``
|
||||
=======================
|
||||
|
||||
.. automodule:: wuttasync.exporting
|
||||
:members:
|
||||
6
docs/api/wuttasync.importing.versions.rst
Normal file
6
docs/api/wuttasync.importing.versions.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.importing.versions``
|
||||
================================
|
||||
|
||||
.. automodule:: wuttasync.importing.versions
|
||||
:members:
|
||||
6
docs/api/wuttasync.testing.rst
Normal file
6
docs/api/wuttasync.testing.rst
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
|
||||
``wuttasync.testing``
|
||||
=====================
|
||||
|
||||
.. automodule:: wuttasync.testing
|
||||
:members:
|
||||
39
docs/conf.py
39
docs/conf.py
|
|
@ -8,33 +8,42 @@
|
|||
|
||||
from importlib.metadata import version as get_version
|
||||
|
||||
project = 'WuttaSync'
|
||||
copyright = '2024, Lance Edgar'
|
||||
author = 'Lance Edgar'
|
||||
release = get_version('WuttaSync')
|
||||
project = "WuttaSync"
|
||||
copyright = "2024, Lance Edgar"
|
||||
author = "Lance Edgar"
|
||||
release = get_version("WuttaSync")
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
||||
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx.ext.todo',
|
||||
'enum_tools.autoenum',
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.intersphinx",
|
||||
"sphinx.ext.viewcode",
|
||||
"sphinx.ext.todo",
|
||||
"enum_tools.autoenum",
|
||||
"sphinxcontrib.programoutput",
|
||||
]
|
||||
|
||||
templates_path = ['_templates']
|
||||
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||
templates_path = ["_templates"]
|
||||
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
|
||||
|
||||
intersphinx_mapping = {
|
||||
'python': ('https://docs.python.org/3/', None),
|
||||
'wuttjamaican': ('https://rattailproject.org/docs/wuttjamaican/', None),
|
||||
"python": ("https://docs.python.org/3/", None),
|
||||
"rattail-manual": ("https://docs.wuttaproject.org/rattail-manual/", None),
|
||||
"sqlalchemy": ("http://docs.sqlalchemy.org/en/latest/", None),
|
||||
"sqlalchemy-continuum": (
|
||||
"https://sqlalchemy-continuum.readthedocs.io/en/latest/",
|
||||
None,
|
||||
),
|
||||
"sqlalchemy-utils": ("https://sqlalchemy-utils.readthedocs.io/en/latest/", None),
|
||||
"wutta-continuum": ("https://docs.wuttaproject.org/wutta-continuum/", None),
|
||||
"wuttjamaican": ("https://docs.wuttaproject.org/wuttjamaican/", None),
|
||||
}
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
||||
|
||||
html_theme = 'furo'
|
||||
html_static_path = ['_static']
|
||||
html_theme = "furo"
|
||||
html_static_path = ["_static"]
|
||||
|
|
|
|||
50
docs/glossary.rst
Normal file
50
docs/glossary.rst
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
.. _glossary:
|
||||
|
||||
Glossary
|
||||
========
|
||||
|
||||
.. glossary::
|
||||
:sorted:
|
||||
|
||||
import/export key
|
||||
Unique key representing a particular type of import/export job,
|
||||
i.e. the source/target and orientation (import vs. export).
|
||||
|
||||
For instance "Wutta → CSV export" uses the key:
|
||||
``export.to_csv.from_wutta``
|
||||
|
||||
More than one :term:`import handler` can share a key, e.g. one
|
||||
may subclass another and inherit the key.
|
||||
|
||||
However only one handler is "designated" for a given key; it will
|
||||
be used by default for running those jobs.
|
||||
|
||||
This key is used for lookup in
|
||||
:meth:`~wuttasync.app.WuttaSyncAppProvider.get_import_handler()`.
|
||||
|
||||
See also
|
||||
:meth:`~wuttasync.importing.handlers.ImportHandler.get_key()`
|
||||
method on the import/export handler.
|
||||
|
||||
import handler
|
||||
This is a type of :term:`handler` which is responsible for a
|
||||
particular set of data import/export task(s).
|
||||
|
||||
The import handler manages data connections and transactions, and
|
||||
invokes one or more :term:`importers <importer>` to process the
|
||||
data. See also :ref:`import-handler-vs-importer`.
|
||||
|
||||
Note that "import/export handler" is the more proper term to use
|
||||
here but it is often shortened to just "import handler" for
|
||||
convenience.
|
||||
|
||||
importer
|
||||
This refers to a Python class/instance responsible for processing
|
||||
a particular :term:`data model` for an import/export job.
|
||||
|
||||
For instance there is usually one importer per table, when
|
||||
importing to the :term:`app database` (regardless of source).
|
||||
See also :ref:`import-handler-vs-importer`.
|
||||
|
||||
Note that "importer/exporter" is the more proper term to use here
|
||||
but it is often shortened to just "importer" for convenience.
|
||||
|
|
@ -2,28 +2,94 @@
|
|||
WuttaSync
|
||||
=========
|
||||
|
||||
This package adds data import/export and real-time sync utilities for
|
||||
the `Wutta Framework <https://wuttaproject.org>`_.
|
||||
This provides a "batteries included" way to handle data sync between
|
||||
arbitrary source and target.
|
||||
|
||||
While it of course supports import/export to/from the Wutta :term:`app
|
||||
database`, it may be used for any "source → target" data flow.
|
||||
This builds / depends on :doc:`WuttJamaican <wuttjamaican:index>`, for
|
||||
sake of a common :term:`config object` and :term:`handler` interface.
|
||||
It was originally designed for import to / export from the :term:`app
|
||||
database` but **both** the source and target can be "anything" -
|
||||
e.g. CSV or Excel file, cloud API, another DB.
|
||||
|
||||
The basic idea is as follows:
|
||||
|
||||
* read a data set from "source"
|
||||
* read corresponding data from "target"
|
||||
* compare the two data sets
|
||||
* where they differ, create/update/delete records on the target
|
||||
|
||||
Although in some cases (e.g. export to CSV) the target has no
|
||||
meaningful data so all source records are "created" on / written to
|
||||
the target.
|
||||
|
||||
.. note::
|
||||
|
||||
You may already have guessed, that this approach may not work for
|
||||
"big data" - and indeed, it is designed for "small" data sets,
|
||||
ideally 500K records or smaller. It reads both (source/target)
|
||||
data sets into memory so that is the limiting factor.
|
||||
|
||||
You can work around this to some extent, by limiting the data sets
|
||||
to a particular date range (or other "partitionable" aspect of the
|
||||
data), and only syncing that portion.
|
||||
|
||||
However this is not meant to be an ETL engine involving a data
|
||||
lake/warehouse. It is for more "practical" concerns where some
|
||||
disparate "systems" must be kept in sync, or basic import from /
|
||||
export to file.
|
||||
|
||||
The general "source → target" concept can be used for both import and
|
||||
export, since "everything is an import" from the target's perspective.
|
||||
|
||||
In addition to the import/export framework proper, a CLI framework is
|
||||
also provided.
|
||||
|
||||
A "real-time sync" framework is also (eventually) planned, similar to
|
||||
the one developed in the Rattail Project;
|
||||
cf. :doc:`rattail-manual:data/sync/index`.
|
||||
|
||||
.. image:: https://img.shields.io/badge/linting-pylint-yellowgreen
|
||||
:target: https://github.com/pylint-dev/pylint
|
||||
|
||||
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
||||
:target: https://github.com/psf/black
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Documentation
|
||||
|
||||
glossary
|
||||
narr/install
|
||||
narr/cli/index
|
||||
narr/concepts
|
||||
narr/custom/index
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:caption: API
|
||||
:caption: Package API
|
||||
|
||||
api/wuttasync
|
||||
api/wuttasync.app
|
||||
api/wuttasync.cli
|
||||
api/wuttasync.cli.base
|
||||
api/wuttasync.cli.export_csv
|
||||
api/wuttasync.cli.export_wutta
|
||||
api/wuttasync.cli.import_csv
|
||||
api/wuttasync.cli.import_versions
|
||||
api/wuttasync.cli.import_wutta
|
||||
api/wuttasync.conf
|
||||
api/wuttasync.emails
|
||||
api/wuttasync.exporting
|
||||
api/wuttasync.exporting.base
|
||||
api/wuttasync.exporting.csv
|
||||
api/wuttasync.exporting.handlers
|
||||
api/wuttasync.importing
|
||||
api/wuttasync.importing.base
|
||||
api/wuttasync.importing.csv
|
||||
api/wuttasync.importing.handlers
|
||||
api/wuttasync.importing.model
|
||||
api/wuttasync.importing.versions
|
||||
api/wuttasync.importing.wutta
|
||||
api/wuttasync.testing
|
||||
api/wuttasync.util
|
||||
|
|
|
|||
90
docs/narr/cli/builtin.rst
Normal file
90
docs/narr/cli/builtin.rst
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
|
||||
===================
|
||||
Built-in Commands
|
||||
===================
|
||||
|
||||
Below are the :term:`subcommands <subcommand>` which come with
|
||||
WuttaSync.
|
||||
|
||||
It is fairly simple to add more; see :doc:`custom`.
|
||||
|
||||
|
||||
.. _wutta-export-csv:
|
||||
|
||||
``wutta export-csv``
|
||||
--------------------
|
||||
|
||||
Export data from the Wutta :term:`app database` to CSV file(s).
|
||||
|
||||
This *should* be able to automatically export any table mapped in the
|
||||
:term:`app model`. The only caveat is that it is "dumb" and does not
|
||||
have any special field handling. This means the column headers in the
|
||||
CSV will be the same as in the source table, and some data types may
|
||||
not behave as expected etc.
|
||||
|
||||
Defined in: :mod:`wuttasync.cli.export_csv`
|
||||
|
||||
.. program-output:: wutta export-csv --help
|
||||
|
||||
|
||||
.. _wutta-export-wutta:
|
||||
|
||||
``wutta export-wutta``
|
||||
----------------------
|
||||
|
||||
Export data to another Wutta :term:`app database`, from the local one.
|
||||
|
||||
Defined in: :mod:`wuttasync.cli.export_wutta`
|
||||
|
||||
.. program-output:: wutta export-wutta --help
|
||||
|
||||
|
||||
.. _wutta-import-csv:
|
||||
|
||||
``wutta import-csv``
|
||||
--------------------
|
||||
|
||||
Import data from CSV file(s) to the Wutta :term:`app database`.
|
||||
|
||||
This *should* be able to automatically target any table mapped in the
|
||||
:term:`app model`. The only caveat is that it is "dumb" and does not
|
||||
have any special field handling. This means the column headers in the
|
||||
CSV file must be named the same as in the target table, and some data
|
||||
types may not behave as expected etc.
|
||||
|
||||
Defined in: :mod:`wuttasync.cli.import_csv`
|
||||
|
||||
.. program-output:: wutta import-csv --help
|
||||
|
||||
|
||||
.. _wutta-import-versions:
|
||||
|
||||
``wutta import-versions``
|
||||
-------------------------
|
||||
|
||||
Import latest data to version tables, for the Wutta :term:`app
|
||||
database`.
|
||||
|
||||
The purpose of this is to ensure version tables accurately reflect
|
||||
the current "live" data set, for given table(s). It is only
|
||||
relevant/usable if versioning is configured and enabled. For more
|
||||
on that see :doc:`wutta-continuum:index`.
|
||||
|
||||
This command can check/update version tables for any versioned class
|
||||
in the :term:`app model`.
|
||||
|
||||
Defined in: :mod:`wuttasync.cli.import_versions`
|
||||
|
||||
.. program-output:: wutta import-versions --help
|
||||
|
||||
|
||||
.. _wutta-import-wutta:
|
||||
|
||||
``wutta import-wutta``
|
||||
----------------------
|
||||
|
||||
Import data from another Wutta :term:`app database`, to the local one.
|
||||
|
||||
Defined in: :mod:`wuttasync.cli.import_wutta`
|
||||
|
||||
.. program-output:: wutta import-wutta --help
|
||||
64
docs/narr/cli/custom.rst
Normal file
64
docs/narr/cli/custom.rst
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
|
||||
=================
|
||||
Custom Commands
|
||||
=================
|
||||
|
||||
This section describes how to add a custom :term:`subcommand` which
|
||||
wraps a particular :term:`import handler`.
|
||||
|
||||
See also :doc:`wuttjamaican:narr/cli/custom` for more information
|
||||
on the general concepts etc.
|
||||
|
||||
|
||||
Basic Import/Export
|
||||
-------------------
|
||||
|
||||
Here we'll assume you have a typical "Poser" app based on Wutta
|
||||
Framework, and the "Foo → Poser" (``FromFooToPoser`` handler) import
|
||||
logic is defined in the ``poser.importing.foo`` module.
|
||||
|
||||
We'll also assume you already have a ``poser`` top-level
|
||||
:term:`command` (in ``poser.cli``), and our task now is to add the
|
||||
``poser import-foo`` subcommand to wrap the import handler.
|
||||
|
||||
And finally we'll assume this is just a "typical" import handler and
|
||||
we do not need any custom CLI params exposed.
|
||||
|
||||
Here is the code and we'll explain below::
|
||||
|
||||
from poser.cli import poser_typer
|
||||
from wuttasync.cli import import_command, ImportCommandHandler
|
||||
|
||||
@poser_typer.command()
|
||||
@import_command
|
||||
def import_foo(ctx, **kwargs):
|
||||
"""
|
||||
Import data from Foo API to Poser DB
|
||||
"""
|
||||
config = ctx.parent.wutta_config
|
||||
handler = ImportCommandHandler(
|
||||
config, import_handler='poser.importing.foo:FromFooToPoser')
|
||||
handler.run(ctx)
|
||||
|
||||
Hopefully it's straightforward but to be clear:
|
||||
|
||||
* subcommand is really just a function, **with desired name**
|
||||
* wrap with ``@poser_typer.command()`` to register as subcommand
|
||||
* wrap with ``@import_command`` to get typical CLI params
|
||||
* call ``ImportCommandHandler.run()`` with import handler spec
|
||||
|
||||
So really - in addition to
|
||||
:func:`~wuttasync.cli.base.import_command()` - the
|
||||
:class:`~wuttasync.cli.base.ImportCommandHandler` is doing the heavy
|
||||
lifting for all import/export subcommands, it just needs to know which
|
||||
:term:`import handler` to use.
|
||||
|
||||
.. note::
|
||||
|
||||
If your new subcommand is defined in a different module than is the
|
||||
top-level command (e.g. as in example above) then you may need to
|
||||
"eagerly" import the subcommand module. (Otherwise auto-discovery
|
||||
may not find it.)
|
||||
|
||||
This is usually done from within the top-level command's module,
|
||||
since it is always imported early due to the entry point.
|
||||
23
docs/narr/cli/index.rst
Normal file
23
docs/narr/cli/index.rst
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
|
||||
========================
|
||||
Command Line Interface
|
||||
========================
|
||||
|
||||
The primary way of using the import/export framework day to day is via
|
||||
the command line.
|
||||
|
||||
WuttJamaican defines the ``wutta`` :term:`command` and WuttaSync comes
|
||||
with some extra :term:`subcommands <subcommand>` for importing to /
|
||||
exporting from the Wutta :term:`app database`.
|
||||
|
||||
It is fairly simple to add a dedicated subcommand for any
|
||||
:term:`import handler`; see below.
|
||||
|
||||
And for more general info about CLI see
|
||||
:doc:`wuttjamaican:narr/cli/index`.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
builtin
|
||||
custom
|
||||
54
docs/narr/concepts.rst
Normal file
54
docs/narr/concepts.rst
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
|
||||
Concepts
|
||||
========
|
||||
|
||||
Things hopefully are straightforward but it's important to get the
|
||||
following straight in your head; the rest will come easier if you do.
|
||||
|
||||
|
||||
Source vs. Target
|
||||
-----------------
|
||||
|
||||
Data always flows from source to target, it is the #1 rule.
|
||||
|
||||
Docs and command output will always reflect this, e.g. **CSV →
|
||||
Wutta**.
|
||||
|
||||
Source and target can be anything as long as the :term:`import
|
||||
handler` and :term:`importer(s) <importer>` implement the desired
|
||||
logic. The :term:`app database` is often involved but not always.
|
||||
|
||||
|
||||
Import vs. Export
|
||||
-----------------
|
||||
|
||||
Surprise, there is no difference. After all from target's perspective
|
||||
everything is really an import.
|
||||
|
||||
Sometimes it's more helpful to think of it as an export, e.g. **Wutta
|
||||
→ CSV** really seems like an export. In such cases the
|
||||
:attr:`~wuttasync.importing.handlers.ImportHandler.orientation` may be
|
||||
set to reflect the distinction.
|
||||
|
||||
|
||||
.. _import-handler-vs-importer:
|
||||
|
||||
Import Handler vs. Importer
|
||||
---------------------------
|
||||
|
||||
The :term:`import handler` is sort of the "wrapper" around one or more
|
||||
:term:`importers <importer>` and the latter contain the table-specific
|
||||
sync logic.
|
||||
|
||||
In a DB or similar context, the import handler will make the
|
||||
connection, then invoke all requested importers, then commit
|
||||
transaction at the end (or rollback if dry-run).
|
||||
|
||||
And each importer will read data from source, and usually also read
|
||||
data from target, then compare data sets and finally write data to
|
||||
target as needed. But each would usually do this for just one table.
|
||||
|
||||
See also the base classes for each:
|
||||
|
||||
* :class:`~wuttasync.importing.handlers.ImportHandler`
|
||||
* :class:`~wuttasync.importing.base.Importer`
|
||||
9
docs/narr/custom/command.rst
Normal file
9
docs/narr/custom/command.rst
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
|
||||
Define Command
|
||||
==============
|
||||
|
||||
Now that you have defined the import handler plus any importers
|
||||
required, you'll want to define a command line interface to use it.
|
||||
|
||||
This section is here for completeness but the process is described
|
||||
elsewhere; see :doc:`/narr/cli/custom`.
|
||||
90
docs/narr/custom/conventions.rst
Normal file
90
docs/narr/custom/conventions.rst
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
|
||||
Conventions
|
||||
===========
|
||||
|
||||
Below are recommended conventions for structuring and naming the files
|
||||
in your project relating to import/export.
|
||||
|
||||
The intention for these rules is that they are "intuitive" based on
|
||||
the fact that all data flows from source to target and therefore can
|
||||
be thought of as "importing" in virtually all cases.
|
||||
|
||||
But there are a lot of edge cases out there so YMMV.
|
||||
|
||||
|
||||
"The Rules"
|
||||
-----------
|
||||
|
||||
There are exceptions to these of course, but in general:
|
||||
|
||||
* regarding how to think about these conventions:
|
||||
|
||||
* always look at it from target's perspective
|
||||
|
||||
* always look at it as an *import*, not export
|
||||
|
||||
* "final" logic is always a combo of:
|
||||
|
||||
* "base" logic for how target data read/write happens generally
|
||||
|
||||
* "specific" logic for how that happens using a particular data source
|
||||
|
||||
* targets each get their own subpackage within project
|
||||
|
||||
* and within that, also an ``importing`` (nested) subpackage
|
||||
|
||||
* and within *that* is where the files live, referenced next
|
||||
|
||||
* target ``model.py`` should contain ``ToTarget`` importer base class
|
||||
|
||||
* also may have misc. per-model base classes, e.g. ``WidgetImporter``
|
||||
|
||||
* also may have ``ToTargetHandler`` base class if applicable
|
||||
|
||||
* sources each get their own module, named after the source
|
||||
|
||||
* should contain the "final" handler class, e.g. ``FromSourceToTarget``
|
||||
|
||||
* also contains "final" importer classes needed by handler (e.g. ``WidgetImporter``)
|
||||
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
That's a lot of rules so let's see it. Here we assume a Wutta-based
|
||||
app named Poser and it integrates with a Foo API in the cloud. Data
|
||||
should flow both ways so we will be thinking of this as:
|
||||
|
||||
* **Foo → Poser import**
|
||||
* **Poser → Foo export**
|
||||
|
||||
Here is the suggested file layout:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
poser/
|
||||
├── foo/
|
||||
│ ├── __init__.py
|
||||
│ ├── api.py
|
||||
│ └── importing/
|
||||
│ ├── __init__.py
|
||||
│ ├── model.py
|
||||
│ └── poser.py
|
||||
└── importing/
|
||||
├── __init__.py
|
||||
├── foo.py
|
||||
└── model.py
|
||||
|
||||
And the module breakdown:
|
||||
|
||||
* ``poser.foo.api`` has e.g. ``FooAPI`` interface logic
|
||||
|
||||
**Foo → Poser import** (aka. "Poser imports from Foo")
|
||||
|
||||
* ``poser.importing.model`` has ``ToPoserHandler``, ``ToPoser`` and per-model base importers
|
||||
* ``poser.importing.foo`` has ``FromFooToPoser`` plus final importers
|
||||
|
||||
**Poser → Foo export** (aka. "Foo imports from Poser")
|
||||
|
||||
* ``poser.foo.importing.model`` has ``ToFooHandler``, ``ToFoo`` and per-model base importer
|
||||
* ``poser.foo.importing.poser`` has ``FromPoserToFoo`` plus final importers
|
||||
93
docs/narr/custom/handler.rst
Normal file
93
docs/narr/custom/handler.rst
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
|
||||
Define Import Handler
|
||||
=====================
|
||||
|
||||
The obvious step here is to define a new :term:`import handler`, which
|
||||
ultimately inherits from
|
||||
:class:`~wuttasync.importing.handlers.ImportHandler`. But the choice
|
||||
of which class(es) *specifically* to inherit from, is a bit more
|
||||
complicated.
|
||||
|
||||
|
||||
Choose the Base Class(es)
|
||||
-------------------------
|
||||
|
||||
If all else fails, or to get started simply, you can always just
|
||||
inherit from :class:`~wuttasync.importing.handlers.ImportHandler`
|
||||
directly as the only base class. You'll have to define any methods
|
||||
needed to implement desired behavior.
|
||||
|
||||
However depending on your particular source and/or target, there may
|
||||
be existing base classes defined somewhere from which you can inherit.
|
||||
This may save you some effort, and/or is just a good idea to share
|
||||
code where possible.
|
||||
|
||||
Keep in mind your import handler can inherit from multiple base
|
||||
classes, and often will - one base for the source side, and another
|
||||
for the target side. For instance::
|
||||
|
||||
from wuttasync.importing import FromFileHandler, ToWuttaHandler
|
||||
|
||||
class FromExcelToPoser(FromFileHandler, ToWuttaHandler):
|
||||
"""
|
||||
Handler for Excel file → Poser app DB
|
||||
"""
|
||||
|
||||
You generally will still need to define/override some methods to
|
||||
customize behavior.
|
||||
|
||||
All built-in base classes live under :mod:`wuttasync.importing`.
|
||||
|
||||
|
||||
.. _register-importer:
|
||||
|
||||
Register Importer(s)
|
||||
--------------------
|
||||
|
||||
If nothing else, most custom handlers must override
|
||||
:meth:`~wuttasync.importing.handlers.ImportHandler.define_importers()`
|
||||
to "register" importer(s) as appropriate. There are two primary goals
|
||||
here:
|
||||
|
||||
* add "new" (totally custom) importers
|
||||
* override "existing" importers (inherited from base class)
|
||||
|
||||
Obviously for this to actually work the importer(s) must exist in
|
||||
code; see :doc:`importer`.
|
||||
|
||||
As an example let's say there's a ``FromFooToWutta`` handler which
|
||||
defines a ``Widget`` importer.
|
||||
|
||||
And let's say you want to customize that, by tweaking slightly the
|
||||
logic for ``WidgetImporter`` and adding a new ``SprocketImporter``::
|
||||
|
||||
from somewhere_else import (FromFooToWutta, ToWutta,
|
||||
WidgetImporter as WidgetImporterBase)
|
||||
|
||||
class FromFooToPoser(FromFooToWutta):
|
||||
"""
|
||||
Handler for Foo -> Poser
|
||||
"""
|
||||
|
||||
def define_importers(self):
|
||||
|
||||
# base class defines the initial set
|
||||
importers = super().define_importers()
|
||||
|
||||
# override widget importer
|
||||
importers['Widget'] = WidgetImporter
|
||||
|
||||
# add sprocket importer
|
||||
importers['Sprocket'] = SprocketImporter
|
||||
|
||||
return importers
|
||||
|
||||
class SprocketImporter(ToWutta):
|
||||
"""
|
||||
Sprocket importer for Foo -> Poser
|
||||
"""
|
||||
|
||||
class WidgetImporter(WidgetImporterBase):
|
||||
"""
|
||||
Widget importer for Foo -> Poser
|
||||
"""
|
||||
149
docs/narr/custom/importer.rst
Normal file
149
docs/narr/custom/importer.rst
Normal file
|
|
@ -0,0 +1,149 @@
|
|||
|
||||
Define Importer(s)
|
||||
==================
|
||||
|
||||
Here we'll describe how to make a custom :term:`importer/exporter
|
||||
<importer>`, which can process a given :term:`data model`.
|
||||
|
||||
..
|
||||
The example will assume a **Foo → Poser import** for the ``Widget``
|
||||
:term:`data model`.
|
||||
|
||||
|
||||
Choose the Base Class(es)
|
||||
-------------------------
|
||||
|
||||
As with the :term:`import handler`, the importer "usually" will have
|
||||
two base classes: one for the target side and another for the source.
|
||||
|
||||
The base class for target side is generally more fleshed out, with
|
||||
logic to read/write data for the given target model. Whereas the base
|
||||
class for the source side could just be a stub. In the latter case,
|
||||
one might choose to skip it and inherit only from the target base
|
||||
class.
|
||||
|
||||
In any case the final importer class you define can override any/all
|
||||
logic from either base class if needed.
|
||||
|
||||
|
||||
Example: Foo → Poser import
|
||||
---------------------------
|
||||
|
||||
Here we'll assume a Wutta-based app named "Poser" which will be
|
||||
importing "Widget" data from the "Foo API" cloud service.
|
||||
|
||||
In this case we will inherit from a base class for the target side,
|
||||
which already knows how to talk to the :term:`app database` via
|
||||
SQLAlchemy ORM.
|
||||
|
||||
But for the source side, there is no existing base class for the Foo
|
||||
API service, since that is just made-up - so we will also define our
|
||||
own base class for that::
|
||||
|
||||
from wuttasync.importing import Importer, ToWutta
|
||||
|
||||
# nb. this is not real of course, but an example
|
||||
from poser.foo.api import FooAPI
|
||||
|
||||
class FromFoo(Importer):
|
||||
"""
|
||||
Base class for importers using Foo API as source
|
||||
"""
|
||||
|
||||
def setup(self):
|
||||
"""
|
||||
Establish connection to Foo API
|
||||
"""
|
||||
self.foo_api = FooAPI(self.config)
|
||||
|
||||
class WidgetImporter(FromFoo, ToWutta):
|
||||
"""
|
||||
Widget importer for Foo -> Poser
|
||||
"""
|
||||
|
||||
def get_source_objects(self):
|
||||
"""
|
||||
Fetch all "raw" widgets from Foo API
|
||||
"""
|
||||
# nb. also not real, just example
|
||||
return self.foo_api.get_widgets()
|
||||
|
||||
def normalize_source_object(self, widget):
|
||||
"""
|
||||
Convert the "raw" widget we receive from Foo API, to a
|
||||
"normalized" dict with data for all fields which are part of
|
||||
the processing request.
|
||||
"""
|
||||
return {
|
||||
'id': widget.id,
|
||||
'name': widget.name,
|
||||
}
|
||||
|
||||
|
||||
Example: Poser → Foo export
|
||||
---------------------------
|
||||
|
||||
In the previous scenario we imported data from Foo to Poser, and here
|
||||
we'll do the reverse, exporting from Poser to Foo.
|
||||
|
||||
As of writing the base class logic for exporting from Wutta :term:`app
|
||||
database` does not yet exist. And the Foo API is just made-up so
|
||||
we'll add one-off base classes for both sides::
|
||||
|
||||
from wuttasync.importing import Importer
|
||||
|
||||
class FromWutta(Importer):
|
||||
"""
|
||||
Base class for importers using Wutta DB as source
|
||||
"""
|
||||
|
||||
class ToFoo(Importer):
|
||||
"""
|
||||
Base class for exporters targeting Foo API
|
||||
"""
|
||||
|
||||
class WidgetImporter(FromWutta, ToFoo):
|
||||
"""
|
||||
Widget exporter for Poser -> Foo
|
||||
"""
|
||||
|
||||
def get_source_objects(self):
|
||||
"""
|
||||
Fetch all widgets from the Poser app DB.
|
||||
|
||||
(see note below regarding the db session)
|
||||
"""
|
||||
model = self.app.model
|
||||
return self.source_session.query(model.Widget).all()
|
||||
|
||||
def normalize_source_object(self, widget):
|
||||
"""
|
||||
Convert the "raw" widget from Poser app (ORM) to a
|
||||
"normalized" dict with data for all fields which are part of
|
||||
the processing request.
|
||||
"""
|
||||
return {
|
||||
'id': widget.id,
|
||||
'name': widget.name,
|
||||
}
|
||||
|
||||
Note that the ``get_source_objects()`` method shown above makes use of
|
||||
a ``source_session`` attribute - where did that come from?
|
||||
|
||||
This is actually not part of the importer proper, but rather this
|
||||
attribute is set by the :term:`import handler`. And that will ony
|
||||
happen if the importer is being invoked by a handler which supports
|
||||
it. So none of that is shown here, but FYI.
|
||||
|
||||
(And again, that logic isn't written yet, but there will "soon" be a
|
||||
``FromSqlalchemyHandler`` class defined which implements this.)
|
||||
|
||||
|
||||
Regster with Import Handler
|
||||
---------------------------
|
||||
|
||||
After you define the importer/exporter class (as shown above) you also
|
||||
must "register" it within the import/export handler.
|
||||
|
||||
This section is here for completeness but the process is described
|
||||
elsewhere; see :ref:`register-importer`.
|
||||
21
docs/narr/custom/index.rst
Normal file
21
docs/narr/custom/index.rst
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
|
||||
Custom Import/Export
|
||||
====================
|
||||
|
||||
This section explains what's required to make your own import/export
|
||||
tasks.
|
||||
|
||||
See also :doc:`/narr/concepts` for some terminology etc.
|
||||
|
||||
..
|
||||
The examples throughout the sections below will often involve a
|
||||
theoretical **Foo → Poser** import, where Poser is a typical
|
||||
Wutta-based app and Foo is some API in the cloud.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
conventions
|
||||
handler
|
||||
importer
|
||||
command
|
||||
|
|
@ -6,8 +6,8 @@ build-backend = "hatchling.build"
|
|||
|
||||
[project]
|
||||
name = "WuttaSync"
|
||||
version = "0.1.0"
|
||||
description = "Wutta framework for data import/export and real-time sync"
|
||||
version = "0.6.0"
|
||||
description = "Wutta Framework for data import/export and real-time sync"
|
||||
readme = "README.md"
|
||||
authors = [{name = "Lance Edgar", email = "lance@wuttaproject.org"}]
|
||||
license = {text = "GNU GPL v3+"}
|
||||
|
|
@ -26,14 +26,34 @@ classifiers = [
|
|||
]
|
||||
requires-python = ">= 3.8"
|
||||
dependencies = [
|
||||
"humanize",
|
||||
"makefun",
|
||||
"rich",
|
||||
"SQLAlchemy-Utils",
|
||||
"WuttJamaican[db]",
|
||||
"WuttJamaican[db]>=0.28.10",
|
||||
]
|
||||
|
||||
|
||||
[project.optional-dependencies]
|
||||
docs = ["Sphinx", "enum-tools[sphinx]", "furo"]
|
||||
tests = ["pytest-cov", "tox"]
|
||||
docs = ["Sphinx", "enum-tools[sphinx]", "furo", "sphinxcontrib-programoutput"]
|
||||
tests = ["pylint", "pytest", "pytest-cov", "tox", "Wutta-Continuum>=0.3.0"]
|
||||
|
||||
|
||||
[project.entry-points."wutta.app.providers"]
|
||||
wuttasync = "wuttasync.app:WuttaSyncAppProvider"
|
||||
|
||||
[project.entry-points."wutta.config.extensions"]
|
||||
"wuttasync" = "wuttasync.conf:WuttaSyncConfig"
|
||||
|
||||
[project.entry-points."wuttasync.importing"]
|
||||
"export.to_csv.from_wutta" = "wuttasync.exporting.csv:FromWuttaToCsv"
|
||||
"export.to_wutta.from_wutta" = "wuttasync.importing.wutta:FromWuttaToWuttaExport"
|
||||
"import.to_versions.from_wutta" = "wuttasync.importing.versions:FromWuttaToVersions"
|
||||
"import.to_wutta.from_csv" = "wuttasync.importing.csv:FromCsvToWutta"
|
||||
"import.to_wutta.from_wutta" = "wuttasync.importing.wutta:FromWuttaToWuttaImport"
|
||||
|
||||
[project.entry-points."wutta.typer_imports"]
|
||||
wuttasync = "wuttasync.cli"
|
||||
|
||||
|
||||
[project.urls]
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,9 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
"""
|
||||
Package Version
|
||||
"""
|
||||
|
||||
from importlib.metadata import version
|
||||
|
||||
|
||||
__version__ = version('WuttaSync')
|
||||
__version__ = version("WuttaSync")
|
||||
|
|
|
|||
243
src/wuttasync/app.py
Normal file
243
src/wuttasync/app.py
Normal file
|
|
@ -0,0 +1,243 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
App handler supplement for WuttaSync
|
||||
"""
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from wuttjamaican.app import AppProvider
|
||||
from wuttjamaican.util import load_entry_points
|
||||
|
||||
|
||||
class WuttaSyncAppProvider(AppProvider):
|
||||
"""
|
||||
The :term:`app provider` for WuttaSync.
|
||||
|
||||
This adds some methods to the :term:`app handler`, which are
|
||||
specific to import/export.
|
||||
|
||||
It also declares some :term:`email modules <email module>` and
|
||||
:term:`email templates <email template>` for the app.
|
||||
|
||||
We have two concerns when doing lookups etc. for import/export
|
||||
handlers:
|
||||
|
||||
* which handlers are *available* - i.e. they exist and are
|
||||
discoverable
|
||||
* which handlers are *designated* - only one designated handler
|
||||
per key
|
||||
|
||||
All "available" handlers will have a key, but some keys may be
|
||||
referenced by multiple handlers. For each key, only one handler
|
||||
can be "designated" - there is a default, but config can override.
|
||||
"""
|
||||
|
||||
email_modules = ["wuttasync.emails"]
|
||||
email_templates = ["wuttasync:email-templates"]
|
||||
|
||||
def get_all_import_handlers(self):
|
||||
"""
|
||||
Returns *all* :term:`import/export handler <import handler>`
|
||||
*classes* which are known to exist, i.e. are discoverable.
|
||||
|
||||
See also :meth:`get_import_handler()` and
|
||||
:meth:`get_designated_import_handlers()`.
|
||||
|
||||
The discovery process is as follows:
|
||||
|
||||
* load handlers from registered entry points
|
||||
* check config for designated handlers
|
||||
|
||||
Checking for designated handler config is not a reliable way
|
||||
to discover handlers, but it's done just in case any new ones
|
||||
might be found.
|
||||
|
||||
Registration via entry points is the only way to ensure a
|
||||
handler is discoverable. The entry point group name is always
|
||||
``wuttasync.importing`` regardless of :term:`app name`;
|
||||
entries are like ``"handler_key" = "handler_spec"``. For
|
||||
example:
|
||||
|
||||
.. code-block:: toml
|
||||
|
||||
[project.entry-points."wuttasync.importing"]
|
||||
"export.to_csv.from_poser" = "poser.exporting.csv:FromPoserToCsv"
|
||||
"import.to_poser.from_csv" = "poser.importing.csv:FromCsvToPoser"
|
||||
|
||||
:returns: List of all import/export handler classes
|
||||
"""
|
||||
# first load all "registered" Handler classes. note we must
|
||||
# specify lists=True since handlers from different projects
|
||||
# can be registered with the same key.
|
||||
factory_lists = load_entry_points(
|
||||
"wuttasync.importing", lists=True, ignore_errors=True
|
||||
)
|
||||
|
||||
# organize registered classes by spec
|
||||
specs = {}
|
||||
all_factories = []
|
||||
for factories in factory_lists.values():
|
||||
for factory in factories:
|
||||
specs[factory.get_spec()] = factory
|
||||
all_factories.append(factory)
|
||||
|
||||
# many handlers may not be registered per se, but may be
|
||||
# designated via config. so try to include those too
|
||||
seen = set()
|
||||
for factory in all_factories:
|
||||
key = factory.get_key()
|
||||
if key in seen:
|
||||
continue
|
||||
spec = self.get_designated_import_handler_spec(key)
|
||||
if spec and spec not in specs:
|
||||
specs[spec] = self.app.load_object(spec)
|
||||
seen.add(key)
|
||||
|
||||
# flatten back to simple list of classes
|
||||
factories = list(specs.values())
|
||||
return factories
|
||||
|
||||
def get_designated_import_handler_spec(self, key, require=False):
|
||||
"""
|
||||
Returns the designated import/export handler :term:`spec`
|
||||
string for the given type key.
|
||||
|
||||
This just checks config for the designated handler, using the
|
||||
``wuttasync.importing`` prefix regardless of :term:`app name`.
|
||||
For instance:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[wuttasync.importing]
|
||||
export.to_csv.from_poser.handler = poser.exporting.csv:FromPoserToCsv
|
||||
import.to_poser.from_csv.handler = poser.importing.csv:FromCsvToPoser
|
||||
|
||||
See also :meth:`get_designated_import_handlers()` and
|
||||
:meth:`get_import_handler()`.
|
||||
|
||||
:param key: Unique key indicating the type of import/export
|
||||
handler.
|
||||
|
||||
:param require: Flag indicating whether an error should be raised if no
|
||||
handler is found.
|
||||
|
||||
:returns: Spec string for the designated handler. If none is
|
||||
configured, then ``None`` is returned *unless* the
|
||||
``require`` param is true, in which case an error is
|
||||
raised.
|
||||
"""
|
||||
spec = self.config.get(f"wuttasync.importing.{key}.handler")
|
||||
if spec:
|
||||
return spec
|
||||
|
||||
spec = self.config.get(f"wuttasync.importing.{key}.default_handler")
|
||||
if spec:
|
||||
return spec
|
||||
|
||||
if require:
|
||||
raise ValueError(f"Cannot locate import handler spec for key: {key}")
|
||||
return None
|
||||
|
||||
def get_designated_import_handlers(self):
|
||||
"""
|
||||
Returns all *designated* import/export handler *instances*.
|
||||
|
||||
Each import/export handler has a "key" which indicates the
|
||||
"type" of import/export job it performs. For instance the CSV
|
||||
→ Wutta import has the key: ``import.to_wutta.from_csv``
|
||||
|
||||
More than one handler can be defined for that key; however
|
||||
only one such handler will be "designated" for each key.
|
||||
|
||||
This method first loads *all* available import handlers, then
|
||||
organizes them by key, and tries to determine which handler
|
||||
should be designated for each key.
|
||||
|
||||
See also :meth:`get_all_import_handlers()` and
|
||||
:meth:`get_designated_import_handler_spec()`.
|
||||
|
||||
:returns: List of designated import/export handler instances
|
||||
"""
|
||||
grouped = OrderedDict()
|
||||
for factory in self.get_all_import_handlers():
|
||||
key = factory.get_key()
|
||||
grouped.setdefault(key, []).append(factory)
|
||||
|
||||
def find_designated(key, group):
|
||||
spec = self.get_designated_import_handler_spec(key)
|
||||
if spec:
|
||||
for factory in group:
|
||||
if factory.get_spec() == spec:
|
||||
return factory
|
||||
if len(group) == 1:
|
||||
return group[0]
|
||||
return None
|
||||
|
||||
designated = []
|
||||
for key, group in grouped.items():
|
||||
factory = find_designated(key, group)
|
||||
if factory:
|
||||
handler = factory(self.config)
|
||||
designated.append(handler)
|
||||
|
||||
return designated
|
||||
|
||||
def get_import_handler(self, key, require=False, **kwargs):
|
||||
"""
|
||||
Returns the designated :term:`import/export handler <import
|
||||
handler>` instance for the given :term:`import/export key`.
|
||||
|
||||
See also :meth:`get_all_import_handlers()` and
|
||||
:meth:`get_designated_import_handlers()`.
|
||||
|
||||
:param key: Key indicating the type of import/export handler,
|
||||
e.g. ``"import.to_wutta.from_csv"``
|
||||
|
||||
:param require: Set this to true if you want an error raised
|
||||
when no handler is found.
|
||||
|
||||
:param \\**kwargs: Remaining kwargs are passed as-is to the
|
||||
handler constructor.
|
||||
|
||||
:returns: The import/export handler instance. If no handler
|
||||
is found, then ``None`` is returned, unless ``require``
|
||||
param is true, in which case error is raised.
|
||||
"""
|
||||
# first try to fetch the handler per designated spec
|
||||
spec = self.get_designated_import_handler_spec(key)
|
||||
if spec:
|
||||
factory = self.app.load_object(spec)
|
||||
return factory(self.config, **kwargs)
|
||||
|
||||
# nothing was designated, so leverage logic which already
|
||||
# sorts out which handler is "designated" for given key
|
||||
designated = self.get_designated_import_handlers()
|
||||
for handler in designated:
|
||||
if handler.get_key() == key:
|
||||
factory = type(handler)
|
||||
return factory(self.config, **kwargs)
|
||||
|
||||
if require:
|
||||
raise ValueError(f"Cannot locate import handler for key: {key}")
|
||||
return None
|
||||
46
src/wuttasync/cli/__init__.py
Normal file
46
src/wuttasync/cli/__init__.py
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
WuttaSync - ``wutta`` subcommands
|
||||
|
||||
This namespace exposes the following:
|
||||
|
||||
* :func:`~wuttasync.cli.base.import_command()`
|
||||
* :func:`~wuttasync.cli.base.file_export_command()`
|
||||
* :func:`~wuttasync.cli.base.file_import_command()`
|
||||
* :class:`~wuttasync.cli.base.ImportCommandHandler`
|
||||
"""
|
||||
|
||||
from .base import (
|
||||
import_command,
|
||||
file_export_command,
|
||||
file_import_command,
|
||||
ImportCommandHandler,
|
||||
)
|
||||
|
||||
# nb. must bring in all modules for discovery to work
|
||||
from . import export_csv
|
||||
from . import export_wutta
|
||||
from . import import_csv
|
||||
from . import import_versions
|
||||
from . import import_wutta
|
||||
478
src/wuttasync/cli/base.py
Normal file
478
src/wuttasync/cli/base.py
Normal file
|
|
@ -0,0 +1,478 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
``wutta import-csv`` command
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import logging
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
from typing_extensions import Annotated
|
||||
|
||||
import makefun
|
||||
import rich
|
||||
import typer
|
||||
|
||||
from wuttjamaican.app import GenericHandler
|
||||
from wuttasync.importing import ImportHandler, FromFileHandler
|
||||
from wuttasync.exporting import ToFileHandler
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ImportCommandHandler(GenericHandler):
|
||||
"""
|
||||
This is the :term:`handler` responsible for import/export command
|
||||
line runs.
|
||||
|
||||
Normally, the command (actually :term:`subcommand`) logic will
|
||||
create this handler and call its :meth:`run()` method.
|
||||
|
||||
This handler does not know how to import/export data, but it knows
|
||||
how to make its :attr:`import_handler` do it. Likewise, the
|
||||
import handler is not "CLI-aware" - so this provides the glue.
|
||||
|
||||
:param import_handler: During construction, caller can specify the
|
||||
:attr:`import_handler` as any of:
|
||||
|
||||
* import handler instance
|
||||
* import handler factory (e.g. class)
|
||||
* import handler :term:`spec`
|
||||
|
||||
:param key: Optional :term:`import/export key` to use for handler
|
||||
lookup. Only used if ``import_handler`` param is not set.
|
||||
|
||||
:param \\**kwargs: Remaining kwargs are passed as-is to the
|
||||
import/export handler constructor, i.e. when making the
|
||||
:attr:`import_handler`. Note that if the ``import_handler``
|
||||
*instance* is specified, these kwargs will be ignored.
|
||||
|
||||
Typical usage for custom commands will be to provide the spec
|
||||
(please note the *colon*)::
|
||||
|
||||
handler = ImportCommandHandler(
|
||||
config, "poser.importing.foo:FromFooToPoser"
|
||||
)
|
||||
|
||||
Library authors may prefer to use the import/export key; this lets
|
||||
the command work with any designated handler::
|
||||
|
||||
handler = ImportCommandHandler(
|
||||
config, key="import.to_poser.from_foo"
|
||||
)
|
||||
|
||||
See also
|
||||
:meth:`~wuttasync.app.WuttaSyncAppProvider.get_import_handler()`
|
||||
which does the lookup by key.
|
||||
|
||||
Additional kwargs may be specified as needed. Typically these
|
||||
should wind up as attributes on the import/export handler
|
||||
instance::
|
||||
|
||||
handler = ImportCommandHandler(
|
||||
config, "poser.importing.foo:FromFooToPoser", dbkey="remote"
|
||||
)
|
||||
"""
|
||||
|
||||
import_handler = None
|
||||
"""
|
||||
Reference to the :term:`import handler` instance, which is to be
|
||||
invoked when command runs. See also :meth:`run()`.
|
||||
"""
|
||||
|
||||
def __init__(self, config, import_handler=None, key=None, **kwargs):
|
||||
super().__init__(config)
|
||||
|
||||
if import_handler:
|
||||
if isinstance(import_handler, ImportHandler):
|
||||
self.import_handler = import_handler
|
||||
elif callable(import_handler):
|
||||
self.import_handler = import_handler(self.config, **kwargs)
|
||||
else: # spec
|
||||
factory = self.app.load_object(import_handler)
|
||||
self.import_handler = factory(self.config, **kwargs)
|
||||
|
||||
elif key:
|
||||
self.import_handler = self.app.get_import_handler(
|
||||
key, require=True, **kwargs
|
||||
)
|
||||
|
||||
def run(self, ctx, progress=None): # pylint: disable=unused-argument
|
||||
"""
|
||||
Run the import/export job(s) based on command line params.
|
||||
|
||||
This mostly just calls
|
||||
:meth:`~wuttasync.importing.handlers.ImportHandler.process_data()`
|
||||
for the :attr:`import_handler`.
|
||||
|
||||
Unless ``--list-models`` was specified on the command line in
|
||||
which case we do :meth:`list_models()` instead.
|
||||
|
||||
:param ctx: :class:`typer.Context` instance.
|
||||
|
||||
:param progress: Optional progress indicator factory.
|
||||
"""
|
||||
|
||||
# maybe just list models and bail
|
||||
if ctx.params.get("list_models"):
|
||||
self.list_models(ctx.params)
|
||||
return
|
||||
|
||||
# otherwise we'll (hopefully) process some data
|
||||
log.debug("using handler: %s", self.import_handler.get_spec())
|
||||
|
||||
# but first, some extra checks for certain file-based
|
||||
# handlers. this must be done here, because these CLI params
|
||||
# are not technically required (otherwise typer would handle
|
||||
# this instead of us here). and that is because we want to
|
||||
# allow user to specify --list without needing to also specify
|
||||
# --input or --output
|
||||
if isinstance(self.import_handler, FromFileHandler):
|
||||
if not ctx.params.get("input_file_path"):
|
||||
rich.print(
|
||||
"\n[bold yellow]must specify --input folder/file path[/bold yellow]\n",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
elif isinstance(self.import_handler, ToFileHandler):
|
||||
if not ctx.params.get("output_file_path"):
|
||||
rich.print(
|
||||
"\n[bold yellow]must specify --output folder/file path[/bold yellow]\n",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
# all params from caller will be passed along
|
||||
kw = dict(ctx.params)
|
||||
|
||||
# runas user and comment also, but they come from root command
|
||||
if username := ctx.parent.params.get("runas_username"):
|
||||
kw["runas_username"] = username
|
||||
if comment := ctx.parent.params.get("comment"):
|
||||
kw["transaction_comment"] = comment
|
||||
|
||||
# sort out which models to process
|
||||
models = kw.pop("models", None)
|
||||
if not models:
|
||||
models = self.import_handler.get_default_importer_keys()
|
||||
log.debug(
|
||||
"%s %s for models: %s",
|
||||
self.import_handler.actioning,
|
||||
self.import_handler.get_title(),
|
||||
", ".join(models),
|
||||
)
|
||||
|
||||
# process data
|
||||
log.debug("params are: %s", kw)
|
||||
self.import_handler.process_data(*models, **kw)
|
||||
|
||||
def list_models(self, params): # pylint: disable=unused-argument
|
||||
"""
|
||||
Query the :attr:`import_handler`'s supported target models and
|
||||
print the info to stdout.
|
||||
|
||||
This is what happens when command line has ``--list-models``.
|
||||
"""
|
||||
all_keys = list(self.import_handler.importers)
|
||||
default_keys = [k for k in all_keys if self.import_handler.is_default(k)]
|
||||
extra_keys = [k for k in all_keys if k not in default_keys]
|
||||
|
||||
sys.stdout.write("\n")
|
||||
sys.stdout.write("==============================\n")
|
||||
sys.stdout.write(" DEFAULT MODELS:\n")
|
||||
sys.stdout.write("==============================\n")
|
||||
if default_keys:
|
||||
for key in default_keys:
|
||||
sys.stdout.write(f"{key}\n")
|
||||
else:
|
||||
sys.stdout.write("(none)\n")
|
||||
|
||||
sys.stdout.write("==============================\n")
|
||||
sys.stdout.write(" EXTRA MODELS:\n")
|
||||
sys.stdout.write("==============================\n")
|
||||
if extra_keys:
|
||||
for key in extra_keys:
|
||||
sys.stdout.write(f"{key}\n")
|
||||
else:
|
||||
sys.stdout.write("(none)\n")
|
||||
|
||||
sys.stdout.write("==============================\n")
|
||||
sys.stdout.write(f" for {self.import_handler.get_title()}\n\n")
|
||||
|
||||
|
||||
def import_command_template( # pylint: disable=unused-argument,too-many-arguments,too-many-positional-arguments,too-many-locals
|
||||
models: Annotated[
|
||||
Optional[List[str]],
|
||||
typer.Argument(
|
||||
help="Target model(s) to process. Specify one or more, "
|
||||
"or omit to process default models."
|
||||
),
|
||||
] = None,
|
||||
list_models: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
"--list-models", "-l", help="List available target models and exit."
|
||||
),
|
||||
] = False,
|
||||
create: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
help="Allow new target records to be created. " "See aso --max-create."
|
||||
),
|
||||
] = True,
|
||||
update: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
help="Allow existing target records to be updated. "
|
||||
"See also --max-update."
|
||||
),
|
||||
] = True,
|
||||
delete: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
help="Allow existing target records to be deleted. "
|
||||
"See also --max-delete."
|
||||
),
|
||||
] = False,
|
||||
fields: Annotated[
|
||||
str,
|
||||
typer.Option(
|
||||
"--fields", help="List of fields to process. See also --exclude and --key."
|
||||
),
|
||||
] = None,
|
||||
excluded_fields: Annotated[
|
||||
str,
|
||||
typer.Option(
|
||||
"--exclude", help="List of fields *not* to process. See also --fields."
|
||||
),
|
||||
] = None,
|
||||
keys: Annotated[
|
||||
str,
|
||||
typer.Option(
|
||||
"--key",
|
||||
"--keys",
|
||||
help="List of fields to use as record key/identifier. "
|
||||
"See also --fields.",
|
||||
),
|
||||
] = None,
|
||||
max_create: Annotated[
|
||||
int,
|
||||
typer.Option(
|
||||
help="Max number of target records to create (per model). "
|
||||
"See also --create."
|
||||
),
|
||||
] = None,
|
||||
max_update: Annotated[
|
||||
int,
|
||||
typer.Option(
|
||||
help="Max number of target records to update (per model). "
|
||||
"See also --update."
|
||||
),
|
||||
] = None,
|
||||
max_delete: Annotated[
|
||||
int,
|
||||
typer.Option(
|
||||
help="Max number of target records to delete (per model). "
|
||||
"See also --delete."
|
||||
),
|
||||
] = None,
|
||||
max_total: Annotated[
|
||||
int,
|
||||
typer.Option(
|
||||
help="Max number of *any* target record changes which may occur (per model)."
|
||||
),
|
||||
] = None,
|
||||
warnings: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
"--warn",
|
||||
"-W",
|
||||
help="Expect no changes; warn (email the diff) if any occur.",
|
||||
),
|
||||
] = False,
|
||||
warnings_recipients: Annotated[
|
||||
str,
|
||||
typer.Option(
|
||||
"--recip",
|
||||
"--recips",
|
||||
help="Override the recipient(s) for diff warning email.",
|
||||
),
|
||||
] = None,
|
||||
warnings_max_diffs: Annotated[
|
||||
int,
|
||||
typer.Option(
|
||||
"--max-diffs",
|
||||
help="Max number of record diffs to show (per model) in warning email.",
|
||||
),
|
||||
] = 15,
|
||||
dry_run: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
"--dry-run", help="Go through the motions, but rollback the transaction."
|
||||
),
|
||||
] = False,
|
||||
):
|
||||
"""
|
||||
Stub function which provides a common param signature; used with
|
||||
:func:`import_command()`.
|
||||
"""
|
||||
|
||||
|
||||
def import_command(fn):
    """
    Decorator for import/export commands.  Adds common params based on
    :func:`import_command_template()`.

    To use this, e.g. for ``poser import-foo`` command::

       from poser.cli import poser_typer
       from wuttasync.cli import import_command, ImportCommandHandler

       @poser_typer.command()
       @import_command
       def import_foo(
               ctx: typer.Context,
               **kwargs
       ):
           \"""
           Import data from Foo API to Poser DB
           \"""
           config = ctx.parent.wutta_config
           handler = ImportCommandHandler(
               config, import_handler='poser.importing.foo:FromFooToPoser')
           handler.run(ctx.params)

    See also :class:`ImportCommandHandler`.
    """
    wrapped_sig = inspect.signature(fn)
    shared = list(inspect.signature(import_command_template).parameters.values())

    # keep the leading param (ctx), splice in the shared template params,
    # and drop the trailing **kwargs catch-all from the wrapped function
    own = list(wrapped_sig.parameters.values())
    merged = own[:1] + shared + own[1:-1]

    return makefun.create_function(wrapped_sig.replace(parameters=merged), fn)
|
||||
|
||||
|
||||
def file_export_command_template(  # pylint: disable=unused-argument
    # nb. technically this is required, but not if doing --list
    # (so we cannot mark it required here, for that reason)
    output_file_path: Annotated[
        Path,
        typer.Option(
            "--output",
            "-o",
            # nb. path must already exist; may be a folder (preferred,
            # lets each exporter pick its own filename) or a full file path
            exists=True,
            file_okay=True,
            dir_okay=True,
            help="Path to output folder.  Or full path to output file "
            "if only running one target model.",
        ),
    ] = None,
):
    """
    Stub function to provide signature for exporter commands which
    produce data file(s) as output.  Used with
    :func:`file_export_command`.

    Never called directly; only its signature is inspected, so the
    body is intentionally empty.
    """
|
||||
|
||||
|
||||
def file_export_command(fn):
    """
    Decorator for file export commands.  Adds common params based on
    :func:`file_export_command_template`.
    """
    wrapped_sig = inspect.signature(fn)

    # shared params come from the plain import template plus the
    # file-export template, in that order
    shared = list(
        inspect.signature(import_command_template).parameters.values()
    ) + list(inspect.signature(file_export_command_template).parameters.values())

    # keep the leading param (ctx), splice in shared params, and drop
    # the trailing **kwargs catch-all from the wrapped function
    own = list(wrapped_sig.parameters.values())
    merged = own[:1] + shared + own[1:-1]

    return makefun.create_function(wrapped_sig.replace(parameters=merged), fn)
|
||||
|
||||
|
||||
def file_import_command_template(  # pylint: disable=unused-argument
    # nb. technically this is required, but not if doing --list
    # (so we cannot mark it required here, for that reason)
    input_file_path: Annotated[
        Path,
        typer.Option(
            "--input",
            "-i",
            # nb. path must already exist; may be a folder (preferred,
            # lets each importer pick its own filename) or a full file path
            exists=True,
            file_okay=True,
            dir_okay=True,
            help="Path to input folder.  Or full path to input file "
            "if only running one target model.",
        ),
    ] = None,
):
    """
    Stub function to provide signature for import/export commands
    which require input file.  Used with
    :func:`file_import_command()`.

    Never called directly; only its signature is inspected, so the
    body is intentionally empty.
    """
|
||||
|
||||
|
||||
def file_import_command(fn):
    """
    Decorator for import/export commands which require input file.
    Adds common params based on
    :func:`file_import_command_template()`.

    To use this, it's the same method as shown for
    :func:`import_command()` except in this case you would use the
    ``file_import_command`` decorator.
    """
    wrapped_sig = inspect.signature(fn)

    # shared params come from the plain import template plus the
    # file-import template, in that order
    shared = list(
        inspect.signature(import_command_template).parameters.values()
    ) + list(inspect.signature(file_import_command_template).parameters.values())

    # keep the leading param (ctx), splice in shared params, and drop
    # the trailing **kwargs catch-all from the wrapped function
    own = list(wrapped_sig.parameters.values())
    merged = own[:1] + shared + own[1:-1]

    return makefun.create_function(wrapped_sig.replace(parameters=merged), fn)
|
||||
42
src/wuttasync/cli/export_csv.py
Normal file
42
src/wuttasync/cli/export_csv.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
See also: :ref:`wutta-export-csv`
|
||||
"""
|
||||
|
||||
import typer
|
||||
|
||||
from wuttjamaican.cli import wutta_typer
|
||||
|
||||
from .base import file_export_command, ImportCommandHandler
|
||||
|
||||
|
||||
@wutta_typer.command()
@file_export_command
def export_csv(ctx: typer.Context, **kwargs):  # pylint: disable=unused-argument
    """
    Export data from Wutta DB to CSV file(s)
    """
    # nb. all CLI params are available via ctx.params; the handler
    # interprets them itself
    config = ctx.parent.wutta_config
    ImportCommandHandler(config, key="export.to_csv.from_wutta").run(ctx)
|
||||
53
src/wuttasync/cli/export_wutta.py
Normal file
53
src/wuttasync/cli/export_wutta.py
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
See also: :ref:`wutta-export-wutta`
|
||||
"""
|
||||
|
||||
from typing_extensions import Annotated
|
||||
|
||||
import typer
|
||||
|
||||
from wuttjamaican.cli import wutta_typer
|
||||
|
||||
from .base import import_command, ImportCommandHandler
|
||||
|
||||
|
||||
@wutta_typer.command()
@import_command
def export_wutta(
    ctx: typer.Context,
    dbkey: Annotated[
        str,
        typer.Option(help="Config key for app db engine to be used as data target."),
    ] = None,
    **kwargs,
):  # pylint: disable=unused-argument
    """
    Export data to another Wutta DB
    """
    # nb. --dbkey selects which configured app db engine is the target
    config = ctx.parent.wutta_config
    ImportCommandHandler(
        config, key="export.to_wutta.from_wutta", dbkey=ctx.params["dbkey"]
    ).run(ctx)
|
||||
42
src/wuttasync/cli/import_csv.py
Normal file
42
src/wuttasync/cli/import_csv.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2025 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
See also: :ref:`wutta-import-csv`
|
||||
"""
|
||||
|
||||
import typer
|
||||
|
||||
from wuttjamaican.cli import wutta_typer
|
||||
|
||||
from .base import file_import_command, ImportCommandHandler
|
||||
|
||||
|
||||
@wutta_typer.command()
@file_import_command
def import_csv(ctx: typer.Context, **kwargs):  # pylint: disable=unused-argument
    """
    Import data from CSV file(s) to Wutta DB
    """
    # nb. all CLI params are available via ctx.params; the handler
    # interprets them itself
    config = ctx.parent.wutta_config
    ImportCommandHandler(config, key="import.to_wutta.from_csv").run(ctx)
|
||||
65
src/wuttasync/cli/import_versions.py
Normal file
65
src/wuttasync/cli/import_versions.py
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2025 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
See also: :ref:`wutta-import-versions`
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
import rich
|
||||
import typer
|
||||
|
||||
from wuttjamaican.cli import wutta_typer
|
||||
|
||||
from .base import import_command, ImportCommandHandler
|
||||
|
||||
|
||||
@wutta_typer.command()
@import_command
def import_versions(ctx: typer.Context, **kwargs):  # pylint: disable=unused-argument
    """
    Import latest data to version tables, for Wutta DB
    """
    config = ctx.parent.wutta_config
    app = config.get_app()

    # warn/exit if libs are not installed
    # nb. fixed misspelled product name in message ("Continum" -> "Continuum")
    try:
        import wutta_continuum  # pylint: disable=import-outside-toplevel,unused-import
    except ImportError:  # pragma: no cover
        rich.print(
            "\n\t[bold yellow]Wutta-Continuum is not installed![/bold yellow]\n"
            "\n\tIf you want it, run: pip install Wutta-Continuum\n"
        )
        sys.exit(1)

    # warn/exit if feature disabled
    if not app.continuum_is_enabled():  # pragma: no cover
        rich.print(
            "\n\t[bold yellow]Wutta-Continuum is not enabled![/bold yellow]\n"
            "\n\tIf you want it, see: https://docs.wuttaproject.org/wutta-continuum/\n"
        )
        sys.exit(1)

    handler = ImportCommandHandler(config, key="import.to_versions.from_wutta")
    handler.run(ctx)
|
||||
53
src/wuttasync/cli/import_wutta.py
Normal file
53
src/wuttasync/cli/import_wutta.py
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
See also: :ref:`wutta-import-wutta`
|
||||
"""
|
||||
|
||||
from typing_extensions import Annotated
|
||||
|
||||
import typer
|
||||
|
||||
from wuttjamaican.cli import wutta_typer
|
||||
|
||||
from .base import import_command, ImportCommandHandler
|
||||
|
||||
|
||||
@wutta_typer.command()
@import_command
def import_wutta(
    ctx: typer.Context,
    dbkey: Annotated[
        str,
        typer.Option(help="Config key for app db engine to be used as data source."),
    ] = None,
    **kwargs,
):  # pylint: disable=unused-argument
    """
    Import data from another Wutta DB
    """
    # nb. --dbkey selects which configured app db engine is the source
    config = ctx.parent.wutta_config
    ImportCommandHandler(
        config, key="import.to_wutta.from_wutta", dbkey=ctx.params["dbkey"]
    ).run(ctx)
|
||||
50
src/wuttasync/conf.py
Normal file
50
src/wuttasync/conf.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
WuttaSync config extension
|
||||
"""
|
||||
|
||||
from wuttjamaican.conf import WuttaConfigExtension
|
||||
|
||||
|
||||
class WuttaSyncConfig(WuttaConfigExtension):
    """
    Config extension for WuttaSync.

    This just configures some default import/export handlers.
    """

    key = "wuttasync"

    def configure(self, config):  # pylint: disable=empty-docstring
        """ """
        # register default handler specs for the built-in
        # Wutta -> Wutta import and export directions
        defaults = {
            "wuttasync.importing.import.to_wutta.from_wutta.default_handler": (
                "wuttasync.importing.wutta:FromWuttaToWuttaImport"
            ),
            "wuttasync.importing.export.to_wutta.from_wutta.default_handler": (
                "wuttasync.importing.wutta:FromWuttaToWuttaExport"
            ),
        }
        for setting, spec in defaults.items():
            config.setdefault(setting, spec)
|
||||
|
|
@ -0,0 +1,88 @@
|
|||
## -*- coding: utf-8; -*-
|
||||
<html>
|
||||
<body>
|
||||
<h3>Diff warning for ${title} (${handler.actioning})</h3>
|
||||
|
||||
<p style="font-style: italic;">
|
||||
% if dry_run:
|
||||
<span style="font-weight: bold;">DRY RUN</span>
|
||||
- these changes have not yet happened
|
||||
% else:
|
||||
<span style="font-weight: bold;">LIVE RUN</span>
|
||||
- these changes already happened
|
||||
% endif
|
||||
</p>
|
||||
|
||||
<ul>
|
||||
% for model, (created, updated, deleted) in changes.items():
|
||||
<li>
|
||||
<a href="#${model}">${model}</a> -
|
||||
${app.render_quantity(len(created))} created;
|
||||
${app.render_quantity(len(updated))} updated;
|
||||
${app.render_quantity(len(deleted))} deleted
|
||||
</li>
|
||||
% endfor
|
||||
</ul>
|
||||
|
||||
<p>
|
||||
<span style="font-weight: bold;">COMMAND:</span>
|
||||
|
||||
<code>${argv}</code>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<span style="font-weight: bold;">RUNTIME:</span>
|
||||
|
||||
${runtime} (${runtime_display})
|
||||
</p>
|
||||
|
||||
% for model, (created, updated, deleted) in changes.items():
|
||||
|
||||
<br />
|
||||
<h4>
|
||||
<a name="${model}">${model}</a> -
|
||||
${app.render_quantity(len(created))} created;
|
||||
${app.render_quantity(len(updated))} updated;
|
||||
${app.render_quantity(len(deleted))} deleted
|
||||
</h4>
|
||||
|
||||
<div style="padding-left: 2rem;">
|
||||
|
||||
% for obj, source_data in created[:max_diffs]:
|
||||
<h5>${model} <em>created</em> in ${target_title}: ${obj}</h5>
|
||||
<% diff = make_diff({}, source_data, nature="create") %>
|
||||
<div style="padding-left: 2rem;">
|
||||
${diff.render_html()}
|
||||
</div>
|
||||
% endfor
|
||||
% if len(created) > max_diffs:
|
||||
<h5>${model} - ${app.render_quantity(len(created) - max_diffs)} more records <em>created</em> in ${target_title} - not shown here</h5>
|
||||
% endif
|
||||
|
||||
% for obj, target_data, source_data in updated[:max_diffs]:
|
||||
<h5>${model} <em>updated</em> in ${target_title}: ${obj}</h5>
|
||||
<% diff = make_diff(target_data, source_data, nature="update") %>
|
||||
<div style="padding-left: 2rem;">
|
||||
${diff.render_html()}
|
||||
</div>
|
||||
% endfor
|
||||
% if len(updated) > max_diffs:
|
||||
<h5>${model} - ${app.render_quantity(len(updated) - max_diffs)} more records <em>updated</em> in ${target_title} - not shown here</h5>
|
||||
% endif
|
||||
|
||||
% for obj, target_data in deleted[:max_diffs]:
|
||||
<h5>${model} <em>deleted</em> in ${target_title}: ${obj}</h5>
|
||||
<% diff = make_diff(target_data, {}, nature="delete") %>
|
||||
<div style="padding-left: 2rem;">
|
||||
${diff.render_html()}
|
||||
</div>
|
||||
% endfor
|
||||
% if len(deleted) > max_diffs:
|
||||
<h5>${model} - ${app.render_quantity(len(deleted) - max_diffs)} more records <em>deleted</em> in ${target_title} - not shown here</h5>
|
||||
% endif
|
||||
|
||||
</div>
|
||||
|
||||
% endfor
|
||||
</body>
|
||||
</html>
|
||||
182
src/wuttasync/emails.py
Normal file
182
src/wuttasync/emails.py
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
:term:`Email Settings <email setting>` for WuttaSync
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import re
|
||||
from uuid import UUID
|
||||
|
||||
from wuttjamaican.email import EmailSetting
|
||||
from wuttjamaican.diffs import Diff
|
||||
|
||||
|
||||
class ImportExportWarning(EmailSetting):
    """
    Base class for import/export diff warnings; sent when unexpected
    changes occur.

    This inherits from :class:`~wuttjamaican.email.EmailSetting`.
    """

    fallback_key = "import_export_warning"
    "" # suppress docs

    # spec (e.g. "pkg.module:Class") of the relevant import handler;
    # subclass may set this for explicit lookup
    import_handler_spec = None
    # registry key of the relevant import handler; used only if
    # import_handler_spec is not set
    import_handler_key = None

    def get_description(self): # pylint: disable=empty-docstring
        """ """
        # e.g. "Diff warning email for importing CSV -> Wutta"
        handler = self.get_import_handler()
        return f"Diff warning email for {handler.actioning} {handler.get_title()}"

    def get_default_subject(self): # pylint: disable=empty-docstring
        """ """
        handler = self.get_import_handler()
        return f"Changes for {handler.get_title()}"

    def get_import_handler(self): # pylint: disable=missing-function-docstring
        # nb. three lookup strategies, tried in order; raises ValueError
        # if none applies

        # prefer explicit spec, if set
        if self.import_handler_spec:
            return self.app.load_object(self.import_handler_spec)(self.config)

        # next try spec lookup, if key set
        if self.import_handler_key:
            return self.app.get_import_handler(self.import_handler_key, require=True)

        # or maybe try spec lookup based on setting class name,
        # e.g. "import_to_wutta_from_csv_warning" -> "import.to_wutta.from_csv"
        class_name = self.__class__.__name__
        if match := re.match(
            r"^(?P<action>import|export)_to_(?P<target>\S+)_from_(?P<source>\S+)_warning$",
            class_name,
        ):
            key = f"{match['action']}.to_{match['target']}.from_{match['source']}"
            return self.app.get_import_handler(key, require=True)

        raise ValueError(
            "must set import_handler_spec (or import_handler_key) "
            f"for email setting: {class_name}"
        )

    # nb. this is just used for sample data
    def make_diff(self, *args, **kwargs): # pylint: disable=missing-function-docstring
        return Diff(self.config, *args, **kwargs)

    def sample_data(self): # pylint: disable=empty-docstring
        """ """
        # nb. fabricates one User created + one updated + one deleted,
        # to preview the warning email template
        model = self.app.model
        handler = self.get_import_handler()

        alice = model.User(username="alice")
        bob = model.User(username="bob")
        charlie = model.User(username="charlie")

        runtime = datetime.timedelta(seconds=30)
        return {
            "handler": handler,
            "title": handler.get_title(),
            "source_title": handler.get_source_title(),
            "target_title": handler.get_target_title(),
            "runtime": runtime,
            "runtime_display": "30 seconds",
            "dry_run": True,
            "argv": [
                "bin/wutta",
                "import-foo",
                "User",
                "--delete",
                "--dry-run",
                "-W",
            ],
            # per model: (created, updated, deleted) record tuples
            "changes": {
                "User": (
                    # created: (obj, source_data)
                    [
                        (
                            alice,
                            {
                                "uuid": UUID("06946d64-1ebf-79db-8000-ce40345044fe"),
                                "username": "alice",
                            },
                        ),
                    ],
                    # updated: (obj, target_data, source_data)
                    [
                        (
                            bob,
                            {
                                "uuid": UUID("06946d64-1ebf-7a8c-8000-05d78792b084"),
                                "username": "bob",
                            },
                            {
                                "uuid": UUID("06946d64-1ebf-7a8c-8000-05d78792b084"),
                                "username": "bobbie",
                            },
                        ),
                    ],
                    # deleted: (obj, target_data)
                    [
                        (
                            charlie,
                            {
                                "uuid": UUID("06946d64-1ebf-7ad4-8000-1ba52f720c48"),
                                "username": "charlie",
                            },
                        ),
                    ],
                ),
            },
            "make_diff": self.make_diff,
            "max_diffs": 15,
        }
|
||||
|
||||
|
||||
# nb. class names below follow the "<action>_to_<target>_from_<source>_warning"
# convention which ImportExportWarning.get_import_handler() parses to locate
# the relevant import handler


class export_to_wutta_from_wutta_warning(  # pylint: disable=invalid-name
    ImportExportWarning
):
    """
    Diff warning for Wutta → Wutta export.
    """


class import_to_versions_from_wutta_warning(  # pylint: disable=invalid-name
    ImportExportWarning
):
    """
    Diff warning for Wutta → Versions import.
    """


class import_to_wutta_from_csv_warning(  # pylint: disable=invalid-name
    ImportExportWarning
):
    """
    Diff warning for CSV → Wutta import.
    """


class import_to_wutta_from_wutta_warning(  # pylint: disable=invalid-name
    ImportExportWarning
):
    """
    Diff warning for Wutta → Wutta import.
    """
|
||||
43
src/wuttasync/exporting/__init__.py
Normal file
43
src/wuttasync/exporting/__init__.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
Data Import / Export Framework
|
||||
|
||||
This namespace exposes the following:
|
||||
|
||||
* :enum:`~wuttasync.importing.handlers.Orientation`
|
||||
|
||||
And some :term:`export handler <import handler>` base classes:
|
||||
|
||||
* :class:`~wuttasync.exporting.handlers.ExportHandler`
|
||||
* :class:`~wuttasync.exporting.handlers.ToFileHandler`
|
||||
|
||||
And some :term:`exporter <importer>` base classes:
|
||||
|
||||
* :class:`~wuttasync.exporting.base.ToFile`
|
||||
|
||||
See also the :mod:`wuttasync.importing` module.
|
||||
"""
|
||||
|
||||
from .handlers import Orientation, ExportHandler, ToFileHandler
|
||||
from .base import ToFile
|
||||
166
src/wuttasync/exporting/base.py
Normal file
166
src/wuttasync/exporting/base.py
Normal file
|
|
@ -0,0 +1,166 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
Data exporter base classes
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from wuttasync.importing import Importer
|
||||
|
||||
|
||||
class ToFile(Importer):
|
||||
"""
|
||||
Base class for importer/exporter using output file as data target.
|
||||
|
||||
Depending on the subclass, it may be able to "guess" (at least
|
||||
partially) the path to the output file. If not, and/or to avoid
|
||||
ambiguity, the caller must specify the file path.
|
||||
|
||||
In most cases caller may specify any of these via kwarg to the
|
||||
class constructor, or e.g.
|
||||
:meth:`~wuttasync.importing.handlers.ImportHandler.process_data()`:
|
||||
|
||||
* :attr:`output_file_path`
|
||||
* :attr:`output_file_name`
|
||||
|
||||
The subclass itself can also specify via override of these
|
||||
methods:
|
||||
|
||||
* :meth:`get_output_file_path()`
|
||||
* :meth:`get_output_file_name()`
|
||||
|
||||
And of course subclass must override these too:
|
||||
|
||||
* :meth:`open_output_file()`
|
||||
* :meth:`close_output_file()`
|
||||
* (and see also :attr:`output_file`)
|
||||
"""
|
||||
|
||||
output_file_path = None
|
||||
"""
|
||||
Path to output folder, or file.
|
||||
|
||||
The ideal usage is to set this to the output *folder* path. That
|
||||
allows the handler to run several importers in one go. The same
|
||||
output folder path is given to each importer; they then each
|
||||
determine their own output filename within that.
|
||||
|
||||
But you can also set this to the full output folder + file path,
|
||||
e.g. if you're just running one importer. This would override
|
||||
the importer's own logic for determining output filename.
|
||||
|
||||
See also :meth:`get_output_file_path()` and
|
||||
:meth:`get_output_file_name()`.
|
||||
"""
|
||||
|
||||
output_file_name = None
|
||||
"""
|
||||
Optional static output file name (sans folder path).
|
||||
|
||||
If set, this will be used as output filename instead of the
|
||||
importer determining one on its own.
|
||||
|
||||
See also :meth:`get_output_file_name()`.
|
||||
"""
|
||||
|
||||
output_file = None
|
||||
"""
|
||||
Handle to the open output file, if applicable. May be set by
|
||||
:meth:`open_output_file()` for later reference within
|
||||
:meth:`close_output_file()`.
|
||||
"""
|
||||
|
||||
def setup(self):
|
||||
"""
|
||||
Open the output file. See also :meth:`open_output_file()`.
|
||||
"""
|
||||
if not self.dry_run:
|
||||
self.open_output_file()
|
||||
|
||||
def teardown(self):
|
||||
"""
|
||||
Close the output file. See also :meth:`close_output_file()`.
|
||||
"""
|
||||
if not self.dry_run:
|
||||
self.close_output_file()
|
||||
|
||||
def get_output_file_path(self):
    """
    Return the full path to the output file.

    Default logic inspects :attr:`output_file_path`; when that
    points to a folder, it is joined with the result of
    :meth:`get_output_file_name()`.  Otherwise it is returned
    as-is.

    :returns: Path to output file, as string

    :raises ValueError: If :attr:`output_file_path` is not set.
    """
    configured = self.output_file_path
    if not configured:
        raise ValueError("must set output_file_path")

    # nb. a folder path means the importer supplies its own filename
    if not os.path.isdir(configured):
        return configured
    return os.path.join(configured, self.get_output_file_name())
||||
|
||||
def get_output_file_name(self):
    """
    Return the output filename, sans folder path.

    Default logic returns :attr:`output_file_name` when that is
    set, and raises an error otherwise.

    :returns: Output filename, sans folder path

    :raises NotImplementedError: If no filename can be determined.
    """
    name = self.output_file_name
    if not name:
        raise NotImplementedError("can't guess output filename")
    return name
|
||||
|
||||
def open_output_file(self):
    """
    Open the output file for writing target data.

    Subclass must override to specify how this happens; default
    logic is not implemented.  Remember to set :attr:`output_file`
    if applicable, for reference when closing.

    See also :meth:`get_output_file_path()` and
    :meth:`close_output_file()`.
    """
    raise NotImplementedError
||||
|
||||
def close_output_file(self):
    """
    Close the output file for target data.

    Subclass should override to specify how this happens; default
    logic blindly calls ``close()`` on whatever
    :attr:`output_file` happens to reference.

    See also :meth:`open_output_file()`.
    """
    self.output_file.close()
|
||||
321
src/wuttasync/exporting/csv.py
Normal file
321
src/wuttasync/exporting/csv.py
Normal file
|
|
@ -0,0 +1,321 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
Exporting to CSV
|
||||
"""
|
||||
|
||||
import csv
|
||||
import logging
|
||||
from collections import OrderedDict
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy_utils.functions import get_primary_keys, get_columns
|
||||
|
||||
from wuttjamaican.db.util import make_topo_sortkey
|
||||
|
||||
from wuttasync.importing import FromWuttaHandler, FromWutta
|
||||
from wuttasync.exporting import ToFileHandler, ToFile
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ToCsv(ToFile):  # pylint: disable=abstract-method
    """
    Base class for exporter using CSV file as data target.

    This inherits from :class:`~wuttasync.exporting.base.ToFile`.
    """

    output_writer = None
    """
    While the output file is open, this will reference a
    :class:`python:csv.DictWriter` instance.
    """

    csv_encoding = "utf_8"
    """
    Encoding used for the CSV output file.

    You can specify an override if needed when calling
    :meth:`~wuttasync.importing.handlers.ImportHandler.process_data()`.
    """

    def get_output_file_name(self):  # pylint: disable=empty-docstring
        """ """
        if self.output_file_name:
            return self.output_file_name

        model_title = self.get_model_title()
        return f"{model_title}.csv"

    def open_output_file(self):
        """
        Opens the output CSV file for writing.

        This calls
        :meth:`~wuttasync.exporting.base.ToFile.get_output_file_path()`
        and opens that file.  It sets
        :attr:`~wuttasync.exporting.base.ToFile.output_file` and also
        :attr:`output_writer`.  And it calls
        :meth:`write_output_header()` to write the field header row.
        """
        path = self.get_output_file_path()
        log.debug("opening output file: %s", path)

        # nb. per the csv module docs, the file object given to a
        # writer must be opened with newline='' - otherwise embedded
        # and terminating newlines may be translated incorrectly
        # (e.g. \r\r\n line endings on Windows)
        self.output_file = open(  # pylint: disable=consider-using-with
            path, "wt", newline="", encoding=self.csv_encoding
        )

        self.output_writer = csv.DictWriter(
            self.output_file,
            self.fields,
            # quoting=csv.QUOTE_NONNUMERIC
        )

        self.write_output_header()

    def write_output_header(self):
        """
        Write the field header row to the CSV file.

        Default logic calls
        :meth:`~python:csv.DictWriter.writeheader()` on the
        :attr:`output_writer` instance.
        """
        self.output_writer.writeheader()

    def close_output_file(self):  # pylint: disable=empty-docstring
        """ """
        self.output_writer = None
        self.output_file.close()
        self.output_file = None

    def update_target_object(self, obj, source_data, target_data=None):
        """
        In a CSV export the assumption is we always start with an
        empty file, so "create" is the only logical action for each
        record - there are no updates or deletes per se.

        But under the hood, this method is used for create as well, so
        we override it and actually write the record to CSV file.
        Unless :attr:`~wuttasync.importing.base.Importer.dry_run` is
        true, this calls :meth:`~python:csv.csvwriter.writerow()` on
        the :attr:`output_writer` instance.

        See also parent method docs,
        :meth:`~wuttasync.importing.base.Importer.update_target_object()`
        """
        data = self.coerce_csv(source_data)
        if not self.dry_run:
            self.output_writer.writerow(data)
        return data

    def coerce_csv(self, data):
        """
        Coerce the given data dict to CSV-friendly values: ``None``
        becomes the empty string, numbers (int/float) pass through
        unchanged, and all other values are converted via ``str()``.

        :param data: Normalized source data dict.

        :returns: New data dict with coerced values, one entry per
           field in :attr:`~wuttasync.importing.base.Importer.fields`.
        """
        coerced = {}
        for field in self.fields:
            value = data[field]

            if value is None:
                value = ""

            elif isinstance(value, (int, float)):
                pass

            else:
                value = str(value)

            coerced[field] = value
        return coerced
||||
|
||||
|
||||
class FromSqlalchemyToCsvMixin:
    """
    Mixin class for SQLAlchemy ORM → CSV :term:`exporters <importer>`.

    Such exporters are generated automatically by
    :class:`FromSqlalchemyToCsvHandlerMixin`, so you won't typically
    reference this mixin class directly.

    This mixin effectively behaves like the
    :attr:`~wuttasync.importing.base.Importer.model_class` represents
    the source side instead of the target.  It uses
    :attr:`~wuttasync.importing.base.FromSqlalchemy.source_model_class`
    instead, for automatic things like inspecting the fields list.
    """

    def get_model_title(self):
        """Return display title for the (source) data model."""
        # nb. an explicit title wins; otherwise fall back to the
        # source model class name
        try:
            return self.model_title
        except AttributeError:
            return self.source_model_class.__name__

    def get_simple_fields(self):
        """Return list of "simple" field names, per source model."""
        try:
            return self.simple_fields
        except AttributeError:
            pass
        try:
            columns = get_columns(self.source_model_class)
        except sa.exc.NoInspectionAvailable:
            return []
        return list(columns.keys())

    def normalize_source_object(self, obj):
        """Return a normalized data dict for the given source object."""
        effective = self.get_fields()
        wanted = [f for f in self.get_simple_fields() if f in effective]
        return {f: getattr(obj, f) for f in wanted}

    def make_object(self):
        """Create and return a new (source) model instance."""
        factory = self.source_model_class
        return factory()
|
||||
|
||||
|
||||
class FromSqlalchemyToCsvHandlerMixin:
    """
    Mixin class for SQLAlchemy ORM → CSV :term:`export handlers
    <import handler>`.

    This knows how to dynamically generate :term:`exporter <importer>`
    classes to represent the models in the source ORM.  Such classes
    will inherit from :class:`FromSqlalchemyToCsvMixin`, in addition
    to whatever :attr:`FromImporterBase` and :attr:`ToImporterBase`
    reference.

    That all happens within :meth:`define_importers()`.
    """

    target_key = "csv"
    generic_target_title = "CSV"

    # nb. subclass must define this
    FromImporterBase = None
    """
    For a handler to use this mixin, it must set this to a valid base
    class for the ORM source side.  The :meth:`define_importers()`
    logic will use this when dynamically generating new exporter
    classes.
    """

    ToImporterBase = ToCsv
    """
    This must be set to a valid base class for the CSV target side.
    Default is :class:`ToCsv` which should typically be fine; you can
    change if needed.
    """

    def get_source_model(self):
        """
        This should return the :term:`app model` or a similar module
        containing data model classes for the source side.

        The source model is used to dynamically generate a set of
        exporters (e.g. one per table in the source DB) which can use
        CSV file as data target.  See also :meth:`define_importers()`.

        Subclass must override this if needed; default behavior is not
        implemented.
        """
        raise NotImplementedError

    def define_importers(self):
        """
        This mixin overrides typical (manual) importer definition, and
        instead dynamically generates a set of exporters, e.g. one per
        table in the source DB.

        It does this based on the source model, as returned by
        :meth:`get_source_model()`.  It calls
        :meth:`make_importer_factory()` for each model class found.
        """
        model = self.get_source_model()
        base = model.Base

        # nb. every declarative model class (i.e. subclass of Base,
        # but not Base itself) gets its own exporter
        found = {}
        for name in dir(model):
            candidate = getattr(model, name)
            if not isinstance(candidate, type):
                continue
            if candidate is base or not issubclass(candidate, base):
                continue
            found[name] = self.make_importer_factory(candidate, name)

        # nb. order the exporters per schema topography
        sortkey = make_topo_sortkey(model)
        return OrderedDict(
            (name, found[name]) for name in sorted(found, key=sortkey)
        )

    def make_importer_factory(self, model_class, name):
        """
        Generate a new :term:`exporter <importer>` class, targeting
        the given :term:`data model` class.

        The newly-created class will inherit from:

        * :class:`FromSqlalchemyToCsvMixin`
        * :attr:`FromImporterBase`
        * :attr:`ToImporterBase`

        :param model_class: A data model class.

        :param name: The "model name" for the importer/exporter.  New
           class name will be based on this, so e.g. ``Widget`` model
           name becomes ``WidgetImporter`` class name.

        :returns: The new class, meant to process import/export
           targeting the given data model.
        """
        bases = (
            FromSqlalchemyToCsvMixin,
            self.FromImporterBase,
            self.ToImporterBase,
        )
        attrs = {
            "source_model_class": model_class,
            "default_keys": list(get_primary_keys(model_class)),
        }
        return type(f"{name}Importer", bases, attrs)
|
||||
|
||||
|
||||
class ToCsvHandler(ToFileHandler):
    """
    Base class for export handlers which use CSV file(s) as the data
    target.
    """
||||
|
||||
|
||||
class FromWuttaToCsv(
    FromSqlalchemyToCsvHandlerMixin, FromWuttaHandler, ToCsvHandler
):  # pylint: disable=too-many-ancestors
    """
    Handler for Wutta (:term:`app database`) → CSV export.

    This uses :class:`FromSqlalchemyToCsvHandlerMixin` for most of the
    heavy lifting.
    """

    FromImporterBase = FromWutta

    def get_source_model(self):
        """Return the :term:`app model` module, as source model."""
        return self.app.model
|
||||
50
src/wuttasync/exporting/handlers.py
Normal file
50
src/wuttasync/exporting/handlers.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
Export Handlers
|
||||
"""
|
||||
|
||||
from wuttasync.importing import ImportHandler, Orientation
|
||||
|
||||
|
||||
class ExportHandler(ImportHandler):
    """
    Generic base class for :term:`export handlers <import handler>`.

    This is essentially just
    :class:`~wuttasync.importing.handlers.ImportHandler`, but with
    the orientation flipped.
    """

    orientation = Orientation.EXPORT
    "" # nb. suppress docs
|
||||
|
||||
|
||||
class ToFileHandler(ExportHandler):
    """
    Base class for export handlers whose data target is one or more
    output file(s).

    Importers (exporters) used by this handler are generally assumed
    to subclass :class:`~wuttasync.exporting.base.ToFile`.
    """
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
|
|
@ -22,7 +22,40 @@
|
|||
################################################################################
|
||||
"""
|
||||
Data Import / Export Framework
|
||||
|
||||
This namespace exposes the following:
|
||||
|
||||
* :enum:`~wuttasync.importing.handlers.Orientation`
|
||||
|
||||
And some :term:`import handler` base classes:
|
||||
|
||||
* :class:`~wuttasync.importing.handlers.ImportHandler`
|
||||
* :class:`~wuttasync.importing.handlers.FromFileHandler`
|
||||
* :class:`~wuttasync.importing.handlers.FromSqlalchemyHandler`
|
||||
* :class:`~wuttasync.importing.handlers.FromWuttaHandler`
|
||||
* :class:`~wuttasync.importing.handlers.ToSqlalchemyHandler`
|
||||
* :class:`~wuttasync.importing.handlers.ToWuttaHandler`
|
||||
|
||||
And some :term:`importer` base classes:
|
||||
|
||||
* :class:`~wuttasync.importing.base.Importer`
|
||||
* :class:`~wuttasync.importing.base.FromFile`
|
||||
* :class:`~wuttasync.importing.base.FromSqlalchemy`
|
||||
* :class:`~wuttasync.importing.base.FromWutta`
|
||||
* :class:`~wuttasync.importing.base.ToSqlalchemy`
|
||||
* :class:`~wuttasync.importing.model.ToWutta`
|
||||
|
||||
See also the :mod:`wuttasync.exporting` module.
|
||||
"""
|
||||
|
||||
from .handlers import Orientation, ImportHandler, FromFileHandler, ToSqlalchemyHandler
|
||||
from .base import Importer, FromFile, ToSqlalchemy
|
||||
from .handlers import (
|
||||
Orientation,
|
||||
ImportHandler,
|
||||
FromFileHandler,
|
||||
FromSqlalchemyHandler,
|
||||
FromWuttaHandler,
|
||||
ToSqlalchemyHandler,
|
||||
ToWuttaHandler,
|
||||
)
|
||||
from .base import Importer, FromFile, FromSqlalchemy, FromWutta, ToSqlalchemy
|
||||
from .model import ToWutta
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
|
|
@ -23,10 +23,13 @@
|
|||
"""
|
||||
Data Importer base class
|
||||
"""
|
||||
# pylint: disable=too-many-lines
|
||||
|
||||
import os
|
||||
import logging
|
||||
from collections import OrderedDict
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy_utils.functions import get_primary_keys, get_columns
|
||||
|
||||
|
|
@ -36,7 +39,14 @@ from wuttasync.util import data_diffs
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Importer:
|
||||
class ImportLimitReached(Exception):
|
||||
"""
|
||||
Exception raised when an import/export job reaches the max number
|
||||
of changes allowed.
|
||||
"""
|
||||
|
||||
|
||||
class Importer: # pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
"""
|
||||
Base class for all data importers / exporters.
|
||||
|
||||
|
|
@ -71,6 +81,25 @@ class Importer:
|
|||
|
||||
It is primarily (only?) used when the target side of the
|
||||
import/export uses SQLAlchemy ORM.
|
||||
|
||||
.. attribute:: fields
|
||||
|
||||
This is the official list of "effective" fields to be processed
|
||||
for the current import/export job.
|
||||
|
||||
Code theoretically should not access this directly but instead
|
||||
call :meth:`get_fields()`. However it is often convenient to
|
||||
overwrite this attribute directly, for dynamic fields. If so
|
||||
then ``get_fields()`` will return the new value. And really,
|
||||
it's probably just as safe to read this attribute directly too.
|
||||
|
||||
.. attribute:: excluded_fields
|
||||
|
||||
This attribute will often not exist, but is mentioned here for
|
||||
reference.
|
||||
|
||||
It may be specified via constructor param in which case each
|
||||
field listed therein will be removed from :attr:`fields`.
|
||||
"""
|
||||
|
||||
allow_create = True
|
||||
|
|
@ -155,23 +184,56 @@ class Importer:
|
|||
:meth:`get_target_cache()`.
|
||||
"""
|
||||
|
||||
default_keys = None
|
||||
"""
|
||||
In certain edge cases, the importer class must declare its key
|
||||
list without using :attr:`keys`.
|
||||
|
||||
(As of now this only happens with
|
||||
:class:`~wuttasync.importing.versions.FromWuttaToVersions` which
|
||||
must dynamically create importer classes.)
|
||||
|
||||
If applicable, this value is used as fallback for
|
||||
:meth:`get_keys()`.
|
||||
"""
|
||||
|
||||
max_create = None
|
||||
max_update = None
|
||||
max_delete = None
|
||||
max_total = None
|
||||
|
||||
handler = None
|
||||
model_class = None
|
||||
|
||||
def __init__(self, config, **kwargs):
|
||||
self.config = config
|
||||
self.app = self.config.get_app()
|
||||
|
||||
self.create = kwargs.pop('create',
|
||||
kwargs.pop('allow_create', self.allow_create))
|
||||
self.update = kwargs.pop('update',
|
||||
kwargs.pop('allow_update', self.allow_update))
|
||||
self.delete = kwargs.pop('delete',
|
||||
kwargs.pop('allow_delete', self.allow_delete))
|
||||
self.create = kwargs.pop(
|
||||
"create", kwargs.pop("allow_create", self.allow_create)
|
||||
)
|
||||
self.update = kwargs.pop(
|
||||
"update", kwargs.pop("allow_update", self.allow_update)
|
||||
)
|
||||
self.delete = kwargs.pop(
|
||||
"delete", kwargs.pop("allow_delete", self.allow_delete)
|
||||
)
|
||||
|
||||
self.__dict__.update(kwargs)
|
||||
|
||||
self.simple_fields = self.get_simple_fields()
|
||||
self.supported_fields = self.get_supported_fields()
|
||||
self.fields = self.get_fields()
|
||||
|
||||
# fields could be comma-delimited string from cli param
|
||||
if isinstance(self.fields, str):
|
||||
self.fields = self.config.parse_list(self.fields)
|
||||
|
||||
# discard any fields caller asked to exclude
|
||||
excluded = getattr(self, "excluded_fields", None)
|
||||
if excluded:
|
||||
if isinstance(excluded, str):
|
||||
excluded = self.config.parse_list(excluded)
|
||||
self.fields = [f for f in self.fields if f not in excluded]
|
||||
|
||||
@property
|
||||
def orientation(self):
|
||||
"""
|
||||
|
|
@ -203,7 +265,7 @@ class Importer:
|
|||
"""
|
||||
Returns the display title for the target data model.
|
||||
"""
|
||||
if hasattr(self, 'model_title'):
|
||||
if hasattr(self, "model_title"):
|
||||
return self.model_title
|
||||
|
||||
# TODO: this will fail if not using a model class, obviously..
|
||||
|
|
@ -222,10 +284,13 @@ class Importer:
|
|||
|
||||
:returns: Possibly empty list of "simple" field names.
|
||||
"""
|
||||
if hasattr(self, 'simple_fields'):
|
||||
if hasattr(self, "simple_fields"):
|
||||
return self.simple_fields
|
||||
|
||||
fields = get_columns(self.model_class)
|
||||
try:
|
||||
fields = get_columns(self.model_class)
|
||||
except sa.exc.NoInspectionAvailable:
|
||||
return []
|
||||
return list(fields.keys())
|
||||
|
||||
def get_supported_fields(self):
|
||||
|
|
@ -245,7 +310,7 @@ class Importer:
|
|||
|
||||
:returns: List of all "supported" field names.
|
||||
"""
|
||||
if hasattr(self, 'supported_fields'):
|
||||
if hasattr(self, "supported_fields"):
|
||||
return self.supported_fields
|
||||
|
||||
return self.get_simple_fields()
|
||||
|
|
@ -255,6 +320,8 @@ class Importer:
|
|||
This should return the "effective" list of fields which are to
|
||||
be used for the import/export.
|
||||
|
||||
See also :attr:`fields` which is normally what this returns.
|
||||
|
||||
All fields in this list should also be found in the output for
|
||||
:meth:`get_supported_fields()`.
|
||||
|
||||
|
|
@ -262,24 +329,68 @@ class Importer:
|
|||
|
||||
:returns: List of "effective" field names.
|
||||
"""
|
||||
if hasattr(self, 'fields'):
|
||||
if hasattr(self, "fields") and self.fields is not None:
|
||||
return self.fields
|
||||
|
||||
return self.get_supported_fields()
|
||||
|
||||
def get_keys(self):
|
||||
"""
|
||||
Must return the key field(s) for use with import/export.
|
||||
Retrieve the list of key field(s) for use with import/export.
|
||||
The result is cached, so the key list is only calculated once.
|
||||
|
||||
Many importers have just one key field, but we always assume a
|
||||
key *list* - so this often is a list with just one field.
|
||||
|
||||
All fields in this list should also be found in the output for
|
||||
:meth:`get_fields()`.
|
||||
|
||||
Many importers will declare this via :attr:`keys` (or
|
||||
:attr:`key`) static attribute::
|
||||
|
||||
class SprocketImporter(Importer):
|
||||
|
||||
# nb. all these examples work the same
|
||||
|
||||
# 'keys' is the preferred attribute
|
||||
keys = ("sprocket_id",) # <-- the "canonical" way
|
||||
keys = ["sprocket_id"]
|
||||
keys = "sprocket_id"
|
||||
|
||||
# 'key' is not preferred, but works
|
||||
key = ("sprocket_id",)
|
||||
key = "sprocket_id"
|
||||
|
||||
If neither ``keys`` nor ``key`` is set, as a special case
|
||||
:attr:`default_keys` is used if set.
|
||||
|
||||
If no keys were declared, the list is inspected from the model
|
||||
class via
|
||||
:func:`sqlalchemy-utils:sqlalchemy_utils.functions.get_primary_keys()`.
|
||||
|
||||
In any case, the determination is made only once. This method
|
||||
also *sets* :attr:`keys` on the instance, so it will return
|
||||
that as-is for subsequent calls.
|
||||
|
||||
:returns: List of "key" field names.
|
||||
"""
|
||||
if hasattr(self, 'key'):
|
||||
keys = None
|
||||
|
||||
# nb. prefer 'keys' but use 'key' as fallback
|
||||
if "keys" in self.__dict__:
|
||||
keys = self.__dict__["keys"]
|
||||
elif hasattr(self, "keys"):
|
||||
keys = self.keys
|
||||
elif hasattr(self, "key"):
|
||||
keys = self.key
|
||||
else:
|
||||
keys = self.default_keys
|
||||
|
||||
if keys:
|
||||
if isinstance(keys, str):
|
||||
keys = [keys]
|
||||
keys = self.config.parse_list(keys)
|
||||
# nb. save for next time
|
||||
self.__dict__["keys"] = keys
|
||||
return keys
|
||||
|
||||
return list(get_primary_keys(self.model_class))
|
||||
|
|
@ -314,9 +425,26 @@ class Importer:
|
|||
Note that subclass generally should not override this method,
|
||||
but instead some of the others.
|
||||
|
||||
:param source_data: Optional sequence of normalized source
|
||||
data. If not specified, it is obtained from
|
||||
:meth:`normalize_source_data()`.
|
||||
This first calls :meth:`setup()` to prepare things as needed.
|
||||
|
||||
If no source data is specified, it calls
|
||||
:meth:`normalize_source_data()` to get that. Regardless, it
|
||||
also calls :meth:`get_unique_data()` to discard any
|
||||
duplicates.
|
||||
|
||||
If :attr:`caches_target` is set, it calls
|
||||
:meth:`get_target_cache()` and assigns result to
|
||||
:attr:`cached_target`.
|
||||
|
||||
Then depending on values for :attr:`create`, :attr:`update`
|
||||
and :attr:`delete` it may call:
|
||||
|
||||
* :meth:`do_create_update()`
|
||||
* :meth:`do_delete()`
|
||||
|
||||
And finally it calls :meth:`teardown()` for cleanup.
|
||||
|
||||
:param source_data: Sequence of normalized source data, if known.
|
||||
|
||||
:param progress: Optional progress indicator factory.
|
||||
|
||||
|
|
@ -326,25 +454,26 @@ class Importer:
|
|||
* ``created`` - list of records created on the target
|
||||
* ``updated`` - list of records updated on the target
|
||||
* ``deleted`` - list of records deleted on the target
|
||||
|
||||
See also these methods which this one calls:
|
||||
|
||||
* :meth:`setup()`
|
||||
* :meth:`do_create_update()`
|
||||
* :meth:`do_delete()`
|
||||
* :meth:`teardown()`
|
||||
"""
|
||||
# TODO: should add try/catch around this all? and teardown() in finally: clause?
|
||||
self.setup()
|
||||
created = []
|
||||
updated = []
|
||||
deleted = []
|
||||
|
||||
model_title = self.get_model_title()
|
||||
log.debug(
|
||||
"using key fields for %s: %s", model_title, ", ".join(self.get_keys())
|
||||
)
|
||||
|
||||
# get complete set of normalized source data
|
||||
if source_data is None:
|
||||
source_data = self.normalize_source_data(progress=progress)
|
||||
|
||||
# TODO: should exclude duplicate source records
|
||||
# source_data, unique = self.get_unique_data(source_data)
|
||||
# nb. prune duplicate records from source data
|
||||
source_data, source_keys = self.get_unique_data(source_data)
|
||||
|
||||
log.debug("got %s %s records from source", len(source_data), model_title)
|
||||
|
||||
# maybe cache existing target data
|
||||
if self.caches_target:
|
||||
|
|
@ -356,7 +485,14 @@ class Importer:
|
|||
|
||||
# delete target data
|
||||
if self.delete:
|
||||
deleted = self.do_delete(source_data)
|
||||
changes = len(created) + len(updated)
|
||||
if self.max_total and changes >= self.max_total:
|
||||
log.debug(
|
||||
"max of %s total changes already reached; skipping deletions",
|
||||
self.max_total,
|
||||
)
|
||||
else:
|
||||
deleted = self.do_delete(source_keys, changes, progress=progress)
|
||||
|
||||
self.teardown()
|
||||
return created, updated, deleted
|
||||
|
|
@ -394,7 +530,7 @@ class Importer:
|
|||
# cache the set of fields to use for diff checks
|
||||
fields = set(self.get_fields()) - set(self.get_keys())
|
||||
|
||||
def create_update(source_data, i):
|
||||
def create_update(source_data, i): # pylint: disable=unused-argument
|
||||
|
||||
# try to fetch target object per source key
|
||||
key = self.get_record_key(source_data)
|
||||
|
|
@ -407,13 +543,34 @@ class Importer:
|
|||
if diffs:
|
||||
|
||||
# data differs, so update target object
|
||||
log.debug("fields (%s) differed for target data: %s and source data: %s",
|
||||
','.join(diffs), target_data, source_data)
|
||||
target_object = self.update_target_object(target_object,
|
||||
source_data,
|
||||
target_data=target_data)
|
||||
log.debug(
|
||||
"fields (%s) differed for target data: %s and source data: %s",
|
||||
",".join(diffs),
|
||||
target_data,
|
||||
source_data,
|
||||
)
|
||||
target_object = self.update_target_object(
|
||||
target_object, source_data, target_data=target_data
|
||||
)
|
||||
updated.append((target_object, target_data, source_data))
|
||||
|
||||
# stop if we reach max allowed
|
||||
if self.max_update and len(updated) >= self.max_update:
|
||||
log.warning(
|
||||
"max of %s *updated* records has been reached; stopping now",
|
||||
self.max_update,
|
||||
)
|
||||
raise ImportLimitReached()
|
||||
if (
|
||||
self.max_total
|
||||
and (len(created) + len(updated)) >= self.max_total
|
||||
):
|
||||
log.warning(
|
||||
"max of %s *total changes* has been reached; stopping now",
|
||||
self.max_total,
|
||||
)
|
||||
raise ImportLimitReached()
|
||||
|
||||
elif not target_object and self.create:
|
||||
|
||||
# target object not yet present, so create it
|
||||
|
|
@ -427,23 +584,115 @@ class Importer:
|
|||
# 'object': target_object,
|
||||
# 'data': self.normalize_target_object(target_object),
|
||||
# }
|
||||
|
||||
# stop if we reach max allowed
|
||||
if self.max_create and len(created) >= self.max_create:
|
||||
log.warning(
|
||||
"max of %s *created* records has been reached; stopping now",
|
||||
self.max_create,
|
||||
)
|
||||
raise ImportLimitReached()
|
||||
if (
|
||||
self.max_total
|
||||
and (len(created) + len(updated)) >= self.max_total
|
||||
):
|
||||
log.warning(
|
||||
"max of %s *total changes* has been reached; stopping now",
|
||||
self.max_total,
|
||||
)
|
||||
raise ImportLimitReached()
|
||||
|
||||
else:
|
||||
log.debug("did NOT create new %s for key: %s", model_title, key)
|
||||
|
||||
actioning = self.actioning.capitalize()
|
||||
target_title = self.handler.get_target_title()
|
||||
self.app.progress_loop(create_update, all_source_data, progress,
|
||||
message=f"{actioning} {model_title} data to {target_title}")
|
||||
try:
|
||||
self.app.progress_loop(
|
||||
create_update,
|
||||
all_source_data,
|
||||
progress,
|
||||
message=f"{actioning} {model_title} data to {target_title}",
|
||||
)
|
||||
except ImportLimitReached:
|
||||
pass
|
||||
|
||||
return created, updated
|
||||
|
||||
def do_delete(self, source_data, progress=None):
|
||||
def do_delete(self, source_keys, changes=None, progress=None):
|
||||
"""
|
||||
TODO: not yet implemented
|
||||
Delete records from the target side as needed, per the given
|
||||
source data.
|
||||
|
||||
:returns: List of records deleted on the target.
|
||||
This will call :meth:`get_deletable_keys()` to discover which
|
||||
keys existing on the target side could theoretically allow
|
||||
being deleted.
|
||||
|
||||
From that set it will remove all the given source keys - since
|
||||
such keys still exist on the source, they should not be
|
||||
deleted from target.
|
||||
|
||||
If any "deletable" keys remain, their corresponding objects
|
||||
are removed from target via :meth:`delete_target_object()`.
|
||||
|
||||
:param source_keys: A ``set`` of keys for all source records.
|
||||
Essentially this is just the list of keys for which target
|
||||
records should *not* be deleted - since they still exist in
|
||||
the data source.
|
||||
|
||||
:param changes: Number of changes which have already been made
|
||||
on the target side. Used to enforce max allowed changes,
|
||||
if applicable.
|
||||
|
||||
:param progress: Optional progress indicator factory.
|
||||
|
||||
:returns: List of target records which were deleted.
|
||||
"""
|
||||
return []
|
||||
model_title = self.get_model_title()
|
||||
deleted = []
|
||||
changes = changes or 0
|
||||
|
||||
# which target records are deletable? potentially all target
|
||||
# records may be eligible, but anything also found in source
|
||||
# is *not* eligible.
|
||||
deletable = self.get_deletable_keys() - source_keys
|
||||
log.debug("found %s records to delete", len(deletable))
|
||||
|
||||
def delete(key, i): # pylint: disable=unused-argument
|
||||
cached = self.cached_target.pop(key)
|
||||
obj = cached["object"]
|
||||
|
||||
# delete target object
|
||||
log.debug("deleting %s %s: %s", model_title, key, obj)
|
||||
if self.delete_target_object(obj):
|
||||
deleted.append((obj, cached["data"]))
|
||||
|
||||
# stop if we reach max allowed
|
||||
if self.max_delete and len(deleted) >= self.max_delete:
|
||||
log.warning(
|
||||
"max of %s *deleted* records has been reached; stopping now",
|
||||
self.max_delete,
|
||||
)
|
||||
raise ImportLimitReached()
|
||||
if self.max_total and (changes + len(deleted)) >= self.max_total:
|
||||
log.warning(
|
||||
"max of %s *total changes* has been reached; stopping now",
|
||||
self.max_total,
|
||||
)
|
||||
raise ImportLimitReached()
|
||||
|
||||
try:
|
||||
model_title = self.get_model_title()
|
||||
self.app.progress_loop(
|
||||
delete,
|
||||
sorted(deletable),
|
||||
progress,
|
||||
message=f"Deleting {model_title} records",
|
||||
)
|
||||
except ImportLimitReached:
|
||||
pass
|
||||
|
||||
return deleted
|
||||
|
||||
def get_record_key(self, data):
|
||||
"""
|
||||
|
|
@ -522,17 +771,66 @@ class Importer:
|
|||
source_objects = self.get_source_objects()
|
||||
normalized = []
|
||||
|
||||
def normalize(obj, i):
|
||||
def normalize(obj, i): # pylint: disable=unused-argument
|
||||
data = self.normalize_source_object_all(obj)
|
||||
if data:
|
||||
normalized.extend(data)
|
||||
|
||||
model_title = self.get_model_title()
|
||||
source_title = self.handler.get_source_title()
|
||||
self.app.progress_loop(normalize, source_objects, progress,
|
||||
message=f"Reading {model_title} data from {source_title}")
|
||||
self.app.progress_loop(
|
||||
normalize,
|
||||
source_objects,
|
||||
progress,
|
||||
message=f"Reading {model_title} data from {source_title}",
|
||||
)
|
||||
return normalized
|
||||
|
||||
def get_unique_data(self, source_data):
|
||||
"""
|
||||
Return a copy of the given source data, with any duplicate
|
||||
records removed.
|
||||
|
||||
This looks for duplicates based on the effective key fields,
|
||||
cf. :meth:`get_keys()`. The first record found with a given
|
||||
key is kept; subsequent records with that key are discarded.
|
||||
|
||||
This is called from :meth:`process_data()` and is done largely
|
||||
for sanity's sake, to avoid indeterminate behavior when source
|
||||
data contains duplicates. For instance:
|
||||
|
||||
Problem #1: If source contains 2 records with key 'X' it makes
|
||||
no sense to create both records on the target side.
|
||||
|
||||
Problem #2: if the 2 source records have different data (apart
|
||||
from their key) then which should target reflect?
|
||||
|
||||
So the main point of this method is to discard the duplicates
|
||||
to avoid problem #1, but do it in a deterministic way so at
|
||||
least the "choice" of which record is kept will not vary
|
||||
across runs; hence "pseudo-resolve" problem #2.
|
||||
|
||||
:param source_data: Sequence of normalized source data.
|
||||
|
||||
:returns: A 2-tuple of ``(source_data, unique_keys)`` where:
|
||||
|
||||
* ``source_data`` is the final list of source data
|
||||
* ``unique_keys`` is a :class:`python:set` of the source record keys
|
||||
"""
|
||||
unique = OrderedDict()
|
||||
for data in source_data:
|
||||
key = self.get_record_key(data)
|
||||
if key in unique:
|
||||
log.warning(
|
||||
"duplicate %s records detected from %s for key: %s",
|
||||
self.get_model_title(),
|
||||
self.handler.get_source_title(),
|
||||
key,
|
||||
)
|
||||
else:
|
||||
unique[key] = data
|
||||
return list(unique.values()), set(unique)
|
||||
|
||||
def get_source_objects(self):
|
||||
"""
|
||||
This method (if applicable) should return a sequence of "raw"
|
||||
|
|
@ -567,6 +865,7 @@ class Importer:
|
|||
data = self.normalize_source_object(obj)
|
||||
if data:
|
||||
return [data]
|
||||
return None
|
||||
|
||||
def normalize_source_object(self, obj):
|
||||
"""
|
||||
|
|
@ -627,16 +926,21 @@ class Importer:
|
|||
objects = self.get_target_objects(source_data=source_data)
|
||||
cached = {}
|
||||
|
||||
def cache(obj, i):
|
||||
def cache(obj, i): # pylint: disable=unused-argument
|
||||
data = self.normalize_target_object(obj)
|
||||
if data:
|
||||
key = self.get_record_key(data)
|
||||
cached[key] = {'object': obj, 'data': data}
|
||||
cached[key] = {"object": obj, "data": data}
|
||||
|
||||
model_title = self.get_model_title()
|
||||
target_title = self.handler.get_target_title()
|
||||
self.app.progress_loop(cache, objects, progress,
|
||||
message=f"Reading {model_title} data from {target_title}")
|
||||
self.app.progress_loop(
|
||||
cache,
|
||||
objects,
|
||||
progress,
|
||||
message=f"Reading {model_title} data from {target_title}",
|
||||
)
|
||||
log.debug(f"cached %s {model_title} records from target", len(cached))
|
||||
return cached
|
||||
|
||||
def get_target_objects(self, source_data=None, progress=None):
|
||||
|
|
@ -677,7 +981,8 @@ class Importer:
|
|||
"""
|
||||
if self.caches_target and self.cached_target is not None:
|
||||
cached = self.cached_target.get(key)
|
||||
return cached['object'] if cached else None
|
||||
return cached["object"] if cached else None
|
||||
return None
|
||||
|
||||
def normalize_target_object(self, obj):
|
||||
"""
|
||||
|
|
@ -701,12 +1006,46 @@ class Importer:
|
|||
:returns: Dict of normalized data fields, or ``None``.
|
||||
"""
|
||||
fields = self.get_fields()
|
||||
fields = [f for f in self.get_simple_fields()
|
||||
if f in fields]
|
||||
data = dict([(field, getattr(obj, field))
|
||||
for field in fields])
|
||||
fields = [f for f in self.get_simple_fields() if f in fields]
|
||||
data = {field: getattr(obj, field) for field in fields}
|
||||
return data
|
||||
|
||||
def get_deletable_keys(self, progress=None):
|
||||
"""
|
||||
Return a set of record keys from the target side, which are
|
||||
*potentially* eligible for deletion.
|
||||
|
||||
Inclusion in this set does not imply a given record/key
|
||||
*should* be deleted, only that app logic (e.g. business rules)
|
||||
does not prevent it.
|
||||
|
||||
Default logic here will look in the :attr:`cached_target` and
|
||||
then call :meth:`can_delete_object()` for each record in the
|
||||
cache. If that call returns true for a given key, it is
|
||||
included in the result.
|
||||
|
||||
:returns: The ``set`` of target record keys eligible for
|
||||
deletion.
|
||||
"""
|
||||
if not self.caches_target:
|
||||
return set()
|
||||
|
||||
keys = set()
|
||||
|
||||
def check(key, i): # pylint: disable=unused-argument
|
||||
data = self.cached_target[key]["data"]
|
||||
obj = self.cached_target[key]["object"]
|
||||
if self.can_delete_object(obj, data):
|
||||
keys.add(key)
|
||||
|
||||
self.app.progress_loop(
|
||||
check,
|
||||
set(self.cached_target),
|
||||
progress,
|
||||
message="Determining which objects can be deleted",
|
||||
)
|
||||
return keys
|
||||
|
||||
##############################
|
||||
# CRUD methods
|
||||
##############################
|
||||
|
|
@ -722,12 +1061,11 @@ class Importer:
|
|||
|
||||
:returns: New object for the target side, or ``None``.
|
||||
"""
|
||||
if source_data.get('__ignoreme__'):
|
||||
return
|
||||
if source_data.get("__ignoreme__"):
|
||||
return None
|
||||
|
||||
obj = self.make_empty_object(key)
|
||||
if obj:
|
||||
return self.update_target_object(obj, source_data)
|
||||
return self.update_target_object(obj, source_data)
|
||||
|
||||
def make_empty_object(self, key):
|
||||
"""
|
||||
|
|
@ -756,7 +1094,9 @@ class Importer:
|
|||
|
||||
Default logic will make a new instance of :attr:`model_class`.
|
||||
"""
|
||||
return self.model_class()
|
||||
if callable(self.model_class):
|
||||
return self.model_class() # pylint: disable=not-callable
|
||||
raise AttributeError("model_class is not callable!")
|
||||
|
||||
def update_target_object(self, obj, source_data, target_data=None):
|
||||
"""
|
||||
|
|
@ -795,23 +1135,59 @@ class Importer:
|
|||
# object key(s) should already be populated
|
||||
continue
|
||||
|
||||
# elif field not in source_data:
|
||||
# if field not in source_data:
|
||||
# # no source data for field
|
||||
# continue
|
||||
|
||||
elif field in fields:
|
||||
if field in fields:
|
||||
|
||||
# field is eligible for update generally, so compare
|
||||
# values between records
|
||||
if (not target_data
|
||||
if (
|
||||
not target_data
|
||||
or field not in target_data
|
||||
or target_data[field] != source_data[field]):
|
||||
or target_data[field] != source_data[field]
|
||||
):
|
||||
|
||||
# data mismatch; update field for target object
|
||||
setattr(obj, field, source_data[field])
|
||||
|
||||
return obj
|
||||
|
||||
def can_delete_object(self, obj, data=None): # pylint: disable=unused-argument
|
||||
"""
|
||||
Should return true or false indicating whether the given
|
||||
object "can" be deleted. Default is to return true in all
|
||||
cases.
|
||||
|
||||
If you return false then the importer will know not to call
|
||||
:meth:`delete_target_object()` even if the data sets imply
|
||||
that it should.
|
||||
|
||||
:param obj: Raw object on the target side.
|
||||
|
||||
:param data: Normalized data dict for the target record, if
|
||||
known.
|
||||
|
||||
:returns: ``True`` if object can be deleted, else ``False``.
|
||||
"""
|
||||
return True
|
||||
|
||||
def delete_target_object(self, obj): # pylint: disable=unused-argument
|
||||
"""
|
||||
Delete the given raw object from the target side, and return
|
||||
true if successful.
|
||||
|
||||
This is called from :meth:`do_delete()`.
|
||||
|
||||
Default logic for this method just returns false; subclass
|
||||
should override if needed.
|
||||
|
||||
:returns: Should return ``True`` if deletion succeeds, or
|
||||
``False`` if deletion failed or was skipped.
|
||||
"""
|
||||
return False
|
||||
|
||||
|
||||
class FromFile(Importer):
|
||||
"""
|
||||
|
|
@ -861,6 +1237,8 @@ class FromFile(Importer):
|
|||
:meth:`close_input_file()`.
|
||||
"""
|
||||
|
||||
input_file = None
|
||||
|
||||
def setup(self):
|
||||
"""
|
||||
Open the input file. See also :meth:`open_input_file()`.
|
||||
|
|
@ -884,7 +1262,7 @@ class FromFile(Importer):
|
|||
|
||||
:returns: Path to input file.
|
||||
"""
|
||||
if hasattr(self, 'input_file_path'):
|
||||
if hasattr(self, "input_file_path"):
|
||||
return self.input_file_path
|
||||
|
||||
folder = self.get_input_file_dir()
|
||||
|
|
@ -900,7 +1278,7 @@ class FromFile(Importer):
|
|||
|
||||
:returns: Path to folder with input file(s).
|
||||
"""
|
||||
if hasattr(self, 'input_file_dir'):
|
||||
if hasattr(self, "input_file_dir"):
|
||||
return self.input_file_dir
|
||||
|
||||
raise NotImplementedError("can't guess path to input file(s) folder")
|
||||
|
|
@ -914,7 +1292,7 @@ class FromFile(Importer):
|
|||
|
||||
:returns: Input filename, sans folder path.
|
||||
"""
|
||||
if hasattr(self, 'input_file_name'):
|
||||
if hasattr(self, "input_file_name"):
|
||||
return self.input_file_name
|
||||
|
||||
raise NotImplementedError("can't guess input filename")
|
||||
|
|
@ -945,23 +1323,160 @@ class FromFile(Importer):
|
|||
self.input_file.close()
|
||||
|
||||
|
||||
class QueryWrapper:
|
||||
"""
|
||||
Simple wrapper for a SQLAlchemy query, to make it sort of behave
|
||||
so that an importer can treat it as a data record list.
|
||||
|
||||
:param query: :class:`~sqlalchemy:sqlalchemy.orm.Query` instance
|
||||
"""
|
||||
|
||||
def __init__(self, query):
|
||||
self.query = query
|
||||
|
||||
def __len__(self):
|
||||
try:
|
||||
return len(self.query)
|
||||
except TypeError:
|
||||
return self.query.count()
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.query)
|
||||
|
||||
|
||||
class FromSqlalchemy(Importer): # pylint: disable=abstract-method
|
||||
"""
|
||||
Base class for importer/exporter using SQL/ORM query as data
|
||||
source.
|
||||
|
||||
Subclass should define :attr:`source_model_class` in which case
|
||||
the source query is automatic. And/or override
|
||||
:meth:`get_source_query()` to customize.
|
||||
|
||||
See also :class:`FromSqlalchemyMirror` and :class:`ToSqlalchemy`.
|
||||
"""
|
||||
|
||||
source_model_class = None
|
||||
"""
|
||||
Reference to the :term:`data model` class representing the source.
|
||||
|
||||
This normally is a SQLAlchemy mapped class, e.g.
|
||||
:class:`~wuttjamaican:wuttjamaican.db.model.base.Person` for
|
||||
exporting from the Wutta People table.
|
||||
"""
|
||||
|
||||
source_session = None
|
||||
"""
|
||||
Reference to the open :term:`db session` for the data source.
|
||||
|
||||
The importer must be given this reference when instantiated by the
|
||||
:term:`import handler`. This is handled automatically if using
|
||||
:class:`~wuttasync.importing.handlers.FromSqlalchemyHandler`.
|
||||
"""
|
||||
|
||||
def get_source_objects(self):
|
||||
"""
|
||||
This method is responsible for fetching "raw" (non-normalized)
|
||||
records from data source.
|
||||
|
||||
(See also the parent method docs for
|
||||
:meth:`~wuttasync.importing.base.Importer.get_source_objects()`.)
|
||||
|
||||
It calls :meth:`get_source_query()` and then wraps that in a
|
||||
:class:`QueryWrapper`, which is then returned.
|
||||
|
||||
Note that this method does not technically "retrieve" records
|
||||
from the query; that happens automatically later.
|
||||
|
||||
:returns: :class:`QueryWrapper` for the source query
|
||||
"""
|
||||
query = self.get_source_query()
|
||||
return QueryWrapper(query)
|
||||
|
||||
def get_source_query(self):
|
||||
"""
|
||||
This returns the SQL/ORM query used to fetch source
|
||||
data. It is called from :meth:`get_source_objects()`.
|
||||
|
||||
Default logic just makes a simple ``SELECT * FROM TABLE`` kind
|
||||
of query. Subclass can override as needed.
|
||||
|
||||
:returns: :class:`~sqlalchemy:sqlalchemy.orm.Query` instance
|
||||
"""
|
||||
return self.source_session.query(self.source_model_class)
|
||||
|
||||
|
||||
class FromSqlalchemyMirror(FromSqlalchemy): # pylint: disable=abstract-method
|
||||
"""
|
||||
Special base class for when the source and target are effectively
|
||||
mirrored, and can each be represented by the same :term:`data
|
||||
model`.
|
||||
|
||||
The assumption is that SQLAlchemy ORM is used on both sides, even
|
||||
though this base class only defines the source side (it inherits
|
||||
from :class:`FromSqlalchemy`).
|
||||
|
||||
There are two main use cases for this:
|
||||
|
||||
* sync between app nodes
|
||||
* sync version tables
|
||||
|
||||
When 2 app nodes are synced, the source and target are "the same"
|
||||
in a schema sense, e.g. ``sprockets on node 01 => sprockets on
|
||||
node 02``.
|
||||
|
||||
When version tables are synced, the same schema can be used for
|
||||
the "live" table and the "version" table, e.g. ``sprockets =>
|
||||
sprocket versions``.
|
||||
"""
|
||||
|
||||
@property
|
||||
def source_model_class(self):
|
||||
"""
|
||||
This returns the :attr:`~Importer.model_class` since source
|
||||
and target must share common schema.
|
||||
"""
|
||||
return self.model_class
|
||||
|
||||
def normalize_source_object(self, obj):
|
||||
"""
|
||||
Since source/target share schema, there should be no tricky
|
||||
normalization involved.
|
||||
|
||||
This calls :meth:`~Importer.normalize_target_object()` since
|
||||
that logic should already be defined. This ensures the same
|
||||
normalization is used on both sides.
|
||||
"""
|
||||
return self.normalize_target_object(obj)
|
||||
|
||||
|
||||
class FromWutta(FromSqlalchemy): # pylint: disable=abstract-method
|
||||
"""
|
||||
Base class for data importer/exporter which uses the Wutta ORM
|
||||
(:term:`app database`) as data source.
|
||||
"""
|
||||
|
||||
|
||||
class ToSqlalchemy(Importer):
|
||||
"""
|
||||
Base class for importer/exporter which uses SQLAlchemy ORM on the
|
||||
target side.
|
||||
|
||||
See also :class:`FromSqlalchemy`.
|
||||
"""
|
||||
|
||||
caches_target = True
|
||||
"" # nb. suppress sphinx docs
|
||||
"" # nb. suppress sphinx docs
|
||||
|
||||
target_session = None
|
||||
|
||||
def get_target_object(self, key):
|
||||
"""
|
||||
Tries to fetch the object from target DB using ORM query.
|
||||
"""
|
||||
# first the default logic in case target object is cached
|
||||
obj = super().get_target_object(key)
|
||||
if obj:
|
||||
return obj
|
||||
# use default logic to fetch from cache, if applicable
|
||||
if self.caches_target:
|
||||
return super().get_target_object(key)
|
||||
|
||||
# okay now we must fetch via query
|
||||
query = self.target_session.query(self.model_class)
|
||||
|
|
@ -970,16 +1485,7 @@ class ToSqlalchemy(Importer):
|
|||
try:
|
||||
return query.one()
|
||||
except orm.exc.NoResultFound:
|
||||
pass
|
||||
|
||||
def create_target_object(self, key, source_data):
|
||||
""" """
|
||||
with self.target_session.no_autoflush:
|
||||
obj = super().create_target_object(key, source_data)
|
||||
if obj:
|
||||
# nb. add new object to target db session
|
||||
self.target_session.add(obj)
|
||||
return obj
|
||||
return None
|
||||
|
||||
def get_target_objects(self, source_data=None, progress=None):
|
||||
"""
|
||||
|
|
@ -989,10 +1495,27 @@ class ToSqlalchemy(Importer):
|
|||
query = self.get_target_query(source_data=source_data)
|
||||
return query.all()
|
||||
|
||||
def get_target_query(self, source_data=None):
|
||||
def get_target_query(self, source_data=None): # pylint: disable=unused-argument
|
||||
"""
|
||||
Returns an ORM query suitable to fetch existing objects from
|
||||
the target side. This is called from
|
||||
:meth:`get_target_objects()`.
|
||||
|
||||
:returns: :class:`~sqlalchemy:sqlalchemy.orm.Query` instance
|
||||
"""
|
||||
return self.target_session.query(self.model_class)
|
||||
|
||||
def create_target_object(self, key, source_data): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
with self.target_session.no_autoflush:
|
||||
obj = super().create_target_object(key, source_data)
|
||||
if obj:
|
||||
# nb. add new object to target db session
|
||||
self.target_session.add(obj)
|
||||
return obj
|
||||
return None
|
||||
|
||||
def delete_target_object(self, obj): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
self.target_session.delete(obj)
|
||||
return True
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2025 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
|
|
@ -25,19 +25,27 @@ Importing from CSV
|
|||
"""
|
||||
|
||||
import csv
|
||||
import datetime
|
||||
import decimal
|
||||
import logging
|
||||
import uuid as _uuid
|
||||
from collections import OrderedDict
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy_utils.functions import get_primary_keys
|
||||
|
||||
from wuttjamaican.db.util import make_topo_sortkey
|
||||
from wuttjamaican.db.util import make_topo_sortkey, UUID
|
||||
from wuttjamaican.util import parse_bool
|
||||
|
||||
from .base import FromFile
|
||||
from .handlers import FromFileHandler
|
||||
from .wutta import ToWuttaHandler
|
||||
from .handlers import FromFileHandler, ToWuttaHandler
|
||||
from .model import ToWutta
|
||||
|
||||
|
||||
class FromCsv(FromFile):
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FromCsv(FromFile): # pylint: disable=abstract-method
|
||||
"""
|
||||
Base class for importer/exporter using CSV file as data source.
|
||||
|
||||
|
|
@ -56,7 +64,9 @@ class FromCsv(FromFile):
|
|||
:class:`python:csv.DictReader` instance.
|
||||
"""
|
||||
|
||||
csv_encoding = 'utf_8'
|
||||
input_reader = None
|
||||
|
||||
csv_encoding = "utf_8"
|
||||
"""
|
||||
Encoding used by the CSV input file.
|
||||
|
||||
|
|
@ -73,11 +83,11 @@ class FromCsv(FromFile):
|
|||
:meth:`~wuttasync.importing.base.Importer.get_model_title()`
|
||||
to obtain the model name.
|
||||
"""
|
||||
if hasattr(self, 'input_file_name'):
|
||||
if hasattr(self, "input_file_name"):
|
||||
return self.input_file_name
|
||||
|
||||
model_title = self.get_model_title()
|
||||
return f'{model_title}.csv'
|
||||
return f"{model_title}.csv"
|
||||
|
||||
def open_input_file(self):
|
||||
"""
|
||||
|
|
@ -86,12 +96,36 @@ class FromCsv(FromFile):
|
|||
This tracks the file handle via
|
||||
:attr:`~wuttasync.importing.base.FromFile.input_file` and the
|
||||
CSV reader via :attr:`input_reader`.
|
||||
|
||||
It also updates the effective
|
||||
:attr:`~wuttasync.importing.base.Importer.fields` list per the
|
||||
following logic:
|
||||
|
||||
First get the current effective field list, e.g. as defined by
|
||||
the class and/or from caller params. Then read the column
|
||||
header list from CSV file, and discard any which are not found
|
||||
in the first list. The result becomes the new effective field
|
||||
list.
|
||||
"""
|
||||
path = self.get_input_file_path()
|
||||
self.input_file = open(path, 'rt', encoding=self.csv_encoding)
|
||||
log.debug("opening input file: %s", path)
|
||||
self.input_file = open( # pylint: disable=consider-using-with
|
||||
path, "rt", encoding=self.csv_encoding
|
||||
)
|
||||
self.input_reader = csv.DictReader(self.input_file)
|
||||
|
||||
def close_input_file(self):
|
||||
# nb. importer may have all supported fields by default, so
|
||||
# must prune to the subset also present in the input file
|
||||
fields = self.get_fields()
|
||||
orientation = self.orientation.value
|
||||
log.debug(f"supported fields for {orientation}: %s", fields)
|
||||
self.fields = [f for f in self.input_reader.fieldnames or [] if f in fields]
|
||||
log.debug("fields present in source data: %s", self.fields)
|
||||
if not self.fields:
|
||||
self.input_file.close()
|
||||
raise ValueError("input file has no recognized fields")
|
||||
|
||||
def close_input_file(self): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
self.input_file.close()
|
||||
del self.input_reader
|
||||
|
|
@ -109,11 +143,69 @@ class FromCsv(FromFile):
|
|||
return list(self.input_reader)
|
||||
|
||||
|
||||
class FromCsvToSqlalchemyMixin:
|
||||
class FromCsvToSqlalchemyMixin: # pylint: disable=too-few-public-methods
|
||||
"""
|
||||
Mixin handler class for CSV → SQLAlchemy ORM import/export.
|
||||
Mixin class for CSV → SQLAlchemy ORM :term:`importers <importer>`.
|
||||
|
||||
Such importers are generated automatically by
|
||||
:class:`FromCsvToSqlalchemyHandlerMixin`, so you won't typically
|
||||
reference this mixin class directly.
|
||||
|
||||
This mixin adds data type coercion for each field value read from
|
||||
the CSV file; see :meth:`normalize_source_object()`.
|
||||
|
||||
.. attribute:: coercers
|
||||
|
||||
Dict of coercer functions, keyed by field name. This is an
|
||||
empty dict by default; however typical usage does not require
|
||||
you to set it, as it's auto-provided from
|
||||
:func:`make_coercers()`.
|
||||
|
||||
Each coercer function should accept a single value, and return
|
||||
the coerced value, e.g.::
|
||||
|
||||
def coerce_int(val):
|
||||
return int(val)
|
||||
"""
|
||||
source_key = 'csv'
|
||||
|
||||
coercers = {}
|
||||
|
||||
def normalize_source_object(self, obj):
|
||||
"""
|
||||
Normalize a source record from CSV input file. See also the
|
||||
parent docs for
|
||||
:meth:`wuttasync.importing.base.Importer.normalize_source_object()`.
|
||||
|
||||
This will invoke the appropriate coercer function for each
|
||||
field, according to :attr:`coercers`.
|
||||
|
||||
:param obj: Raw data record (dict) from CSV reader.
|
||||
|
||||
:returns: Final data dict for the record.
|
||||
"""
|
||||
data = {}
|
||||
for field in self.fields:
|
||||
value = obj[field]
|
||||
if field in self.coercers:
|
||||
value = self.coercers[field](value)
|
||||
data[field] = value
|
||||
return data
|
||||
|
||||
|
||||
class FromCsvToSqlalchemyHandlerMixin:
|
||||
"""
|
||||
Mixin class for CSV → SQLAlchemy ORM :term:`import handlers
|
||||
<import handler>`.
|
||||
|
||||
This knows how to dynamically generate :term:`importer` classes to
|
||||
target the particular ORM involved. Such classes will inherit
|
||||
from :class:`FromCsvToSqlalchemyMixin`, in addition to whatever
|
||||
:attr:`FromImporterBase` and :attr:`ToImporterBase` reference.
|
||||
|
||||
This all happens within :meth:`define_importers()`.
|
||||
"""
|
||||
|
||||
source_key = "csv"
|
||||
generic_source_title = "CSV"
|
||||
|
||||
FromImporterBase = FromCsv
|
||||
|
|
@ -146,6 +238,8 @@ class FromCsvToSqlalchemyMixin:
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
# TODO: pylint (correctly) flags this as duplicate code, matching
|
||||
# on the wuttasync.importing.versions/wutta module - should fix?
|
||||
def define_importers(self):
|
||||
"""
|
||||
This mixin overrides typical (manual) importer definition, and
|
||||
|
|
@ -159,48 +253,226 @@ class FromCsvToSqlalchemyMixin:
|
|||
importers = {}
|
||||
model = self.get_target_model()
|
||||
|
||||
# pylint: disable=duplicate-code
|
||||
# mostly try to make an importer for every data model
|
||||
for name in dir(model):
|
||||
cls = getattr(model, name)
|
||||
if isinstance(cls, type) and issubclass(cls, model.Base) and cls is not model.Base:
|
||||
if (
|
||||
isinstance(cls, type)
|
||||
and issubclass(cls, model.Base)
|
||||
and cls is not model.Base
|
||||
):
|
||||
importers[name] = self.make_importer_factory(cls, name)
|
||||
|
||||
# sort importers according to schema topography
|
||||
topo_sortkey = make_topo_sortkey(model)
|
||||
importers = OrderedDict([
|
||||
(name, importers[name])
|
||||
for name in sorted(importers, key=topo_sortkey)
|
||||
])
|
||||
importers = OrderedDict(
|
||||
[(name, importers[name]) for name in sorted(importers, key=topo_sortkey)]
|
||||
)
|
||||
|
||||
return importers
|
||||
|
||||
def make_importer_factory(self, cls, name):
|
||||
def make_importer_factory(self, model_class, name):
|
||||
"""
|
||||
Generate and return a new importer/exporter class, targeting
|
||||
the given data model class.
|
||||
Generate and return a new :term:`importer` class, targeting
|
||||
the given :term:`data model` class.
|
||||
|
||||
:param cls: A data model class.
|
||||
The newly-created class will inherit from:
|
||||
|
||||
:param name: Optional "model name" override for the
|
||||
importer/exporter.
|
||||
* :class:`FromCsvToSqlalchemyMixin`
|
||||
* :attr:`FromImporterBase`
|
||||
* :attr:`ToImporterBase`
|
||||
|
||||
:returns: A new class, meant to process import/export
|
||||
operations which target the given data model. The new
|
||||
class will inherit from both :attr:`FromImporterBase` and
|
||||
:attr:`ToImporterBase`.
|
||||
And :attr:`~FromCsvToSqlalchemyMixin.coercers` will be set on
|
||||
the class, to the result of :func:`make_coercers()`.
|
||||
|
||||
:param model_class: A data model class.
|
||||
|
||||
:param name: The "model name" for the importer/exporter. New
|
||||
class name will be based on this, so e.g. ``Widget`` model
|
||||
name becomes ``WidgetImporter`` class name.
|
||||
|
||||
:returns: The new class, meant to process import/export
|
||||
targeting the given data model.
|
||||
"""
|
||||
return type(f'{name}Importer', (FromCsv, self.ToImporterBase), {
|
||||
'model_class': cls,
|
||||
'key': list(get_primary_keys(cls)),
|
||||
})
|
||||
return type(
|
||||
f"{name}Importer",
|
||||
(FromCsvToSqlalchemyMixin, self.FromImporterBase, self.ToImporterBase),
|
||||
{
|
||||
"model_class": model_class,
|
||||
"key": list(get_primary_keys(model_class)),
|
||||
"coercers": make_coercers(model_class),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class FromCsvToWutta(FromCsvToSqlalchemyMixin, ToWuttaHandler):
|
||||
class FromCsvToWutta(FromCsvToSqlalchemyHandlerMixin, FromFileHandler, ToWuttaHandler):
|
||||
"""
|
||||
Handler for CSV → Wutta :term:`app database` import.
|
||||
|
||||
This uses :class:`FromCsvToSqlalchemyHandlerMixin` for most of the
|
||||
heavy lifting.
|
||||
"""
|
||||
|
||||
ToImporterBase = ToWutta
|
||||
|
||||
def get_target_model(self):
|
||||
def get_target_model(self): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
return self.app.model
|
||||
|
||||
|
||||
##############################
|
||||
# coercion utilities
|
||||
##############################
|
||||
|
||||
|
||||
def make_coercers(model_class):
|
||||
"""
|
||||
Returns a dict of coercer functions for use by
|
||||
:meth:`~FromCsvToSqlalchemyMixin.normalize_source_object()`.
|
||||
|
||||
This is called automatically by
|
||||
:meth:`~FromCsvToSqlalchemyHandlerMixin.make_importer_factory()`,
|
||||
in which case the result is assigned to
|
||||
:attr:`~FromCsvToSqlalchemyMixin.coercers` on the importer class.
|
||||
|
||||
It will iterate over all mapped fields, and call
|
||||
:func:`make_coercer()` for each.
|
||||
|
||||
:param model_class: SQLAlchemy mapped class, e.g.
|
||||
:class:`wuttjamaican:wuttjamaican.db.model.base.Person`.
|
||||
|
||||
:returns: Dict of coercer functions, keyed by field name.
|
||||
"""
|
||||
mapper = sa.inspect(model_class)
|
||||
fields = list(mapper.columns.keys())
|
||||
|
||||
coercers = {}
|
||||
for field in fields:
|
||||
attr = getattr(model_class, field)
|
||||
coercers[field] = make_coercer(attr)
|
||||
|
||||
return coercers
|
||||
|
||||
|
||||
def make_coercer(attr): # pylint: disable=too-many-return-statements
|
||||
"""
|
||||
Returns a coercer function suitable for use by
|
||||
:meth:`~FromCsvToSqlalchemyMixin.normalize_source_object()`.
|
||||
|
||||
This is typically called from :func:`make_coercers()`. The
|
||||
resulting function will coerce values to the data type defined by
|
||||
the given attribute, e.g.::
|
||||
|
||||
def coerce_int(val):
|
||||
return int(val)
|
||||
|
||||
:param attr: SQLAlchemy mapped attribute, e.g.
|
||||
:attr:`wuttjamaican:wuttjamaican.db.model.upgrades.Upgrade.exit_code`.
|
||||
|
||||
:returns: Coercer function based on mapped attribute data type.
|
||||
"""
|
||||
assert len(attr.prop.columns) == 1
|
||||
column = attr.prop.columns[0]
|
||||
|
||||
# UUID
|
||||
if isinstance(attr.type, UUID):
|
||||
return coerce_uuid
|
||||
|
||||
# Boolean
|
||||
if isinstance(attr.type, sa.Boolean):
|
||||
if column.nullable:
|
||||
return coerce_boolean_nullable
|
||||
return coerce_boolean
|
||||
|
||||
# DateTime
|
||||
if isinstance(attr.type, sa.DateTime) or (
|
||||
hasattr(attr.type, "impl") and isinstance(attr.type.impl, sa.DateTime)
|
||||
):
|
||||
return coerce_datetime
|
||||
|
||||
# Date
|
||||
if isinstance(attr.type, sa.Date):
|
||||
return coerce_date
|
||||
|
||||
# Float
|
||||
# nb. check this before decimal, since Numeric inherits from Float
|
||||
if isinstance(attr.type, sa.Float):
|
||||
return coerce_float
|
||||
|
||||
# Decimal
|
||||
if isinstance(attr.type, sa.Numeric):
|
||||
return coerce_decimal
|
||||
|
||||
# Integer
|
||||
if isinstance(attr.type, sa.Integer):
|
||||
return coerce_integer
|
||||
|
||||
# String
|
||||
if isinstance(attr.type, sa.String):
|
||||
if column.nullable:
|
||||
return coerce_string_nullable
|
||||
|
||||
# do not coerce
|
||||
return coerce_noop
|
||||
|
||||
|
||||
def coerce_boolean(value): # pylint: disable=missing-function-docstring
|
||||
return parse_bool(value)
|
||||
|
||||
|
||||
def coerce_boolean_nullable(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
return coerce_boolean(value)
|
||||
|
||||
|
||||
def coerce_date(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
|
||||
return datetime.datetime.strptime(value, "%Y-%m-%d").date()
|
||||
|
||||
|
||||
def coerce_datetime(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
|
||||
try:
|
||||
return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
|
||||
except ValueError:
|
||||
return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S.%f")
|
||||
|
||||
|
||||
def coerce_decimal(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
return decimal.Decimal(value)
|
||||
|
||||
|
||||
def coerce_float(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
return float(value)
|
||||
|
||||
|
||||
def coerce_integer(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
return int(value)
|
||||
|
||||
|
||||
def coerce_noop(value): # pylint: disable=missing-function-docstring
|
||||
return value
|
||||
|
||||
|
||||
def coerce_string_nullable(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
return value
|
||||
|
||||
|
||||
def coerce_uuid(value): # pylint: disable=missing-function-docstring
|
||||
if value == "":
|
||||
return None
|
||||
return _uuid.UUID(value)
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
|
|
@ -23,12 +23,18 @@
|
|||
"""
|
||||
Data Import / Export Handlers
|
||||
"""
|
||||
# pylint: disable=too-many-lines
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from collections import OrderedDict
|
||||
from enum import Enum
|
||||
|
||||
import humanize
|
||||
|
||||
from wuttjamaican.app import GenericHandler
|
||||
from wuttjamaican.diffs import Diff
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
|
@ -38,11 +44,14 @@ class Orientation(Enum):
|
|||
"""
|
||||
Enum values for :attr:`ImportHandler.orientation`.
|
||||
"""
|
||||
IMPORT = 'import'
|
||||
EXPORT = 'export'
|
||||
|
||||
IMPORT = "import"
|
||||
EXPORT = "export"
|
||||
|
||||
|
||||
class ImportHandler(GenericHandler):
|
||||
class ImportHandler( # pylint: disable=too-many-public-methods,too-many-instance-attributes
|
||||
GenericHandler
|
||||
):
|
||||
"""
|
||||
Base class for all import/export handlers.
|
||||
|
||||
|
|
@ -119,6 +128,59 @@ class ImportHandler(GenericHandler):
|
|||
:meth:`commit_transaction()`.
|
||||
"""
|
||||
|
||||
process_started = None
|
||||
|
||||
warnings = False
|
||||
"""
|
||||
Boolean indicating the import/export should run in "warnings"
|
||||
mode.
|
||||
|
||||
If set, this declares that no changes are expected for the
|
||||
import/export job. If any changes do occur with this flag set, a
|
||||
diff warning email is sent within :meth:`process_changes()`.
|
||||
|
||||
See also :attr:`warnings_recipients`,
|
||||
:attr:`warnings_max_diffs` and :attr:`warnings_email_key`.
|
||||
"""
|
||||
|
||||
warnings_email_key = None
|
||||
"""
|
||||
Explicit :term:`email key` for sending the diff warning email,
|
||||
*unique to this import/export type*.
|
||||
|
||||
Handlers do not normally set this, so the email key is determined
|
||||
automatically within :meth:`get_warnings_email_key()`.
|
||||
|
||||
See also :attr:`warnings`.
|
||||
"""
|
||||
|
||||
warnings_recipients = None
|
||||
"""
|
||||
Explicit recipient list for the warning email. If not set, the
|
||||
recipients are determined automatically via config.
|
||||
|
||||
See also :attr:`warnings`.
|
||||
"""
|
||||
|
||||
warnings_max_diffs = 15
|
||||
"""
|
||||
Max number of record diffs (per model) to show in the warning email.
|
||||
|
||||
See also :attr:`warnings`.
|
||||
"""
|
||||
|
||||
runas_username = None
|
||||
"""
|
||||
Username responsible for running the import/export job. This is
|
||||
mostly used for Continuum versioning.
|
||||
"""
|
||||
|
||||
transaction_comment = None
|
||||
"""
|
||||
Optional comment to apply to the transaction, where applicable.
|
||||
This is mostly used for Continuum versioning.
|
||||
"""
|
||||
|
||||
importers = None
|
||||
"""
|
||||
This should be a dict of all importer/exporter classes available
|
||||
|
|
@ -141,13 +203,31 @@ class ImportHandler(GenericHandler):
|
|||
|
||||
def __init__(self, config, **kwargs):
|
||||
""" """
|
||||
super().__init__(config, **kwargs)
|
||||
super().__init__(config)
|
||||
|
||||
# callers can set any attrs they want
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
self.importers = self.define_importers()
|
||||
|
||||
def __str__(self):
|
||||
""" """
|
||||
return self.get_title()
|
||||
|
||||
@property
|
||||
def actioner(self):
|
||||
"""
|
||||
Convenience property which effectively returns the
|
||||
:attr:`orientation` as a noun - i.e. one of:
|
||||
|
||||
* ``'importer'``
|
||||
* ``'exporter'``
|
||||
|
||||
See also :attr:`actioning`.
|
||||
"""
|
||||
return f"{self.orientation.value}er"
|
||||
|
||||
@property
|
||||
def actioning(self):
|
||||
"""
|
||||
|
|
@ -156,24 +236,29 @@ class ImportHandler(GenericHandler):
|
|||
|
||||
* ``'importing'``
|
||||
* ``'exporting'``
|
||||
|
||||
See also :attr:`actioner`.
|
||||
"""
|
||||
return f'{self.orientation.value}ing'
|
||||
return f"{self.orientation.value}ing"
|
||||
|
||||
@classmethod
|
||||
def get_key(cls):
|
||||
"""
|
||||
Returns the "full key" for the handler. This is a combination
|
||||
of :attr:`source_key` and :attr:`target_key` and
|
||||
:attr:`orientation`.
|
||||
Returns the :term:`import/export key` for the handler. This
|
||||
is a combination of :attr:`source_key` and :attr:`target_key`
|
||||
and :attr:`orientation`.
|
||||
|
||||
For instance in the case of CSV → Wutta, the full handler key
|
||||
is ``to_wutta.from_csv.import``.
|
||||
For instance in the case of Wutta → CSV export, the key is:
|
||||
``export.to_csv.from_wutta``
|
||||
|
||||
Note that more than one handler may return the same full key
|
||||
here; but only one will be configured as the "default" handler
|
||||
for that key. See also :meth:`get_spec()`.
|
||||
Note that more than one handler may use the same key; but only
|
||||
one will be configured as the "designated" handler for that
|
||||
key, a la
|
||||
:meth:`~wuttasync.app.WuttaSyncAppProvider.get_import_handler()`.
|
||||
|
||||
See also :meth:`get_spec()`.
|
||||
"""
|
||||
return f'to_{cls.target_key}.from_{cls.source_key}.{cls.orientation.value}'
|
||||
return f"{cls.orientation.value}.to_{cls.target_key}.from_{cls.source_key}"
|
||||
|
||||
@classmethod
|
||||
def get_spec(cls):
|
||||
|
|
@ -187,7 +272,7 @@ class ImportHandler(GenericHandler):
|
|||
|
||||
See also :meth:`get_key()`.
|
||||
"""
|
||||
return f'{cls.__module__}:{cls.__name__}'
|
||||
return f"{cls.__module__}:{cls.__name__}"
|
||||
|
||||
def get_title(self):
|
||||
"""
|
||||
|
|
@ -207,11 +292,25 @@ class ImportHandler(GenericHandler):
|
|||
"""
|
||||
Returns the display title for the data source.
|
||||
|
||||
By default this returns :attr:`source_key`, but this can be
|
||||
overriden by class attribute.
|
||||
|
||||
Base class can define ``generic_source_title`` to provide a
|
||||
new default::
|
||||
|
||||
class FromExcelHandler(ImportHandler):
|
||||
generic_source_title = "Excel File"
|
||||
|
||||
Subclass can define ``source_title`` to be explicit::
|
||||
|
||||
class FromExcelToWutta(FromExcelHandler, ToWuttaHandler):
|
||||
source_title = "My Spreadsheet"
|
||||
|
||||
See also :meth:`get_title()` and :meth:`get_target_title()`.
|
||||
"""
|
||||
if hasattr(self, 'source_title'):
|
||||
if hasattr(self, "source_title"):
|
||||
return self.source_title
|
||||
if hasattr(self, 'generic_source_title'):
|
||||
if hasattr(self, "generic_source_title"):
|
||||
return self.generic_source_title
|
||||
return self.source_key
|
||||
|
||||
|
|
@ -219,11 +318,25 @@ class ImportHandler(GenericHandler):
|
|||
"""
|
||||
Returns the display title for the data target.
|
||||
|
||||
By default this returns :attr:`target_key`, but this can be
|
||||
overriden by class attribute.
|
||||
|
||||
Base class can define ``generic_target_title`` to provide a
|
||||
new default::
|
||||
|
||||
class ToExcelHandler(ImportHandler):
|
||||
generic_target_title = "Excel File"
|
||||
|
||||
Subclass can define ``target_title`` to be explicit::
|
||||
|
||||
class FromWuttaToExcel(FromWuttaHandler, ToExcelHandler):
|
||||
target_title = "My Spreadsheet"
|
||||
|
||||
See also :meth:`get_title()` and :meth:`get_source_title()`.
|
||||
"""
|
||||
if hasattr(self, 'target_title'):
|
||||
if hasattr(self, "target_title"):
|
||||
return self.target_title
|
||||
if hasattr(self, 'generic_target_title'):
|
||||
if hasattr(self, "generic_target_title"):
|
||||
return self.generic_target_title
|
||||
return self.target_key
|
||||
|
||||
|
|
@ -231,7 +344,7 @@ class ImportHandler(GenericHandler):
|
|||
"""
|
||||
Run import/export operations for the specified models.
|
||||
|
||||
:param \*keys: One or more importer/exporter (model) keys, as
|
||||
:param \\*keys: One or more importer/exporter (model) keys, as
|
||||
defined by the handler.
|
||||
|
||||
Each key specified must be present in :attr:`importers` and
|
||||
|
|
@ -248,11 +361,17 @@ class ImportHandler(GenericHandler):
|
|||
* :meth:`begin_transaction()`
|
||||
* :meth:`get_importer()`
|
||||
* :meth:`~wuttasync.importing.base.Importer.process_data()` (on the importer/exporter)
|
||||
* :meth:`process_changes()`
|
||||
* :meth:`rollback_transaction()`
|
||||
* :meth:`commit_transaction()`
|
||||
"""
|
||||
kwargs = self.consume_kwargs(kwargs)
|
||||
self.process_started = self.app.localtime()
|
||||
self.begin_transaction()
|
||||
changes = OrderedDict()
|
||||
|
||||
if not keys:
|
||||
keys = self.get_default_importer_keys()
|
||||
|
||||
success = False
|
||||
try:
|
||||
|
|
@ -263,20 +382,31 @@ class ImportHandler(GenericHandler):
|
|||
# invoke importer
|
||||
importer = self.get_importer(key, **kwargs)
|
||||
created, updated, deleted = importer.process_data()
|
||||
changed = bool(created or updated or deleted)
|
||||
|
||||
# log what happened
|
||||
msg = "%s: added %d; updated %d; deleted %d %s records"
|
||||
if self.dry_run:
|
||||
msg += " (dry run)"
|
||||
log.info(msg, self.get_title(), len(created), len(updated), len(deleted), key)
|
||||
logger = log.warning if changed and self.warnings else log.info
|
||||
logger(
|
||||
msg, self.get_title(), len(created), len(updated), len(deleted), key
|
||||
)
|
||||
|
||||
# keep track of any changes
|
||||
if changed:
|
||||
changes[key] = created, updated, deleted
|
||||
|
||||
# post-processing for all changes
|
||||
if changes:
|
||||
self.process_changes(changes)
|
||||
|
||||
success = True
|
||||
|
||||
except:
|
||||
# TODO: what should happen here?
|
||||
log.exception("what should happen here?") # TODO
|
||||
raise
|
||||
|
||||
else:
|
||||
success = True
|
||||
|
||||
finally:
|
||||
if not success:
|
||||
log.warning("something failed, so transaction was rolled back")
|
||||
|
|
@ -307,8 +437,25 @@ class ImportHandler(GenericHandler):
|
|||
|
||||
:returns: Dict of kwargs, "post-consumption."
|
||||
"""
|
||||
if 'dry_run' in kwargs:
|
||||
self.dry_run = kwargs['dry_run']
|
||||
if "dry_run" in kwargs:
|
||||
self.dry_run = kwargs["dry_run"]
|
||||
|
||||
if "warnings" in kwargs:
|
||||
self.warnings = kwargs.pop("warnings")
|
||||
|
||||
if "warnings_recipients" in kwargs:
|
||||
self.warnings_recipients = self.config.parse_list(
|
||||
kwargs.pop("warnings_recipients")
|
||||
)
|
||||
|
||||
if "warnings_max_diffs" in kwargs:
|
||||
self.warnings_max_diffs = kwargs.pop("warnings_max_diffs")
|
||||
|
||||
if "runas_username" in kwargs:
|
||||
self.runas_username = kwargs.pop("runas_username")
|
||||
|
||||
if "transaction_comment" in kwargs:
|
||||
self.transaction_comment = kwargs.pop("transaction_comment")
|
||||
|
||||
return kwargs
|
||||
|
||||
|
|
@ -459,6 +606,9 @@ class ImportHandler(GenericHandler):
|
|||
Returns an importer/exporter instance corresponding to the
|
||||
given key.
|
||||
|
||||
Note that this will always create a *new* instance; they are
|
||||
not cached.
|
||||
|
||||
The key will be the "model name" mapped to a particular
|
||||
importer/exporter class and thus must be present in
|
||||
:attr:`importers`.
|
||||
|
|
@ -471,6 +621,8 @@ class ImportHandler(GenericHandler):
|
|||
|
||||
:param key: Model key for desired importer/exporter.
|
||||
|
||||
:param \\**kwargs: Extra/override kwargs for the importer.
|
||||
|
||||
:returns: Instance of (subclass of)
|
||||
:class:`~wuttasync.importing.base.Importer`.
|
||||
"""
|
||||
|
|
@ -479,11 +631,16 @@ class ImportHandler(GenericHandler):
|
|||
raise KeyError(f"unknown {orientation} key: {key}")
|
||||
|
||||
kwargs = self.get_importer_kwargs(key, **kwargs)
|
||||
kwargs['handler'] = self
|
||||
kwargs["handler"] = self
|
||||
|
||||
# nb. default logic should (normally) determine keys
|
||||
if "keys" in kwargs and not kwargs["keys"]:
|
||||
del kwargs["keys"]
|
||||
|
||||
factory = self.importers[key]
|
||||
return factory(self.config, **kwargs)
|
||||
|
||||
def get_importer_kwargs(self, key, **kwargs):
|
||||
def get_importer_kwargs(self, key, **kwargs): # pylint: disable=unused-argument
|
||||
"""
|
||||
Returns a dict of kwargs to be used when construcing an
|
||||
importer/exporter with the given key. This is normally called
|
||||
|
|
@ -492,22 +649,298 @@ class ImportHandler(GenericHandler):
|
|||
:param key: Model key for the desired importer/exporter,
|
||||
e.g. ``'Widget'``
|
||||
|
||||
:param \**kwargs: Any kwargs we have so collected far.
|
||||
:param \\**kwargs: Any kwargs we have so collected far.
|
||||
|
||||
:returns: Final kwargs dict for new importer/exporter.
|
||||
"""
|
||||
return kwargs
|
||||
|
||||
def is_default(self, key): # pylint: disable=unused-argument
|
||||
"""
|
||||
Return a boolean indicating whether the importer corresponding
|
||||
to ``key`` should be considered "default" - i.e. included as
|
||||
part of a typical "import all" job.
|
||||
|
||||
The default logic here returns ``True`` in all cases; subclass can
|
||||
override as needed.
|
||||
|
||||
:param key: Key indicating the importer.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return True
|
||||
|
||||
def get_default_importer_keys(self):
|
||||
"""
|
||||
Return the list of importer keys which should be considered
|
||||
"default" - i.e. which should be included as part of a typical
|
||||
"import all" job.
|
||||
|
||||
This inspects :attr:`importers` and calls :meth:`is_default()`
|
||||
for each, to determine the result.
|
||||
|
||||
:returns: List of importer keys (strings).
|
||||
"""
|
||||
keys = list(self.importers)
|
||||
keys = [k for k in keys if self.is_default(k)]
|
||||
return keys
|
||||
|
||||
def process_changes(self, changes):
|
||||
"""
|
||||
Run post-processing operations on the given changes, if
|
||||
applicable.
|
||||
|
||||
This method is called by :meth:`process_data()`, if any
|
||||
changes were made.
|
||||
|
||||
Default logic will send a "diff warning" email to the
|
||||
configured recipient(s), if :attr:`warnings` mode is enabled.
|
||||
If it is not enabled, nothing happens.
|
||||
|
||||
:param changes: :class:`~python:collections.OrderedDict` of
|
||||
changes from the overall import/export job. The structure
|
||||
is described below.
|
||||
|
||||
Keys for the ``changes`` dict will be model/importer names,
|
||||
for instance::
|
||||
|
||||
{
|
||||
"Sprocket": {...},
|
||||
"User": {...},
|
||||
}
|
||||
|
||||
Value for each model key is a 3-tuple of ``(created, updated,
|
||||
deleted)``. Each of those elements is a list::
|
||||
|
||||
{
|
||||
"Sprocket": (
|
||||
[...], # created
|
||||
[...], # updated
|
||||
[...], # deleted
|
||||
),
|
||||
}
|
||||
|
||||
The list elements are always tuples, but the structure
|
||||
varies::
|
||||
|
||||
{
|
||||
"Sprocket": (
|
||||
[ # created, 2-tuples
|
||||
(obj, source_data),
|
||||
],
|
||||
[ # updated, 3-tuples
|
||||
(obj, source_data, target_data),
|
||||
],
|
||||
[ # deleted, 2-tuples
|
||||
(obj, target_data),
|
||||
],
|
||||
),
|
||||
}
|
||||
"""
|
||||
if not self.warnings:
|
||||
return
|
||||
|
||||
def make_diff(*args, **kwargs):
|
||||
return Diff(self.config, *args, **kwargs)
|
||||
|
||||
runtime = self.app.localtime() - self.process_started
|
||||
data = {
|
||||
"handler": self,
|
||||
"title": self.get_title(),
|
||||
"source_title": self.get_source_title(),
|
||||
"target_title": self.get_target_title(),
|
||||
"dry_run": self.dry_run,
|
||||
"argv": sys.argv,
|
||||
"runtime": runtime,
|
||||
"runtime_display": humanize.naturaldelta(runtime),
|
||||
"changes": changes,
|
||||
"make_diff": make_diff,
|
||||
"max_diffs": self.warnings_max_diffs,
|
||||
}
|
||||
|
||||
# maybe override recipients
|
||||
kw = {}
|
||||
if self.warnings_recipients:
|
||||
kw["to"] = self.warnings_recipients
|
||||
# TODO: should we in fact clear these..?
|
||||
kw["cc"] = []
|
||||
kw["bcc"] = []
|
||||
|
||||
# send the email
|
||||
email_key = self.get_warnings_email_key()
|
||||
self.app.send_email(email_key, data, fallback_key="import_export_warning", **kw)
|
||||
|
||||
log.info("%s: warning email was sent", self.get_title())
|
||||
|
||||
def get_warnings_email_key(self):
|
||||
"""
|
||||
Returns the :term:`email key` to be used for sending the diff
|
||||
warning email.
|
||||
|
||||
The email key should be unique to this import/export type
|
||||
(really, the :term:`import/export key`) but not necessarily
|
||||
unique to one handler.
|
||||
|
||||
If :attr:`warnings_email_key` is set, it will be used as-is.
|
||||
|
||||
Otherwise one is generated from :meth:`get_key()`.
|
||||
|
||||
:returns: Email key for diff warnings
|
||||
"""
|
||||
if self.warnings_email_key:
|
||||
return self.warnings_email_key
|
||||
|
||||
return self.get_key().replace(".", "_") + "_warning"
|
||||
|
||||
|
||||
class FromFileHandler(ImportHandler):
|
||||
"""
|
||||
Handler for import/export which uses an input file as data source.
|
||||
Handler for import/export which uses input file(s) as data source.
|
||||
|
||||
This handler assumes its importer/exporter classes inherit from
|
||||
:class:`~wuttasync.importing.base.FromFile` for source parent
|
||||
logic.
|
||||
"""
|
||||
|
||||
def process_data(self, *keys, **kwargs): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
|
||||
# interpret file vs. folder path
|
||||
# nb. this assumes FromFile importer/exporter
|
||||
path = kwargs.pop("input_file_path", None)
|
||||
if path:
|
||||
if not kwargs.get("input_file_dir") and os.path.isdir(path):
|
||||
kwargs["input_file_dir"] = path
|
||||
else:
|
||||
kwargs["input_file_path"] = path
|
||||
|
||||
# and carry on
|
||||
super().process_data(*keys, **kwargs)
|
||||
|
||||
|
||||
class FromSqlalchemyHandler(ImportHandler):
|
||||
"""
|
||||
Base class for import/export handlers using SQLAlchemy ORM (DB) as
|
||||
data source.
|
||||
|
||||
This is meant to be used with importers/exporters which inherit
|
||||
from :class:`~wuttasync.importing.base.FromSqlalchemy`. It will
|
||||
set the
|
||||
:attr:`~wuttasync.importing.base.FromSqlalchemy.source_session`
|
||||
attribute when making them; cf. :meth:`get_importer_kwargs()`.
|
||||
|
||||
This is the base class for :class:`FromWuttaHandler`, but can be
|
||||
used with any database.
|
||||
|
||||
See also :class:`ToSqlalchemyHandler`.
|
||||
"""
|
||||
|
||||
source_session = None
|
||||
"""
|
||||
Reference to the :term:`db session` for data source.
|
||||
|
||||
This will be ``None`` unless a transaction is running.
|
||||
"""
|
||||
|
||||
def begin_source_transaction(self):
|
||||
"""
|
||||
This calls :meth:`make_source_session()` and assigns the
|
||||
result to :attr:`source_session`.
|
||||
"""
|
||||
self.source_session = self.make_source_session()
|
||||
|
||||
def commit_source_transaction(self):
|
||||
"""
|
||||
This commits and closes :attr:`source_session`.
|
||||
"""
|
||||
self.source_session.commit()
|
||||
self.source_session.close()
|
||||
self.source_session = None
|
||||
|
||||
def rollback_source_transaction(self):
|
||||
"""
|
||||
This rolls back, then closes :attr:`source_session`.
|
||||
"""
|
||||
self.source_session.rollback()
|
||||
self.source_session.close()
|
||||
self.source_session = None
|
||||
|
||||
def make_source_session(self):
|
||||
"""
|
||||
Make and return a new :term:`db session` for the data source.
|
||||
|
||||
Default logic is not implemented; subclass must override.
|
||||
|
||||
:returns: :class:`~sqlalchemy.orm.Session` instance
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_importer_kwargs(self, key, **kwargs):
|
||||
"""
|
||||
This modifies the new importer kwargs to add:
|
||||
|
||||
* ``source_session`` - reference to :attr:`source_session`
|
||||
|
||||
See also docs for parent method,
|
||||
:meth:`~ImportHandler.get_importer_kwargs()`.
|
||||
"""
|
||||
kwargs = super().get_importer_kwargs(key, **kwargs)
|
||||
kwargs["source_session"] = self.source_session
|
||||
return kwargs
|
||||
|
||||
|
||||
class FromWuttaHandler(FromSqlalchemyHandler):
|
||||
"""
|
||||
Handler for import/export which uses Wutta ORM (:term:`app
|
||||
database`) as data source.
|
||||
|
||||
This inherits from :class:`FromSqlalchemyHandler`.
|
||||
|
||||
See also :class:`ToWuttaHandler`.
|
||||
"""
|
||||
|
||||
source_key = "wutta"
|
||||
"" # nb. suppress docs
|
||||
|
||||
def get_source_title(self):
|
||||
"""
|
||||
This overrides default logic to use
|
||||
:meth:`~wuttjamaican:wuttjamaican.app.AppHandler.get_title()`
|
||||
as the default value.
|
||||
|
||||
Subclass can still define
|
||||
:attr:`~wuttasync.importing.handlers.ImportHandler.source_title`
|
||||
(or
|
||||
:attr:`~wuttasync.importing.handlers.ImportHandler.generic_source_title`)
|
||||
to customize.
|
||||
|
||||
See also docs for parent method:
|
||||
:meth:`~wuttasync.importing.handlers.ImportHandler.get_source_title()`
|
||||
"""
|
||||
if hasattr(self, "source_title"):
|
||||
return self.source_title
|
||||
if hasattr(self, "generic_source_title"):
|
||||
return self.generic_source_title
|
||||
return self.app.get_title()
|
||||
|
||||
def make_source_session(self):
|
||||
"""
|
||||
This calls
|
||||
:meth:`~wuttjamaican:wuttjamaican.app.AppHandler.make_session()`
|
||||
and returns it.
|
||||
"""
|
||||
return self.app.make_session()
|
||||
|
||||
|
||||
class ToSqlalchemyHandler(ImportHandler):
|
||||
"""
|
||||
Handler for import/export which targets a SQLAlchemy ORM (DB).
|
||||
Base class for import/export handlers which target a SQLAlchemy
|
||||
ORM (DB).
|
||||
|
||||
This is the base class for :class:`ToWuttaHandler`, but can be
|
||||
used with any database.
|
||||
|
||||
See also :class:`FromSqlalchemyHandler`.
|
||||
"""
|
||||
|
||||
target_session = None
|
||||
|
|
@ -553,8 +986,72 @@ class ToSqlalchemyHandler(ImportHandler):
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_importer_kwargs(self, key, **kwargs):
|
||||
def get_importer_kwargs(self, key, **kwargs): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
kwargs = super().get_importer_kwargs(key, **kwargs)
|
||||
kwargs.setdefault('target_session', self.target_session)
|
||||
kwargs.setdefault("target_session", self.target_session)
|
||||
return kwargs
|
||||
|
||||
|
||||
class ToWuttaHandler(ToSqlalchemyHandler):
|
||||
"""
|
||||
Handler for import/export which targets Wutta ORM (:term:`app
|
||||
database`).
|
||||
|
||||
This inherits from :class:`ToSqlalchemyHandler`.
|
||||
|
||||
See also :class:`FromWuttaHandler`.
|
||||
"""
|
||||
|
||||
target_key = "wutta"
|
||||
"" # nb. suppress docs
|
||||
|
||||
def get_target_title(self): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
# nb. we override parent to use app title as default
|
||||
if hasattr(self, "target_title"):
|
||||
return self.target_title
|
||||
if hasattr(self, "generic_target_title"):
|
||||
return self.generic_target_title
|
||||
return self.app.get_title()
|
||||
|
||||
def make_target_session(self):
|
||||
"""
|
||||
This creates a typical :term:`db session` for the app by
|
||||
calling
|
||||
:meth:`~wuttjamaican:wuttjamaican.app.AppHandler.make_session()`.
|
||||
|
||||
It then may "customize" the session slightly. These
|
||||
customizations only are relevant if Wutta-Continuum versioning
|
||||
is enabled:
|
||||
|
||||
If :attr:`~ImportHandler.runas_username` is set, the
|
||||
responsible user (``continuum_user_id``) will be set for the
|
||||
new session as well.
|
||||
|
||||
Similarly, if :attr:`~ImportHandler.transaction_comment` is
|
||||
set, it (``continuum_comment``) will also be set for the new
|
||||
session.
|
||||
|
||||
:returns: :class:`~wuttjamaican:wuttjamaican.db.sess.Session`
|
||||
instance.
|
||||
"""
|
||||
model = self.app.model
|
||||
session = self.app.make_session()
|
||||
|
||||
# set runas user in case continuum versioning is enabled
|
||||
if self.runas_username:
|
||||
if user := (
|
||||
session.query(model.User)
|
||||
.filter_by(username=self.runas_username)
|
||||
.first()
|
||||
):
|
||||
session.info["continuum_user_id"] = user.uuid
|
||||
else:
|
||||
log.warning("runas username not found: %s", self.runas_username)
|
||||
|
||||
# set comment in case continuum versioning is enabled
|
||||
if self.transaction_comment:
|
||||
session.info["continuum_comment"] = self.transaction_comment
|
||||
|
||||
return session
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
|
|
|
|||
346
src/wuttasync/importing/versions.py
Normal file
346
src/wuttasync/importing/versions.py
Normal file
|
|
@ -0,0 +1,346 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2025 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
Importing Versions
|
||||
|
||||
This is a special type of import, only relevant when data versioning
|
||||
is enabled.
|
||||
|
||||
See the handler class for more info: :class:`FromWuttaToVersions`
|
||||
"""
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from sqlalchemy_utils.functions import get_primary_keys
|
||||
|
||||
from wuttjamaican.db.util import make_topo_sortkey
|
||||
|
||||
from .handlers import FromWuttaHandler, ToWuttaHandler
|
||||
from .wutta import FromWuttaMirror
|
||||
from .model import ToWutta
|
||||
|
||||
|
||||
class FromWuttaToVersions(FromWuttaHandler, ToWuttaHandler):
|
||||
"""
|
||||
Handler for Wutta -> Versions import.
|
||||
|
||||
The purpose of this is to ensure version tables accurately reflect
|
||||
the current "live" data set, for given table(s). It is only
|
||||
relevant/usable if versioning is configured and enabled. For more
|
||||
on that see :doc:`wutta-continuum:index`.
|
||||
|
||||
For a given import model, the source is the "live" table, target
|
||||
is the "version" table - both in the same :term:`app database`.
|
||||
|
||||
When reading data from the target side, it only grabs the "latest"
|
||||
(valid) version record for each comparison to source.
|
||||
|
||||
When changes are needed, instead of updating the existing version
|
||||
record, it always writes a new version record.
|
||||
|
||||
This handler will dynamically create importers for all versioned
|
||||
models in the :term:`app model`; see
|
||||
:meth:`make_importer_factory()`.
|
||||
"""
|
||||
|
||||
target_key = "versions"
|
||||
target_title = "Versions"
|
||||
|
||||
continuum_uow = None
|
||||
"""
|
||||
Reference to the
|
||||
:class:`sqlalchemy-continuum:`sqlalchemy_continuum.UnitOfWork`
|
||||
created (by the SQLAlchemy-Continuum ``versioning_manager``) when
|
||||
the transaction begins.
|
||||
|
||||
See also :attr:`continuum_txn` and
|
||||
:meth:`begin_target_transaction()`.
|
||||
"""
|
||||
|
||||
continuum_txn = None
|
||||
"""
|
||||
Reference to the SQLAlchemy-Continuum ``transaction`` record, to
|
||||
which any new version records will associate (if needed).
|
||||
|
||||
This transaction will track the effective user responsible for
|
||||
the change(s), their client IP, and timestamp.
|
||||
|
||||
This reference is passed along to the importers as well (as
|
||||
:attr:`~FromWuttaToVersionBase.continuum_txn`) via
|
||||
:meth:`get_importer_kwargs()`.
|
||||
|
||||
See also :attr:`continuum_uow`.
|
||||
"""
|
||||
|
||||
def begin_target_transaction(self):
|
||||
# pylint: disable=line-too-long
|
||||
"""
|
||||
In addition to normal logic, this does some setup for
|
||||
SQLAlchemy-Continuum:
|
||||
|
||||
It establishes a "unit of work" by calling
|
||||
:meth:`~sqlalchemy-continuum:sqlalchemy_continuum.VersioningManager.unit_of_work()`,
|
||||
assigning the result to :attr:`continuum_uow`.
|
||||
|
||||
It then calls
|
||||
:meth:`~sqlalchemy-continuum:sqlalchemy_continuum.unit_of_work.UnitOfWork.create_transaction()`
|
||||
and assigns that to :attr:`continuum_txn`.
|
||||
|
||||
It also sets the comment for the transaction, if applicable.
|
||||
|
||||
See also docs for parent method:
|
||||
:meth:`~wuttasync.importing.handlers.ToSqlalchemyHandler.begin_target_transaction()`
|
||||
"""
|
||||
import sqlalchemy_continuum as continuum # pylint: disable=import-outside-toplevel
|
||||
|
||||
super().begin_target_transaction()
|
||||
|
||||
self.continuum_uow = continuum.versioning_manager.unit_of_work(
|
||||
self.target_session
|
||||
)
|
||||
|
||||
self.continuum_txn = self.continuum_uow.create_transaction(self.target_session)
|
||||
|
||||
if self.transaction_comment:
|
||||
self.continuum_txn.meta = {"comment": self.transaction_comment}
|
||||
|
||||
def get_importer_kwargs(self, key, **kwargs):
|
||||
"""
|
||||
This modifies the new importer kwargs to add:
|
||||
|
||||
* ``continuum_txn`` - reference to :attr:`continuum_txn`
|
||||
|
||||
See also docs for parent method:
|
||||
:meth:`~wuttasync.importing.handlers.ImportHandler.get_importer_kwargs()`
|
||||
"""
|
||||
kwargs = super().get_importer_kwargs(key, **kwargs)
|
||||
kwargs["continuum_txn"] = self.continuum_txn
|
||||
return kwargs
|
||||
|
||||
# TODO: pylint (correctly) flags this as duplicate code, matching
|
||||
# on the wuttasync.importing.csv/wutta module - should fix?
|
||||
def define_importers(self):
|
||||
"""
|
||||
This overrides typical (manual) importer definition, instead
|
||||
generating importers for all versioned models.
|
||||
|
||||
It will inspect the :term:`app model` and call
|
||||
:meth:`make_importer_factory()` for each model found, keeping
|
||||
only the valid importers.
|
||||
|
||||
See also the docs for parent method:
|
||||
:meth:`~wuttasync.importing.handlers.ImportHandler.define_importers()`
|
||||
"""
|
||||
model = self.app.model
|
||||
importers = {}
|
||||
|
||||
# pylint: disable=duplicate-code
|
||||
# mostly try to make an importer for every data model
|
||||
for name in dir(model):
|
||||
cls = getattr(model, name)
|
||||
if (
|
||||
isinstance(cls, type)
|
||||
and issubclass(cls, model.Base)
|
||||
and cls is not model.Base
|
||||
):
|
||||
# only keep "good" importers, i.e. for versioned models
|
||||
if factory := self.make_importer_factory(cls, name):
|
||||
importers[name] = factory
|
||||
|
||||
# sort importers according to schema topography
|
||||
topo_sortkey = make_topo_sortkey(model)
|
||||
importers = OrderedDict(
|
||||
[(name, importers[name]) for name in sorted(importers, key=topo_sortkey)]
|
||||
)
|
||||
|
||||
return importers
|
||||
|
||||
def make_importer_factory(self, model_class, name):
|
||||
"""
|
||||
Try to generate a new :term:`importer` class for the given
|
||||
:term:`data model`. This is called by
|
||||
:meth:`define_importers()`.
|
||||
|
||||
If the provided ``model_class`` is not versioned, this will
|
||||
fail and return ``None``.
|
||||
|
||||
For a versioned model, the new importer class will inherit
|
||||
from :class:`FromWuttaToVersionBase`.
|
||||
|
||||
Its (target)
|
||||
:attr:`~wuttasync.importing.base.Importer.model_class` will be
|
||||
set to the **version** model.
|
||||
|
||||
Its
|
||||
:attr:`~wuttasync.importing.base.FromSqlalchemy.source_model_class`
|
||||
will be set to the **normal** model.
|
||||
|
||||
:param model_class: A (normal, not version) data model class.
|
||||
|
||||
:param name: The "model name" for the importer. New class
|
||||
name will be based on this, so e.g. ``Widget`` model name
|
||||
becomes ``WidgetImporter`` class name.
|
||||
|
||||
:returns: The new class, or ``None``
|
||||
"""
|
||||
import sqlalchemy_continuum as continuum # pylint: disable=import-outside-toplevel
|
||||
|
||||
try:
|
||||
version_class = continuum.version_class(model_class)
|
||||
except continuum.exc.ClassNotVersioned:
|
||||
return None
|
||||
|
||||
return type(
|
||||
f"{name}Importer",
|
||||
(FromWuttaToVersionBase,),
|
||||
{
|
||||
"source_model_class": model_class,
|
||||
"model_class": version_class,
|
||||
"default_keys": list(get_primary_keys(model_class)),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class FromWuttaToVersionBase(FromWuttaMirror, ToWutta):
|
||||
"""
|
||||
Base importer class for Wutta -> Versions.
|
||||
|
||||
This imports from
|
||||
:class:`~wuttasync.importing.wutta.FromWuttaMirror` and
|
||||
:class:`~wuttasync.importing.model.ToWutta`.
|
||||
|
||||
The import handler will dynamically generate importers using this
|
||||
base class; see
|
||||
:meth:`~FromWuttaToVersions.make_importer_factory()`.
|
||||
"""
|
||||
|
||||
continuum_txn = None
|
||||
"""
|
||||
Reference to the handler's attribute of the same name:
|
||||
:attr:`~FromWuttaToVersions.continuum_txn`
|
||||
|
||||
This is the SQLAlchemy-Continuum ``transaction`` record, to which
|
||||
any new version records will associate (if needed).
|
||||
|
||||
This transaction will track the effective user responsible for
|
||||
the change(s), their client IP, and timestamp.
|
||||
"""
|
||||
|
||||
def get_simple_fields(self): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
fields = super().get_simple_fields()
|
||||
unwanted = ["transaction_id", "operation_type", "end_transaction_id"]
|
||||
fields = [field for field in fields if field not in unwanted]
|
||||
return fields
|
||||
|
||||
def get_target_query(self, source_data=None):
|
||||
"""
|
||||
This modifies the normal query to ensure we only get the
|
||||
"latest valid" version for each record, for comparison to
|
||||
source.
|
||||
|
||||
.. note::
|
||||
|
||||
In some cases, it still may be possible for multiple
|
||||
"latest" versions to match for a given record. This means
|
||||
inconsistent data; a warning should be logged if so, and
|
||||
you must track it down...
|
||||
|
||||
See also docs for parent method:
|
||||
:meth:`~wuttasync.importing.base.ToSqlalchemy.get_target_query()`
|
||||
"""
|
||||
import sqlalchemy_continuum as continuum # pylint: disable=import-outside-toplevel
|
||||
|
||||
# pylint: disable=singleton-comparison
|
||||
return (
|
||||
self.target_session.query(self.model_class)
|
||||
.filter(self.model_class.end_transaction_id == None)
|
||||
.filter(self.model_class.operation_type != continuum.Operation.DELETE)
|
||||
)
|
||||
|
||||
def normalize_target_object(self, obj): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
data = super().normalize_target_object(obj)
|
||||
|
||||
# we want to add the original version object to normalized
|
||||
# data, so we can access it later for updating if needed. but
|
||||
# this method is called for *both* sides (source+target) since
|
||||
# this is a "mirrored" importer. so we must check the type
|
||||
# and only cache true versions, ignore "normal" objects.
|
||||
if isinstance( # pylint: disable=isinstance-second-argument-not-valid-type
|
||||
obj, self.model_class
|
||||
):
|
||||
data["_objref"] = obj
|
||||
|
||||
return data
|
||||
|
||||
def make_version( # pylint: disable=missing-function-docstring
|
||||
self, source_data, operation_type
|
||||
):
|
||||
key = self.get_record_key(source_data)
|
||||
with self.target_session.no_autoflush:
|
||||
version = self.make_empty_object(key)
|
||||
self.populate(version, source_data)
|
||||
version.transaction = self.continuum_txn
|
||||
version.operation_type = operation_type
|
||||
self.target_session.add(version)
|
||||
return version
|
||||
|
||||
def populate(self, obj, data): # pylint: disable=missing-function-docstring
|
||||
keys = self.get_keys()
|
||||
for field in self.get_simple_fields():
|
||||
if field not in keys and field in data and field in self.fields:
|
||||
setattr(obj, field, data[field])
|
||||
|
||||
def create_target_object(self, key, source_data): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
import sqlalchemy_continuum as continuum # pylint: disable=import-outside-toplevel
|
||||
|
||||
return self.make_version(source_data, continuum.Operation.INSERT)
|
||||
|
||||
def update_target_object( # pylint: disable=empty-docstring
|
||||
self, obj, source_data, target_data=None
|
||||
):
|
||||
""" """
|
||||
import sqlalchemy_continuum as continuum # pylint: disable=import-outside-toplevel
|
||||
|
||||
# when we "update" it always involves making a *new* version
|
||||
# record. but that requires actually updating the "previous"
|
||||
# version to indicate the new version's transaction.
|
||||
prev_version = target_data.pop("_objref")
|
||||
prev_version.end_transaction_id = self.continuum_txn.id
|
||||
|
||||
return self.make_version(source_data, continuum.Operation.UPDATE)
|
||||
|
||||
def delete_target_object(self, obj): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
import sqlalchemy_continuum as continuum # pylint: disable=import-outside-toplevel
|
||||
|
||||
# nb. `obj` here is the existing/old version record; we update
|
||||
# it to indicate the new version's transaction.
|
||||
obj.end_transaction_id = self.continuum_txn.id
|
||||
|
||||
# add new "DELETE" version record. values should be the same as
|
||||
# for "previous" (existing/old) version.
|
||||
source_data = self.normalize_target_object(obj)
|
||||
return self.make_version(source_data, continuum.Operation.DELETE)
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2026 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
|
|
@ -21,37 +21,175 @@
|
|||
#
|
||||
################################################################################
|
||||
"""
|
||||
Wutta ⇄ Wutta import/export
|
||||
Wutta → Wutta import/export
|
||||
"""
|
||||
|
||||
from .handlers import ToSqlalchemyHandler
|
||||
from collections import OrderedDict
|
||||
|
||||
from sqlalchemy_utils.functions import get_primary_keys
|
||||
|
||||
from wuttjamaican.db.util import make_topo_sortkey
|
||||
|
||||
from .base import FromSqlalchemyMirror
|
||||
from .model import ToWutta
|
||||
from .handlers import FromWuttaHandler, ToWuttaHandler, Orientation
|
||||
|
||||
|
||||
class ToWuttaHandler(ToSqlalchemyHandler):
|
||||
class FromWuttaMirror(FromSqlalchemyMirror): # pylint: disable=abstract-method
|
||||
"""
|
||||
Handler for import/export which targets Wutta ORM (:term:`app
|
||||
database`).
|
||||
Base class for Wutta → Wutta data :term:`importers/exporters
|
||||
<importer>`.
|
||||
|
||||
This inherits from
|
||||
:class:`~wuttasync.importing.base.FromSqlalchemyMirror`.
|
||||
"""
|
||||
|
||||
target_key = 'wutta'
|
||||
"" # nb. suppress docs
|
||||
|
||||
def get_target_title(self):
|
||||
class FromWuttaToWuttaBase(FromWuttaHandler, ToWuttaHandler):
|
||||
"""
|
||||
Base class for Wutta → Wutta data :term:`import/export handlers
|
||||
<import handler>`.
|
||||
|
||||
This inherits from
|
||||
:class:`~wuttasync.importing.handlers.FromWuttaHandler` and
|
||||
:class:`~wuttasync.importing.handlers.ToWuttaHandler`.
|
||||
"""
|
||||
|
||||
dbkey = None
|
||||
"""
|
||||
Config key for the "other" (non-local) :term:`app database`.
|
||||
Depending on context this will represent either the source or
|
||||
target for import/export.
|
||||
"""
|
||||
|
||||
def get_target_model(self): # pylint: disable=missing-function-docstring
|
||||
return self.app.model
|
||||
|
||||
# TODO: pylint (correctly) flags this as duplicate code, matching
|
||||
# on the wuttasync.importing.csv/versions module - should fix?
|
||||
def define_importers(self):
|
||||
"""
|
||||
This overrides typical (manual) importer definition, and
|
||||
instead dynamically generates a set of importers, e.g. one per
|
||||
table in the target DB.
|
||||
|
||||
It does this by calling :meth:`make_importer_factory()` for
|
||||
each class found in the :term:`app model`.
|
||||
"""
|
||||
importers = {}
|
||||
model = self.get_target_model()
|
||||
|
||||
# pylint: disable=duplicate-code
|
||||
# mostly try to make an importer for every data model
|
||||
for name in dir(model):
|
||||
cls = getattr(model, name)
|
||||
if (
|
||||
isinstance(cls, type)
|
||||
and issubclass(cls, model.Base)
|
||||
and cls is not model.Base
|
||||
):
|
||||
importers[name] = self.make_importer_factory(cls, name)
|
||||
|
||||
# sort importers according to schema topography
|
||||
topo_sortkey = make_topo_sortkey(model)
|
||||
importers = OrderedDict(
|
||||
[(name, importers[name]) for name in sorted(importers, key=topo_sortkey)]
|
||||
)
|
||||
|
||||
return importers
|
||||
|
||||
def make_importer_factory(self, model_class, name):
|
||||
"""
|
||||
Generate and return a new :term:`importer` class, targeting
|
||||
the given :term:`data model` class.
|
||||
|
||||
The newly-created class will inherit from:
|
||||
|
||||
* :class:`FromWuttaMirror`
|
||||
* :class:`~wuttasync.importing.model.ToWutta`
|
||||
|
||||
:param model_class: A data model class.
|
||||
|
||||
:param name: The "model name" for the importer/exporter. New
|
||||
class name will be based on this, so e.g. ``Widget`` model
|
||||
name becomes ``WidgetImporter`` class name.
|
||||
|
||||
:returns: The new class, meant to process import/export
|
||||
targeting the given data model.
|
||||
"""
|
||||
return type(
|
||||
f"{name}Importer",
|
||||
(FromWuttaMirror, ToWutta),
|
||||
{
|
||||
"model_class": model_class,
|
||||
"key": list(get_primary_keys(model_class)),
|
||||
},
|
||||
)
|
||||
|
||||
def is_default(self, key): # pylint: disable=empty-docstring
|
||||
""" """
|
||||
# nb. we override parent to use app title as default
|
||||
if hasattr(self, 'target_title'):
|
||||
return self.target_title
|
||||
if hasattr(self, 'generic_target_title'):
|
||||
return self.generic_target_title
|
||||
return self.app.get_title()
|
||||
special = [
|
||||
"Setting",
|
||||
"Role",
|
||||
"Permission",
|
||||
"User",
|
||||
"UserRole",
|
||||
"UserAPIToken",
|
||||
"Upgrade",
|
||||
]
|
||||
if key in special:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class FromWuttaToWuttaImport(FromWuttaToWuttaBase):
|
||||
"""
|
||||
Handler for Wutta (other) → Wutta (local) data import.
|
||||
|
||||
This inherits from :class:`FromWuttaToWuttaBase`.
|
||||
"""
|
||||
|
||||
orientation = Orientation.IMPORT
|
||||
""" """ # nb. suppress docs
|
||||
|
||||
def make_source_session(self):
|
||||
"""
|
||||
This makes a "normal" :term:`db session`, but will use the
|
||||
engine corresponding to the
|
||||
:attr:`~FromWuttaToWuttaBase.dbkey`.
|
||||
"""
|
||||
if (
|
||||
not self.dbkey
|
||||
or self.dbkey == "default"
|
||||
or self.dbkey not in self.config.appdb_engines
|
||||
):
|
||||
raise ValueError(f"dbkey is not valid: {self.dbkey}")
|
||||
engine = self.config.appdb_engines[self.dbkey]
|
||||
return self.app.make_session(bind=engine)
|
||||
|
||||
|
||||
class FromWuttaToWuttaExport(FromWuttaToWuttaBase):
|
||||
"""
|
||||
Handler for Wutta (local) → Wutta (other) data export.
|
||||
|
||||
This inherits from :class:`FromWuttaToWuttaBase`.
|
||||
"""
|
||||
|
||||
orientation = Orientation.EXPORT
|
||||
""" """ # nb. suppress docs
|
||||
|
||||
def make_target_session(self):
|
||||
"""
|
||||
Call
|
||||
:meth:`~wuttjamaican:wuttjamaican.app.AppHandler.make_session()`
|
||||
and return the result.
|
||||
|
||||
:returns: :class:`~wuttjamaican:wuttjamaican.db.sess.Session`
|
||||
instance.
|
||||
This makes a "normal" :term:`db session`, but will use the
|
||||
engine corresponding to the
|
||||
:attr:`~FromWuttaToWuttaBase.dbkey`.
|
||||
"""
|
||||
return self.app.make_session()
|
||||
if (
|
||||
not self.dbkey
|
||||
or self.dbkey == "default"
|
||||
or self.dbkey not in self.config.appdb_engines
|
||||
):
|
||||
raise ValueError(f"dbkey is not valid: {self.dbkey}")
|
||||
engine = self.config.appdb_engines[self.dbkey]
|
||||
return self.app.make_session(bind=engine)
|
||||
|
|
|
|||
68
src/wuttasync/testing.py
Normal file
68
src/wuttasync/testing.py
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024-2025 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
#
|
||||
# Wutta Framework is free software: you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the Free
|
||||
# Software Foundation, either version 3 of the License, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# Wutta Framework is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along with
|
||||
# Wutta Framework. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
################################################################################
|
||||
"""
|
||||
Testing utilities
|
||||
"""
|
||||
|
||||
from wuttjamaican.testing import ConfigTestCase
|
||||
|
||||
|
||||
class ImportExportWarningTestCase(ConfigTestCase):
|
||||
"""
|
||||
Base class for testing the import/export warning email settings.
|
||||
|
||||
This inherits from
|
||||
:class:`~wuttjamaican:wuttjamaican.testing.ConfigTestCase`.
|
||||
|
||||
Example usage::
|
||||
|
||||
from wuttasync.testing import ImportExportWarningTestCase
|
||||
|
||||
class TestEmailSettings(ImportExportWarningTestCase):
|
||||
|
||||
def test_import_to_wutta_from_foo_warning(self):
|
||||
self.do_test_preview("import_to_wutta_from_foo_warning")
|
||||
|
||||
def test_export_to_foo_from_wutta_warning(self):
|
||||
self.do_test_preview("export_to_foo_from_wutta_warning")
|
||||
"""
|
||||
|
||||
app_title = "Wutta Poser"
|
||||
|
||||
def setUp(self):
|
||||
self.setup_config()
|
||||
self.config.setdefault("wutta.app_title", self.app_title)
|
||||
|
||||
def make_preview( # pylint: disable=missing-function-docstring,unused-argument
|
||||
self, key, mode="html"
|
||||
):
|
||||
handler = self.app.get_email_handler()
|
||||
setting = handler.get_email_setting(key)
|
||||
context = setting.sample_data()
|
||||
return handler.get_auto_html_body(
|
||||
setting.key, context, fallback_key=setting.fallback_key
|
||||
)
|
||||
|
||||
def do_test_preview(self, key): # pylint: disable=missing-function-docstring
|
||||
body = self.make_preview(key, mode="html")
|
||||
self.assertIn("Diff warning for ", body)
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
################################################################################
|
||||
#
|
||||
# WuttaSync -- Wutta framework for data import/export and real-time sync
|
||||
# WuttaSync -- Wutta Framework for data import/export and real-time sync
|
||||
# Copyright © 2024 Lance Edgar
|
||||
#
|
||||
# This file is part of Wutta Framework.
|
||||
|
|
|
|||
10
tasks.py
10
tasks.py
|
|
@ -15,10 +15,10 @@ def release(c, skip_tests=False):
|
|||
Release a new version of WuttaSync
|
||||
"""
|
||||
if not skip_tests:
|
||||
c.run('pytest')
|
||||
c.run("pytest")
|
||||
|
||||
if os.path.exists('dist'):
|
||||
shutil.rmtree('dist')
|
||||
if os.path.exists("dist"):
|
||||
shutil.rmtree("dist")
|
||||
|
||||
c.run('python -m build --sdist')
|
||||
c.run('twine upload dist/*')
|
||||
c.run("python -m build --sdist")
|
||||
c.run("twine upload dist/*")
|
||||
|
|
|
|||
0
tests/cli/__init__.py
Normal file
0
tests/cli/__init__.py
Normal file
0
tests/cli/example.conf
Normal file
0
tests/cli/example.conf
Normal file
274
tests/cli/test_base.py
Normal file
274
tests/cli/test_base.py
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
import inspect
|
||||
import sys
|
||||
from unittest import TestCase
|
||||
from unittest.mock import patch, Mock, call
|
||||
|
||||
from wuttasync.cli import base as mod
|
||||
from wuttjamaican.testing import DataTestCase
|
||||
|
||||
|
||||
class TestImportCommandHandler(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.ImportCommandHandler(self.config, **kwargs)
|
||||
|
||||
def test_import_handler(self):
|
||||
|
||||
# none
|
||||
handler = self.make_handler()
|
||||
self.assertIsNone(handler.import_handler)
|
||||
|
||||
FromCsvToWutta = self.app.load_object("wuttasync.importing.csv:FromCsvToWutta")
|
||||
|
||||
# as spec
|
||||
handler = self.make_handler(import_handler=FromCsvToWutta.get_spec())
|
||||
self.assertIsInstance(handler.import_handler, FromCsvToWutta)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertFalse(hasattr(handler.import_handler, "foo"))
|
||||
|
||||
# as spec, w/ kwargs
|
||||
handler = self.make_handler(import_handler=FromCsvToWutta.get_spec(), foo="bar")
|
||||
self.assertIsInstance(handler.import_handler, FromCsvToWutta)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertTrue(hasattr(handler.import_handler, "foo"))
|
||||
self.assertEqual(handler.import_handler.foo, "bar")
|
||||
|
||||
# as factory
|
||||
handler = self.make_handler(import_handler=FromCsvToWutta)
|
||||
self.assertIsInstance(handler.import_handler, FromCsvToWutta)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertFalse(hasattr(handler.import_handler, "foo"))
|
||||
|
||||
# as factory, w/ kwargs
|
||||
handler = self.make_handler(import_handler=FromCsvToWutta, foo="bar")
|
||||
self.assertIsInstance(handler.import_handler, FromCsvToWutta)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertTrue(hasattr(handler.import_handler, "foo"))
|
||||
self.assertEqual(handler.import_handler.foo, "bar")
|
||||
|
||||
# as instance
|
||||
myhandler = FromCsvToWutta(self.config)
|
||||
handler = self.make_handler(import_handler=myhandler)
|
||||
self.assertIs(handler.import_handler, myhandler)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertFalse(hasattr(handler.import_handler, "foo"))
|
||||
|
||||
# as instance, w/ kwargs (which are ignored)
|
||||
myhandler = FromCsvToWutta(self.config)
|
||||
handler = self.make_handler(import_handler=myhandler, foo="bar")
|
||||
self.assertIs(handler.import_handler, myhandler)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertFalse(hasattr(handler.import_handler, "foo"))
|
||||
|
||||
# as key
|
||||
handler = self.make_handler(key="import.to_wutta.from_csv")
|
||||
self.assertIsInstance(handler.import_handler, FromCsvToWutta)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertFalse(hasattr(handler.import_handler, "foo"))
|
||||
|
||||
# as key, w/ kwargs
|
||||
handler = self.make_handler(key="import.to_wutta.from_csv", foo="bar")
|
||||
self.assertIsInstance(handler.import_handler, FromCsvToWutta)
|
||||
self.assertFalse(hasattr(handler, "foo"))
|
||||
self.assertTrue(hasattr(handler.import_handler, "foo"))
|
||||
self.assertEqual(handler.import_handler.foo, "bar")
|
||||
|
||||
def test_run(self):
|
||||
handler = self.make_handler(
|
||||
import_handler="wuttasync.importing.csv:FromCsvToWutta"
|
||||
)
|
||||
|
||||
with patch.object(handler, "list_models") as list_models:
|
||||
ctx = Mock(params={"list_models": True})
|
||||
handler.run(ctx)
|
||||
list_models.assert_called_once_with(ctx.params)
|
||||
|
||||
class Object:
|
||||
def __init__(self, **kw):
|
||||
self.__dict__.update(kw)
|
||||
|
||||
with patch.object(handler, "import_handler") as import_handler:
|
||||
parent = Mock(
|
||||
params={
|
||||
"runas_username": "fred",
|
||||
"comment": "hello world",
|
||||
}
|
||||
)
|
||||
# TODO: why can't we just use Mock here? the parent attr is problematic
|
||||
ctx = Object(params={"models": []}, parent=parent)
|
||||
handler.run(ctx)
|
||||
import_handler.process_data.assert_called_once_with(
|
||||
runas_username="fred",
|
||||
transaction_comment="hello world",
|
||||
)
|
||||
|
||||
def test_run_missing_input(self):
|
||||
handler = self.make_handler(
|
||||
import_handler="wuttasync.importing.csv:FromCsvToWutta"
|
||||
)
|
||||
|
||||
class Object:
|
||||
def __init__(self, **kw):
|
||||
self.__dict__.update(kw)
|
||||
|
||||
# fails without input_file_path
|
||||
with patch.object(sys, "exit") as exit_:
|
||||
exit_.side_effect = RuntimeError
|
||||
ctx = Object(
|
||||
params={},
|
||||
parent=Object(params={}),
|
||||
)
|
||||
try:
|
||||
handler.run(ctx)
|
||||
except RuntimeError:
|
||||
pass
|
||||
exit_.assert_called_once_with(1)
|
||||
|
||||
# runs with input_file_path
|
||||
with patch.object(sys, "exit") as exit_:
|
||||
exit_.side_effect = RuntimeError
|
||||
ctx = Object(
|
||||
params={"input_file_path": self.tempdir},
|
||||
parent=Object(
|
||||
params={},
|
||||
),
|
||||
)
|
||||
self.assertRaises(FileNotFoundError, handler.run, ctx)
|
||||
exit_.assert_not_called()
|
||||
|
||||
def test_run_missing_output(self):
|
||||
handler = self.make_handler(
|
||||
import_handler="wuttasync.exporting.csv:FromWuttaToCsv"
|
||||
)
|
||||
|
||||
class Object:
|
||||
def __init__(self, **kw):
|
||||
self.__dict__.update(kw)
|
||||
|
||||
# fails without output_file_path
|
||||
with patch.object(sys, "exit") as exit_:
|
||||
exit_.side_effect = RuntimeError
|
||||
ctx = Object(
|
||||
params={},
|
||||
parent=Object(params={}),
|
||||
)
|
||||
try:
|
||||
handler.run(ctx)
|
||||
except RuntimeError:
|
||||
pass
|
||||
exit_.assert_called_once_with(1)
|
||||
|
||||
# runs with output_file_path
|
||||
with patch.object(sys, "exit") as exit_:
|
||||
exit_.side_effect = RuntimeError
|
||||
ctx = Object(
|
||||
params={"output_file_path": self.tempdir},
|
||||
parent=Object(
|
||||
params={},
|
||||
),
|
||||
)
|
||||
handler.run(ctx)
|
||||
exit_.assert_not_called()
|
||||
|
||||
def test_list_models(self):
|
||||
|
||||
# CSV -> Wutta (all importers are default)
|
||||
handler = self.make_handler(
|
||||
import_handler="wuttasync.importing.csv:FromCsvToWutta"
|
||||
)
|
||||
with patch.object(mod, "sys") as sys:
|
||||
handler.list_models({})
|
||||
sys.stdout.write.assert_has_calls(
|
||||
[
|
||||
call("==============================\n"),
|
||||
call(" EXTRA MODELS:\n"),
|
||||
call("==============================\n"),
|
||||
call("(none)\n"),
|
||||
call("==============================\n"),
|
||||
]
|
||||
)
|
||||
|
||||
# Wutta -> Wutta (only Person importer is default)
|
||||
handler = self.make_handler(
|
||||
import_handler="wuttasync.importing.wutta:FromWuttaToWuttaImport"
|
||||
)
|
||||
with patch.object(mod, "sys") as sys:
|
||||
handler.list_models({})
|
||||
sys.stdout.write.assert_has_calls(
|
||||
[
|
||||
call("==============================\n"),
|
||||
call(" DEFAULT MODELS:\n"),
|
||||
call("==============================\n"),
|
||||
call("Person\n"),
|
||||
call("==============================\n"),
|
||||
call(" EXTRA MODELS:\n"),
|
||||
call("==============================\n"),
|
||||
]
|
||||
)
|
||||
|
||||
# again, but pretend there are no default importers
|
||||
with patch.object(handler.import_handler, "is_default", return_value=False):
|
||||
with patch.object(mod, "sys") as sys:
|
||||
handler.list_models({})
|
||||
sys.stdout.write.assert_has_calls(
|
||||
[
|
||||
call("==============================\n"),
|
||||
call(" DEFAULT MODELS:\n"),
|
||||
call("==============================\n"),
|
||||
call("(none)\n"),
|
||||
call("==============================\n"),
|
||||
call(" EXTRA MODELS:\n"),
|
||||
call("==============================\n"),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class TestImporterCommand(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
def myfunc(ctx, **kwargs):
|
||||
pass
|
||||
|
||||
sig1 = inspect.signature(myfunc)
|
||||
self.assertIn("kwargs", sig1.parameters)
|
||||
self.assertNotIn("dry_run", sig1.parameters)
|
||||
wrapt = mod.import_command(myfunc)
|
||||
sig2 = inspect.signature(wrapt)
|
||||
self.assertNotIn("kwargs", sig2.parameters)
|
||||
self.assertIn("dry_run", sig2.parameters)
|
||||
|
||||
|
||||
class TestFileExporterCommand(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
def myfunc(ctx, **kwargs):
|
||||
pass
|
||||
|
||||
sig1 = inspect.signature(myfunc)
|
||||
self.assertIn("kwargs", sig1.parameters)
|
||||
self.assertNotIn("dry_run", sig1.parameters)
|
||||
self.assertNotIn("output_file_path", sig1.parameters)
|
||||
wrapt = mod.file_export_command(myfunc)
|
||||
sig2 = inspect.signature(wrapt)
|
||||
self.assertNotIn("kwargs", sig2.parameters)
|
||||
self.assertIn("dry_run", sig2.parameters)
|
||||
self.assertIn("output_file_path", sig2.parameters)
|
||||
|
||||
|
||||
class TestFileImporterCommand(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
def myfunc(ctx, **kwargs):
|
||||
pass
|
||||
|
||||
sig1 = inspect.signature(myfunc)
|
||||
self.assertIn("kwargs", sig1.parameters)
|
||||
self.assertNotIn("dry_run", sig1.parameters)
|
||||
self.assertNotIn("input_file_path", sig1.parameters)
|
||||
wrapt = mod.file_import_command(myfunc)
|
||||
sig2 = inspect.signature(wrapt)
|
||||
self.assertNotIn("kwargs", sig2.parameters)
|
||||
self.assertIn("dry_run", sig2.parameters)
|
||||
self.assertIn("input_file_path", sig2.parameters)
|
||||
22
tests/cli/test_export_csv.py
Normal file
22
tests/cli/test_export_csv.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest import TestCase
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from wuttasync.cli import export_csv as mod, ImportCommandHandler
|
||||
|
||||
|
||||
class TestExportCsv(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
params = {
|
||||
"models": [],
|
||||
"create": True,
|
||||
"update": True,
|
||||
"delete": False,
|
||||
"dry_run": True,
|
||||
}
|
||||
ctx = MagicMock(params=params)
|
||||
with patch.object(ImportCommandHandler, "run") as run:
|
||||
mod.export_csv(ctx)
|
||||
run.assert_called_once_with(ctx)
|
||||
23
tests/cli/test_export_wutta.py
Normal file
23
tests/cli/test_export_wutta.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest import TestCase
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from wuttasync.cli import export_wutta as mod, ImportCommandHandler
|
||||
|
||||
|
||||
class TestExportWutta(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
params = {
|
||||
"dbkey": "another",
|
||||
"models": [],
|
||||
"create": True,
|
||||
"update": True,
|
||||
"delete": False,
|
||||
"dry_run": True,
|
||||
}
|
||||
ctx = MagicMock(params=params)
|
||||
with patch.object(ImportCommandHandler, "run") as run:
|
||||
mod.export_wutta(ctx)
|
||||
run.assert_called_once_with(ctx)
|
||||
22
tests/cli/test_import_csv.py
Normal file
22
tests/cli/test_import_csv.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest import TestCase
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from wuttasync.cli import import_csv as mod, ImportCommandHandler
|
||||
|
||||
|
||||
class TestImportCsv(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
params = {
|
||||
"models": [],
|
||||
"create": True,
|
||||
"update": True,
|
||||
"delete": False,
|
||||
"dry_run": True,
|
||||
}
|
||||
ctx = MagicMock(params=params)
|
||||
with patch.object(ImportCommandHandler, "run") as run:
|
||||
mod.import_csv(ctx)
|
||||
run.assert_called_once_with(ctx)
|
||||
22
tests/cli/test_import_versions.py
Normal file
22
tests/cli/test_import_versions.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest import TestCase
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from wuttasync.cli import import_versions as mod, ImportCommandHandler
|
||||
|
||||
|
||||
class TestImportCsv(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
params = {
|
||||
"models": [],
|
||||
"create": True,
|
||||
"update": True,
|
||||
"delete": False,
|
||||
"dry_run": True,
|
||||
}
|
||||
ctx = MagicMock(params=params)
|
||||
with patch.object(ImportCommandHandler, "run") as run:
|
||||
mod.import_versions(ctx)
|
||||
run.assert_called_once_with(ctx)
|
||||
23
tests/cli/test_import_wutta.py
Normal file
23
tests/cli/test_import_wutta.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest import TestCase
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from wuttasync.cli import import_wutta as mod, ImportCommandHandler
|
||||
|
||||
|
||||
class TestImportWutta(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
params = {
|
||||
"dbkey": "another",
|
||||
"models": [],
|
||||
"create": True,
|
||||
"update": True,
|
||||
"delete": False,
|
||||
"dry_run": True,
|
||||
}
|
||||
ctx = MagicMock(params=params)
|
||||
with patch.object(ImportCommandHandler, "run") as run:
|
||||
mod.import_wutta(ctx)
|
||||
run.assert_called_once_with(ctx)
|
||||
99
tests/exporting/test_base.py
Normal file
99
tests/exporting/test_base.py
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from wuttjamaican.testing import DataTestCase
|
||||
|
||||
from wuttasync.exporting import base as mod, ExportHandler
|
||||
|
||||
|
||||
class TestToFile(DataTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.setup_db()
|
||||
self.handler = ExportHandler(self.config)
|
||||
|
||||
def make_exporter(self, **kwargs):
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return mod.ToFile(self.config, **kwargs)
|
||||
|
||||
def test_setup(self):
|
||||
model = self.app.model
|
||||
|
||||
# output file is opened
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
self.assertFalse(exp.dry_run)
|
||||
with patch.object(exp, "open_output_file") as open_output_file:
|
||||
exp.setup()
|
||||
open_output_file.assert_called_once_with()
|
||||
|
||||
# but not if in dry run mode
|
||||
with patch.object(self.handler, "dry_run", new=True):
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
self.assertTrue(exp.dry_run)
|
||||
with patch.object(exp, "open_output_file") as open_output_file:
|
||||
exp.setup()
|
||||
open_output_file.assert_not_called()
|
||||
|
||||
def test_teardown(self):
|
||||
model = self.app.model
|
||||
|
||||
# output file is closed
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
self.assertFalse(exp.dry_run)
|
||||
with patch.object(exp, "close_output_file") as close_output_file:
|
||||
exp.teardown()
|
||||
close_output_file.assert_called_once_with()
|
||||
|
||||
# but not if in dry run mode
|
||||
with patch.object(self.handler, "dry_run", new=True):
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
self.assertTrue(exp.dry_run)
|
||||
with patch.object(exp, "close_output_file") as close_output_file:
|
||||
exp.teardown()
|
||||
close_output_file.assert_not_called()
|
||||
|
||||
def test_get_output_file_path(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
|
||||
# output path must be set
|
||||
self.assertRaises(ValueError, exp.get_output_file_path)
|
||||
|
||||
# path is guessed from dir+filename
|
||||
path1 = self.write_file("data1.txt", "")
|
||||
exp.output_file_path = self.tempdir
|
||||
exp.output_file_name = "data1.txt"
|
||||
self.assertEqual(exp.get_output_file_path(), path1)
|
||||
|
||||
# path can be explicitly set
|
||||
path2 = self.write_file("data2.txt", "")
|
||||
exp.output_file_path = path2
|
||||
self.assertEqual(exp.get_output_file_path(), path2)
|
||||
|
||||
def test_get_output_file_name(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
|
||||
# name cannot be guessed
|
||||
self.assertRaises(NotImplementedError, exp.get_output_file_name)
|
||||
|
||||
# name can be explicitly set
|
||||
exp.output_file_name = "data.txt"
|
||||
self.assertEqual(exp.get_output_file_name(), "data.txt")
|
||||
|
||||
def test_open_output_file(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
self.assertRaises(NotImplementedError, exp.open_output_file)
|
||||
|
||||
def test_close_output_file(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
|
||||
path = self.write_file("data.txt", "")
|
||||
with open(path, "wt") as f:
|
||||
exp.output_file = f
|
||||
with patch.object(f, "close") as close:
|
||||
exp.close_output_file()
|
||||
close.assert_called_once_with()
|
||||
209
tests/exporting/test_csv.py
Normal file
209
tests/exporting/test_csv.py
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
import csv
|
||||
import io
|
||||
from unittest.mock import patch
|
||||
|
||||
from wuttjamaican.testing import DataTestCase
|
||||
|
||||
from wuttasync.exporting import csv as mod, ExportHandler
|
||||
from wuttasync.importing import FromWuttaHandler, FromWutta
|
||||
|
||||
|
||||
class TestToCsv(DataTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.setup_db()
|
||||
self.handler = ExportHandler(self.config)
|
||||
|
||||
def make_exporter(self, **kwargs):
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
kwargs.setdefault("output_file_path", self.tempdir)
|
||||
return mod.ToCsv(self.config, **kwargs)
|
||||
|
||||
def test_get_output_file_name(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
|
||||
# name can be guessed
|
||||
self.assertEqual(exp.get_output_file_name(), "Setting.csv")
|
||||
|
||||
# name can be explicitly set
|
||||
exp.output_file_name = "data.txt"
|
||||
self.assertEqual(exp.get_output_file_name(), "data.txt")
|
||||
|
||||
def test_open_output_file(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
self.assertIsNone(exp.output_file)
|
||||
self.assertIsNone(exp.output_writer)
|
||||
exp.open_output_file()
|
||||
try:
|
||||
self.assertIsInstance(exp.output_file, io.TextIOBase)
|
||||
self.assertIsInstance(exp.output_writer, csv.DictWriter)
|
||||
finally:
|
||||
exp.output_file.close()
|
||||
|
||||
def test_close_output_file(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
|
||||
self.assertIsNone(exp.output_file)
|
||||
self.assertIsNone(exp.output_writer)
|
||||
exp.open_output_file()
|
||||
self.assertIsNotNone(exp.output_file)
|
||||
self.assertIsNotNone(exp.output_writer)
|
||||
exp.close_output_file()
|
||||
self.assertIsNone(exp.output_file)
|
||||
self.assertIsNone(exp.output_writer)
|
||||
|
||||
def test_coerce_csv(self):
|
||||
model = self.app.model
|
||||
|
||||
# string value
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
result = exp.coerce_csv({"name": "foo", "value": "bar"})
|
||||
self.assertEqual(result, {"name": "foo", "value": "bar"})
|
||||
|
||||
# null value converts to empty string
|
||||
result = exp.coerce_csv({"name": "foo", "value": None})
|
||||
self.assertEqual(result, {"name": "foo", "value": ""})
|
||||
|
||||
# float value passed thru as-is
|
||||
result = exp.coerce_csv({"name": "foo", "value": 12.34})
|
||||
self.assertEqual(result, {"name": "foo", "value": 12.34})
|
||||
self.assertIsInstance(result["value"], float)
|
||||
|
||||
def test_update_target_object(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(model_class=model.Setting)
|
||||
|
||||
exp.setup()
|
||||
|
||||
with patch.object(exp, "output_writer") as output_writer:
|
||||
|
||||
# writer is called for normal run
|
||||
data = {"name": "foo", "value": "bar"}
|
||||
exp.update_target_object(None, data)
|
||||
output_writer.writerow.assert_called_once_with(data)
|
||||
|
||||
# but not called for dry run
|
||||
output_writer.writerow.reset_mock()
|
||||
with patch.object(self.handler, "dry_run", new=True):
|
||||
exp.update_target_object(None, data)
|
||||
output_writer.writerow.assert_not_called()
|
||||
|
||||
exp.teardown()
|
||||
|
||||
|
||||
class MockMixinExporter(mod.FromSqlalchemyToCsvMixin, FromWutta, mod.ToCsv):
|
||||
pass
|
||||
|
||||
|
||||
class TestFromSqlalchemyToCsvMixin(DataTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.setup_db()
|
||||
self.handler = ExportHandler(self.config)
|
||||
|
||||
def make_exporter(self, **kwargs):
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return MockMixinExporter(self.config, **kwargs)
|
||||
|
||||
def test_model_title(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(source_model_class=model.Setting)
|
||||
|
||||
# default comes from model class
|
||||
self.assertEqual(exp.get_model_title(), "Setting")
|
||||
|
||||
# but can override
|
||||
exp.model_title = "Widget"
|
||||
self.assertEqual(exp.get_model_title(), "Widget")
|
||||
|
||||
def test_get_simple_fields(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(source_model_class=model.Setting)
|
||||
|
||||
# default comes from model class
|
||||
self.assertEqual(exp.get_simple_fields(), ["name", "value"])
|
||||
|
||||
# but can override
|
||||
exp.simple_fields = ["name"]
|
||||
self.assertEqual(exp.get_simple_fields(), ["name"])
|
||||
|
||||
# no default if no model class
|
||||
exp = self.make_exporter()
|
||||
self.assertEqual(exp.get_simple_fields(), [])
|
||||
|
||||
def test_normalize_source_object(self):
|
||||
model = self.app.model
|
||||
exp = self.make_exporter(source_model_class=model.Setting)
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
data = exp.normalize_source_object(setting)
|
||||
self.assertEqual(data, {"name": "foo", "value": "bar"})
|
||||
|
||||
def test_make_object(self):
|
||||
model = self.app.model
|
||||
|
||||
# normal
|
||||
exp = self.make_exporter(source_model_class=model.Setting)
|
||||
obj = exp.make_object()
|
||||
self.assertIsInstance(obj, model.Setting)
|
||||
|
||||
# no model_class
|
||||
exp = self.make_exporter()
|
||||
self.assertRaises(TypeError, exp.make_object)
|
||||
|
||||
|
||||
class MockMixinHandler(
|
||||
mod.FromSqlalchemyToCsvHandlerMixin, FromWuttaHandler, mod.ToCsvHandler
|
||||
):
|
||||
FromImporterBase = FromWutta
|
||||
|
||||
|
||||
class TestFromSqlalchemyToCsvHandlerMixin(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return MockMixinHandler(self.config, **kwargs)
|
||||
|
||||
def test_get_source_model(self):
|
||||
with patch.object(
|
||||
mod.FromSqlalchemyToCsvHandlerMixin, "define_importers", return_value={}
|
||||
):
|
||||
handler = self.make_handler()
|
||||
self.assertRaises(NotImplementedError, handler.get_source_model)
|
||||
|
||||
def test_define_importers(self):
|
||||
model = self.app.model
|
||||
with patch.object(
|
||||
mod.FromSqlalchemyToCsvHandlerMixin, "get_source_model", return_value=model
|
||||
):
|
||||
handler = self.make_handler()
|
||||
importers = handler.define_importers()
|
||||
self.assertIn("Setting", importers)
|
||||
self.assertTrue(issubclass(importers["Setting"], FromWutta))
|
||||
self.assertTrue(issubclass(importers["Setting"], mod.ToCsv))
|
||||
self.assertIn("User", importers)
|
||||
self.assertIn("Person", importers)
|
||||
self.assertIn("Role", importers)
|
||||
|
||||
def test_make_importer_factory(self):
|
||||
model = self.app.model
|
||||
with patch.object(
|
||||
mod.FromSqlalchemyToCsvHandlerMixin, "define_importers", return_value={}
|
||||
):
|
||||
handler = self.make_handler()
|
||||
factory = handler.make_importer_factory(model.Setting, "Setting")
|
||||
self.assertTrue(issubclass(factory, FromWutta))
|
||||
self.assertTrue(issubclass(factory, mod.ToCsv))
|
||||
|
||||
|
||||
class TestFromWuttaToCsv(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.FromWuttaToCsv(self.config, **kwargs)
|
||||
|
||||
def test_get_source_model(self):
|
||||
handler = self.make_handler()
|
||||
self.assertIs(handler.get_source_model(), self.app.model)
|
||||
4
tests/exporting/test_handlers.py
Normal file
4
tests/exporting/test_handlers.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
# nothing to test yet really, just ensuring coverage
|
||||
from wuttasync.exporting import handlers as mod
|
||||
|
|
@ -1,7 +1,9 @@
|
|||
#-*- coding: utf-8; -*-
|
||||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from sqlalchemy import orm
|
||||
|
||||
from wuttjamaican.testing import DataTestCase
|
||||
|
||||
from wuttasync.importing import base as mod, ImportHandler, Orientation
|
||||
|
|
@ -14,7 +16,7 @@ class TestImporter(DataTestCase):
|
|||
self.handler = ImportHandler(self.config)
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
kwargs.setdefault('handler', self.handler)
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return mod.Importer(self.config, **kwargs)
|
||||
|
||||
def test_constructor(self):
|
||||
|
|
@ -24,132 +26,371 @@ class TestImporter(DataTestCase):
|
|||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
# fields
|
||||
self.assertEqual(imp.supported_fields, ['name', 'value'])
|
||||
self.assertEqual(imp.simple_fields, ['name', 'value'])
|
||||
self.assertEqual(imp.fields, ['name', 'value'])
|
||||
self.assertEqual(imp.fields, ["name", "value"])
|
||||
|
||||
# orientation etc.
|
||||
self.assertEqual(imp.orientation, Orientation.IMPORT)
|
||||
self.assertEqual(imp.actioning, 'importing')
|
||||
self.assertEqual(imp.actioning, "importing")
|
||||
self.assertTrue(imp.create)
|
||||
self.assertTrue(imp.update)
|
||||
self.assertTrue(imp.delete)
|
||||
self.assertFalse(imp.dry_run)
|
||||
|
||||
def test_constructor_fields(self):
|
||||
model = self.app.model
|
||||
|
||||
# basic importer
|
||||
imp = self.make_importer(model_class=model.Setting, fields="name")
|
||||
self.assertEqual(imp.fields, ["name"])
|
||||
|
||||
def test_constructor_excluded_fields(self):
|
||||
model = self.app.model
|
||||
|
||||
# basic importer
|
||||
imp = self.make_importer(model_class=model.Setting, excluded_fields="value")
|
||||
self.assertEqual(imp.fields, ["name"])
|
||||
|
||||
def test_get_model_title(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
self.assertEqual(imp.get_model_title(), 'Setting')
|
||||
self.assertEqual(imp.get_model_title(), "Setting")
|
||||
imp.model_title = "SeTtInG"
|
||||
self.assertEqual(imp.get_model_title(), 'SeTtInG')
|
||||
self.assertEqual(imp.get_model_title(), "SeTtInG")
|
||||
|
||||
def test_get_simple_fields(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
self.assertEqual(imp.get_simple_fields(), ['name', 'value'])
|
||||
imp.simple_fields = ['name']
|
||||
self.assertEqual(imp.get_simple_fields(), ['name'])
|
||||
self.assertEqual(imp.get_simple_fields(), ["name", "value"])
|
||||
imp.simple_fields = ["name"]
|
||||
self.assertEqual(imp.get_simple_fields(), ["name"])
|
||||
|
||||
def test_get_supported_fields(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
self.assertEqual(imp.get_supported_fields(), ['name', 'value'])
|
||||
imp.supported_fields = ['name']
|
||||
self.assertEqual(imp.get_supported_fields(), ['name'])
|
||||
self.assertEqual(imp.get_supported_fields(), ["name", "value"])
|
||||
imp.supported_fields = ["name"]
|
||||
self.assertEqual(imp.get_supported_fields(), ["name"])
|
||||
|
||||
def test_get_fields(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
self.assertEqual(imp.get_fields(), ['name', 'value'])
|
||||
imp.fields = ['name']
|
||||
self.assertEqual(imp.get_fields(), ['name'])
|
||||
self.assertEqual(imp.get_fields(), ["name", "value"])
|
||||
imp.fields = ["name"]
|
||||
self.assertEqual(imp.get_fields(), ["name"])
|
||||
|
||||
def test_get_keys(self):
|
||||
model = self.app.model
|
||||
|
||||
# nb. get_keys() will cache the return value, so must
|
||||
# re-create importer for each test
|
||||
|
||||
# keys inspected from model by default
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
self.assertEqual(imp.get_keys(), ['name'])
|
||||
imp.key = 'value'
|
||||
self.assertEqual(imp.get_keys(), ['value'])
|
||||
self.assertEqual(imp.get_keys(), ["name"])
|
||||
imp = self.make_importer(model_class=model.User)
|
||||
self.assertEqual(imp.get_keys(), ["uuid"])
|
||||
|
||||
# object dict may define 'keys'
|
||||
imp = self.make_importer(model_class=model.User)
|
||||
with patch.dict(imp.__dict__, keys=["foo", "bar"]):
|
||||
self.assertEqual(imp.get_keys(), ["foo", "bar"])
|
||||
|
||||
# class may define 'keys'
|
||||
with patch.object(mod.Importer, "keys", new=["foo", "baz"], create=True):
|
||||
imp = self.make_importer(model_class=model.User)
|
||||
self.assertEqual(imp.get_keys(), ["foo", "baz"])
|
||||
|
||||
# class may define 'key'
|
||||
with patch.object(mod.Importer, "key", new="whatever", create=True):
|
||||
imp = self.make_importer(model_class=model.User)
|
||||
self.assertEqual(imp.get_keys(), ["whatever"])
|
||||
|
||||
# class may define 'default_keys'
|
||||
with patch.object(mod.Importer, "default_keys", new=["baz", "foo"]):
|
||||
imp = self.make_importer(model_class=model.User)
|
||||
self.assertEqual(imp.get_keys(), ["baz", "foo"])
|
||||
|
||||
def test_process_data(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||
imp = self.make_importer(
|
||||
model_class=model.Setting, caches_target=True, delete=True
|
||||
)
|
||||
|
||||
# empty data set / just for coverage
|
||||
with patch.object(imp, 'normalize_source_data') as normalize_source_data:
|
||||
normalize_source_data.return_value = []
|
||||
def make_cache():
|
||||
setting1 = model.Setting(name="foo1", value="bar1")
|
||||
setting2 = model.Setting(name="foo2", value="bar2")
|
||||
setting3 = model.Setting(name="foo3", value="bar3")
|
||||
cache = {
|
||||
("foo1",): {
|
||||
"object": setting1,
|
||||
"data": {"name": "foo1", "value": "bar1"},
|
||||
},
|
||||
("foo2",): {
|
||||
"object": setting2,
|
||||
"data": {"name": "foo2", "value": "bar2"},
|
||||
},
|
||||
("foo3",): {
|
||||
"object": setting3,
|
||||
"data": {"name": "foo3", "value": "bar3"},
|
||||
},
|
||||
}
|
||||
return cache
|
||||
|
||||
with patch.object(imp, 'get_target_cache') as get_target_cache:
|
||||
get_target_cache.return_value = {}
|
||||
# nb. delete always succeeds
|
||||
with patch.object(imp, "delete_target_object", return_value=True):
|
||||
|
||||
result = imp.process_data()
|
||||
self.assertEqual(result, ([], [], []))
|
||||
# create + update + delete all as needed
|
||||
with patch.object(imp, "get_target_cache", return_value=make_cache()):
|
||||
created, updated, deleted = imp.process_data(
|
||||
[
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
{"name": "foo5", "value": "BAR5"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 2)
|
||||
self.assertEqual(len(updated), 1)
|
||||
self.assertEqual(len(deleted), 2)
|
||||
|
||||
# same but with --max-total so delete gets skipped
|
||||
with patch.object(imp, "get_target_cache", return_value=make_cache()):
|
||||
with patch.object(imp, "max_total", new=3):
|
||||
created, updated, deleted = imp.process_data(
|
||||
[
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
{"name": "foo5", "value": "BAR5"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 2)
|
||||
self.assertEqual(len(updated), 1)
|
||||
self.assertEqual(len(deleted), 0)
|
||||
|
||||
# delete all if source data empty
|
||||
with patch.object(imp, "get_target_cache", return_value=make_cache()):
|
||||
created, updated, deleted = imp.process_data()
|
||||
self.assertEqual(len(created), 0)
|
||||
self.assertEqual(len(updated), 0)
|
||||
self.assertEqual(len(deleted), 3)
|
||||
|
||||
def test_do_create_update(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||
|
||||
def make_cache():
|
||||
setting1 = model.Setting(name="foo1", value="bar1")
|
||||
setting2 = model.Setting(name="foo2", value="bar2")
|
||||
cache = {
|
||||
("foo1",): {
|
||||
"object": setting1,
|
||||
"data": {"name": "foo1", "value": "bar1"},
|
||||
},
|
||||
("foo2",): {
|
||||
"object": setting2,
|
||||
"data": {"name": "foo2", "value": "bar2"},
|
||||
},
|
||||
}
|
||||
return cache
|
||||
|
||||
# change nothing if data matches
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "bar1"},
|
||||
{"name": "foo2", "value": "bar2"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 0)
|
||||
self.assertEqual(len(updated), 0)
|
||||
|
||||
# update all as needed
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "BAR1"},
|
||||
{"name": "foo2", "value": "BAR2"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 0)
|
||||
self.assertEqual(len(updated), 2)
|
||||
|
||||
# update all, with --max-update
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_update=1):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "BAR1"},
|
||||
{"name": "foo2", "value": "BAR2"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 0)
|
||||
self.assertEqual(len(updated), 1)
|
||||
|
||||
# update all, with --max-total
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "BAR1"},
|
||||
{"name": "foo2", "value": "BAR2"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 0)
|
||||
self.assertEqual(len(updated), 1)
|
||||
|
||||
# create all as needed
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "bar1"},
|
||||
{"name": "foo2", "value": "bar2"},
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 2)
|
||||
self.assertEqual(len(updated), 0)
|
||||
|
||||
# what happens when create gets skipped
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||
with patch.object(imp, "create_target_object", return_value=None):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "bar1"},
|
||||
{"name": "foo2", "value": "bar2"},
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 0)
|
||||
self.assertEqual(len(updated), 0)
|
||||
|
||||
# create all, with --max-create
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_create=1):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "bar1"},
|
||||
{"name": "foo2", "value": "bar2"},
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 1)
|
||||
self.assertEqual(len(updated), 0)
|
||||
|
||||
# create all, with --max-total
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "bar1"},
|
||||
{"name": "foo2", "value": "bar2"},
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 1)
|
||||
self.assertEqual(len(updated), 0)
|
||||
|
||||
# create + update all as needed
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "BAR1"},
|
||||
{"name": "foo2", "value": "BAR2"},
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
]
|
||||
)
|
||||
self.assertEqual(len(created), 2)
|
||||
self.assertEqual(len(updated), 2)
|
||||
|
||||
# create + update all, with --max-total
|
||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
||||
created, updated = imp.do_create_update(
|
||||
[
|
||||
{"name": "foo1", "value": "BAR1"},
|
||||
{"name": "foo2", "value": "BAR2"},
|
||||
{"name": "foo3", "value": "BAR3"},
|
||||
{"name": "foo4", "value": "BAR4"},
|
||||
]
|
||||
)
|
||||
# nb. foo1 is updated first
|
||||
self.assertEqual(len(created), 0)
|
||||
self.assertEqual(len(updated), 1)
|
||||
|
||||
def test_do_delete(self):
|
||||
model = self.app.model
|
||||
|
||||
# this requires a mock target cache
|
||||
setting1 = model.Setting(name="foo1", value="bar1")
|
||||
setting2 = model.Setting(name="foo2", value="bar2")
|
||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||
setting = model.Setting(name='foo', value='bar')
|
||||
imp.cached_target = {
|
||||
('foo',): {
|
||||
'object': setting,
|
||||
'data': {'name': 'foo', 'value': 'bar'},
|
||||
cache = {
|
||||
("foo1",): {
|
||||
"object": setting1,
|
||||
"data": {"name": "foo1", "value": "bar1"},
|
||||
},
|
||||
("foo2",): {
|
||||
"object": setting2,
|
||||
"data": {"name": "foo2", "value": "bar2"},
|
||||
},
|
||||
}
|
||||
|
||||
# will update the one record
|
||||
result = imp.do_create_update([{'name': 'foo', 'value': 'baz'}])
|
||||
self.assertIs(result[1][0][0], setting)
|
||||
self.assertEqual(result, ([], [(setting,
|
||||
# nb. target
|
||||
{'name': 'foo', 'value': 'bar'},
|
||||
# nb. source
|
||||
{'name': 'foo', 'value': 'baz'})]))
|
||||
self.assertEqual(setting.value, 'baz')
|
||||
with patch.object(imp, "delete_target_object") as delete_target_object:
|
||||
|
||||
# will create a new record
|
||||
result = imp.do_create_update([{'name': 'blah', 'value': 'zay'}])
|
||||
self.assertIsNot(result[0][0][0], setting)
|
||||
setting_new = result[0][0][0]
|
||||
self.assertEqual(result, ([(setting_new,
|
||||
# nb. source
|
||||
{'name': 'blah', 'value': 'zay'})],
|
||||
[]))
|
||||
self.assertEqual(setting_new.name, 'blah')
|
||||
self.assertEqual(setting_new.value, 'zay')
|
||||
# delete nothing if source has same keys
|
||||
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
||||
source_keys = set(imp.cached_target)
|
||||
result = imp.do_delete(source_keys)
|
||||
self.assertFalse(delete_target_object.called)
|
||||
self.assertEqual(result, [])
|
||||
|
||||
# but what if new record is *not* created
|
||||
with patch.object(imp, 'create_target_object', return_value=None):
|
||||
result = imp.do_create_update([{'name': 'another', 'value': 'one'}])
|
||||
self.assertEqual(result, ([], []))
|
||||
# delete both if source has no keys
|
||||
delete_target_object.reset_mock()
|
||||
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
||||
source_keys = set()
|
||||
result = imp.do_delete(source_keys)
|
||||
self.assertEqual(delete_target_object.call_count, 2)
|
||||
self.assertEqual(len(result), 2)
|
||||
|
||||
# def test_do_delete(self):
|
||||
# model = self.app.model
|
||||
# imp = self.make_importer(model_class=model.Setting)
|
||||
# delete just one if --max-delete was set
|
||||
delete_target_object.reset_mock()
|
||||
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
||||
source_keys = set()
|
||||
with patch.object(imp, "max_delete", new=1):
|
||||
result = imp.do_delete(source_keys)
|
||||
self.assertEqual(delete_target_object.call_count, 1)
|
||||
self.assertEqual(len(result), 1)
|
||||
|
||||
# delete just one if --max-total was set
|
||||
delete_target_object.reset_mock()
|
||||
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
||||
source_keys = set()
|
||||
with patch.object(imp, "max_total", new=1):
|
||||
result = imp.do_delete(source_keys)
|
||||
self.assertEqual(delete_target_object.call_count, 1)
|
||||
self.assertEqual(len(result), 1)
|
||||
|
||||
def test_get_record_key(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
record = {'name': 'foo', 'value': 'bar'}
|
||||
self.assertEqual(imp.get_record_key(record), ('foo',))
|
||||
imp.key = ('name', 'value')
|
||||
self.assertEqual(imp.get_record_key(record), ('foo', 'bar'))
|
||||
record = {"name": "foo", "value": "bar"}
|
||||
self.assertEqual(imp.get_record_key(record), ("foo",))
|
||||
imp.key = ("name", "value")
|
||||
self.assertEqual(imp.get_record_key(record), ("foo", "bar"))
|
||||
|
||||
def test_data_diffs(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
# 2 identical records
|
||||
rec1 = {'name': 'foo', 'value': 'bar'}
|
||||
rec2 = {'name': 'foo', 'value': 'bar'}
|
||||
rec1 = {"name": "foo", "value": "bar"}
|
||||
rec2 = {"name": "foo", "value": "bar"}
|
||||
result = imp.data_diffs(rec1, rec2)
|
||||
self.assertEqual(result, [])
|
||||
|
||||
# now they're different
|
||||
rec2['value'] = 'baz'
|
||||
rec2["value"] = "baz"
|
||||
result = imp.data_diffs(rec1, rec2)
|
||||
self.assertEqual(result, ['value'])
|
||||
self.assertEqual(result, ["value"])
|
||||
|
||||
def test_normalize_source_data(self):
|
||||
model = self.app.model
|
||||
|
|
@ -160,12 +401,28 @@ class TestImporter(DataTestCase):
|
|||
self.assertEqual(data, [])
|
||||
|
||||
# now with 1 record
|
||||
setting = model.Setting(name='foo', value='bar')
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
data = imp.normalize_source_data(source_objects=[setting])
|
||||
self.assertEqual(len(data), 1)
|
||||
# nb. default normalizer returns object as-is
|
||||
self.assertIs(data[0], setting)
|
||||
|
||||
def test_get_unique_data(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
setting1 = model.Setting(name="foo", value="bar1")
|
||||
setting2 = model.Setting(name="foo", value="bar2")
|
||||
|
||||
result = imp.get_unique_data([setting2, setting1])
|
||||
self.assertIsInstance(result, tuple)
|
||||
self.assertEqual(len(result), 2)
|
||||
self.assertIsInstance(result[0], list)
|
||||
self.assertEqual(len(result[0]), 1)
|
||||
self.assertIs(result[0][0], setting2) # nb. not setting1
|
||||
self.assertIsInstance(result[1], set)
|
||||
self.assertEqual(result[1], {("foo",)})
|
||||
|
||||
def test_get_source_objects(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
|
@ -174,11 +431,18 @@ class TestImporter(DataTestCase):
|
|||
def test_normalize_source_object_all(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
# normal
|
||||
setting = model.Setting()
|
||||
result = imp.normalize_source_object_all(setting)
|
||||
self.assertEqual(len(result), 1)
|
||||
self.assertIs(result[0], setting)
|
||||
|
||||
# unwanted (normalized is None)
|
||||
with patch.object(imp, "normalize_source_object", return_value=None):
|
||||
result = imp.normalize_source_object_all(setting)
|
||||
self.assertIsNone(result)
|
||||
|
||||
def test_normalize_source_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
|
@ -190,7 +454,7 @@ class TestImporter(DataTestCase):
|
|||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
with patch.object(imp, 'get_target_objects') as get_target_objects:
|
||||
with patch.object(imp, "get_target_objects") as get_target_objects:
|
||||
get_target_objects.return_value = []
|
||||
|
||||
# empty cache
|
||||
|
|
@ -198,16 +462,16 @@ class TestImporter(DataTestCase):
|
|||
self.assertEqual(cache, {})
|
||||
|
||||
# cache w/ one record
|
||||
setting = model.Setting(name='foo', value='bar')
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
get_target_objects.return_value = [setting]
|
||||
cache = imp.get_target_cache()
|
||||
self.assertEqual(len(cache), 1)
|
||||
self.assertIn(('foo',), cache)
|
||||
foo = cache[('foo',)]
|
||||
self.assertIn(("foo",), cache)
|
||||
foo = cache[("foo",)]
|
||||
self.assertEqual(len(foo), 2)
|
||||
self.assertEqual(set(foo), {'object', 'data'})
|
||||
self.assertIs(foo['object'], setting)
|
||||
self.assertEqual(foo['data'], {'name': 'foo', 'value': 'bar'})
|
||||
self.assertEqual(set(foo), {"object", "data"})
|
||||
self.assertIs(foo["object"], setting)
|
||||
self.assertEqual(foo["data"], {"name": "foo", "value": "bar"})
|
||||
|
||||
def test_get_target_objects(self):
|
||||
model = self.app.model
|
||||
|
|
@ -216,74 +480,122 @@ class TestImporter(DataTestCase):
|
|||
|
||||
def test_get_target_object(self):
|
||||
model = self.app.model
|
||||
setting = model.Setting(name='foo', value='bar')
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
|
||||
# nb. must mock up a target cache for this one
|
||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||
imp.cached_target = {
|
||||
('foo',): {
|
||||
'object': setting,
|
||||
'data': {'name': 'foo', 'value': 'bar'},
|
||||
("foo",): {
|
||||
"object": setting,
|
||||
"data": {"name": "foo", "value": "bar"},
|
||||
},
|
||||
}
|
||||
|
||||
# returns same object
|
||||
result = imp.get_target_object(('foo',))
|
||||
result = imp.get_target_object(("foo",))
|
||||
self.assertIs(result, setting)
|
||||
|
||||
# and one more time just for kicks
|
||||
result = imp.get_target_object(('foo',))
|
||||
result = imp.get_target_object(("foo",))
|
||||
self.assertIs(result, setting)
|
||||
|
||||
# but then not if cache flag is off
|
||||
imp.caches_target = False
|
||||
result = imp.get_target_object(('foo',))
|
||||
result = imp.get_target_object(("foo",))
|
||||
self.assertIsNone(result)
|
||||
|
||||
def test_normalize_target_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
setting = model.Setting(name='foo', value='bar')
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
data = imp.normalize_target_object(setting)
|
||||
self.assertEqual(data, {'name': 'foo', 'value': 'bar'})
|
||||
self.assertEqual(data, {"name": "foo", "value": "bar"})
|
||||
|
||||
def test_get_deletable_keys(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
# empty set by default (nb. no target cache)
|
||||
result = imp.get_deletable_keys()
|
||||
self.assertIsInstance(result, set)
|
||||
self.assertEqual(result, set())
|
||||
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
cache = {
|
||||
("foo",): {
|
||||
"object": setting,
|
||||
"data": {"name": "foo", "value": "bar"},
|
||||
},
|
||||
}
|
||||
|
||||
with patch.multiple(imp, create=True, caches_target=True, cached_target=cache):
|
||||
|
||||
# all are deletable by default
|
||||
result = imp.get_deletable_keys()
|
||||
self.assertEqual(result, {("foo",)})
|
||||
|
||||
# but some maybe can't be deleted
|
||||
with patch.object(imp, "can_delete_object", return_value=False):
|
||||
result = imp.get_deletable_keys()
|
||||
self.assertEqual(result, set())
|
||||
|
||||
def test_create_target_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
# basic
|
||||
setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar'})
|
||||
setting = imp.create_target_object(("foo",), {"name": "foo", "value": "bar"})
|
||||
self.assertIsInstance(setting, model.Setting)
|
||||
self.assertEqual(setting.name, 'foo')
|
||||
self.assertEqual(setting.value, 'bar')
|
||||
self.assertEqual(setting.name, "foo")
|
||||
self.assertEqual(setting.value, "bar")
|
||||
|
||||
# will skip if magic delete flag is set
|
||||
setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar',
|
||||
'__ignoreme__': True})
|
||||
setting = imp.create_target_object(
|
||||
("foo",), {"name": "foo", "value": "bar", "__ignoreme__": True}
|
||||
)
|
||||
self.assertIsNone(setting)
|
||||
|
||||
def test_make_empty_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
obj = imp.make_empty_object(('foo',))
|
||||
obj = imp.make_empty_object(("foo",))
|
||||
self.assertIsInstance(obj, model.Setting)
|
||||
self.assertEqual(obj.name, 'foo')
|
||||
self.assertEqual(obj.name, "foo")
|
||||
|
||||
def test_make_object(self):
|
||||
model = self.app.model
|
||||
|
||||
# normal
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
obj = imp.make_object()
|
||||
self.assertIsInstance(obj, model.Setting)
|
||||
|
||||
# no model_class
|
||||
imp = self.make_importer()
|
||||
self.assertRaises(AttributeError, imp.make_object)
|
||||
|
||||
def test_update_target_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
setting = model.Setting(name='foo')
|
||||
setting = model.Setting(name="foo")
|
||||
|
||||
# basic logic for updating *new* object
|
||||
obj = imp.update_target_object(setting, {'name': 'foo', 'value': 'bar'})
|
||||
obj = imp.update_target_object(setting, {"name": "foo", "value": "bar"})
|
||||
self.assertIs(obj, setting)
|
||||
self.assertEqual(setting.value, 'bar')
|
||||
self.assertEqual(setting.value, "bar")
|
||||
|
||||
def test_can_delete_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
setting = model.Setting(name="foo")
|
||||
self.assertTrue(imp.can_delete_object(setting))
|
||||
|
||||
def test_delete_target_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
setting = model.Setting(name="foo")
|
||||
# nb. default implementation always returns false
|
||||
self.assertFalse(imp.delete_target_object(setting))
|
||||
|
||||
|
||||
class TestFromFile(DataTestCase):
|
||||
|
|
@ -293,20 +605,20 @@ class TestFromFile(DataTestCase):
|
|||
self.handler = ImportHandler(self.config)
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
kwargs.setdefault('handler', self.handler)
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return mod.FromFile(self.config, **kwargs)
|
||||
|
||||
def test_setup(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
with patch.object(imp, 'open_input_file') as open_input_file:
|
||||
with patch.object(imp, "open_input_file") as open_input_file:
|
||||
imp.setup()
|
||||
open_input_file.assert_called_once_with()
|
||||
|
||||
def test_teardown(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
with patch.object(imp, 'close_input_file') as close_input_file:
|
||||
with patch.object(imp, "close_input_file") as close_input_file:
|
||||
imp.teardown()
|
||||
close_input_file.assert_called_once_with()
|
||||
|
||||
|
|
@ -315,13 +627,13 @@ class TestFromFile(DataTestCase):
|
|||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
# path is guessed from dir+filename
|
||||
path = self.write_file('data.txt', '')
|
||||
path = self.write_file("data.txt", "")
|
||||
imp.input_file_dir = self.tempdir
|
||||
imp.input_file_name = 'data.txt'
|
||||
imp.input_file_name = "data.txt"
|
||||
self.assertEqual(imp.get_input_file_path(), path)
|
||||
|
||||
# path can be explicitly set
|
||||
path2 = self.write_file('data2.txt', '')
|
||||
path2 = self.write_file("data2.txt", "")
|
||||
imp.input_file_path = path2
|
||||
self.assertEqual(imp.get_input_file_path(), path2)
|
||||
|
||||
|
|
@ -344,8 +656,8 @@ class TestFromFile(DataTestCase):
|
|||
self.assertRaises(NotImplementedError, imp.get_input_file_name)
|
||||
|
||||
# name can be explicitly set
|
||||
imp.input_file_name = 'data.txt'
|
||||
self.assertEqual(imp.get_input_file_name(), 'data.txt')
|
||||
imp.input_file_name = "data.txt"
|
||||
self.assertEqual(imp.get_input_file_name(), "data.txt")
|
||||
|
||||
def test_open_input_file(self):
|
||||
model = self.app.model
|
||||
|
|
@ -356,14 +668,114 @@ class TestFromFile(DataTestCase):
|
|||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
path = self.write_file('data.txt', '')
|
||||
with open(path, 'rt') as f:
|
||||
path = self.write_file("data.txt", "")
|
||||
with open(path, "rt") as f:
|
||||
imp.input_file = f
|
||||
with patch.object(f, 'close') as close:
|
||||
with patch.object(f, "close") as close:
|
||||
imp.close_input_file()
|
||||
close.assert_called_once_with()
|
||||
|
||||
|
||||
class TestQueryWrapper(DataTestCase):
|
||||
|
||||
def test_basic(self):
|
||||
model = self.app.model
|
||||
|
||||
p1 = model.Person(full_name="John Doe")
|
||||
self.session.add(p1)
|
||||
p2 = model.Person(full_name="Jane Doe")
|
||||
self.session.add(p2)
|
||||
self.session.commit()
|
||||
|
||||
# cannot get count via len(query), must use query.count()
|
||||
query = self.session.query(model.Person)
|
||||
self.assertEqual(query.count(), 2)
|
||||
self.assertRaises(TypeError, len, query)
|
||||
|
||||
# but can use len(wrapper)
|
||||
wrapper = mod.QueryWrapper(query)
|
||||
self.assertEqual(len(wrapper), 2)
|
||||
|
||||
# iter(wrapper) should work too
|
||||
people = [p for p in wrapper]
|
||||
self.assertEqual(people, [p1, p2])
|
||||
people = [p for p in iter(wrapper)]
|
||||
self.assertEqual(people, [p1, p2])
|
||||
people = [p for p in list(wrapper)]
|
||||
self.assertEqual(people, [p1, p2])
|
||||
|
||||
|
||||
class TestFromSqlalchemy(DataTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.setup_db()
|
||||
self.handler = ImportHandler(self.config)
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return mod.FromSqlalchemy(self.config, **kwargs)
|
||||
|
||||
def test_get_source_query(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(
|
||||
source_model_class=model.Upgrade, source_session=self.session
|
||||
)
|
||||
query = imp.get_source_query()
|
||||
self.assertIsInstance(query, orm.Query)
|
||||
froms = query.selectable.get_final_froms()
|
||||
self.assertEqual(len(froms), 1)
|
||||
table = froms[0]
|
||||
self.assertEqual(table.name, "upgrade")
|
||||
|
||||
def test_get_source_objects(self):
|
||||
model = self.app.model
|
||||
|
||||
user1 = model.User(username="fred")
|
||||
self.session.add(user1)
|
||||
user2 = model.User(username="bettie")
|
||||
self.session.add(user2)
|
||||
self.session.commit()
|
||||
|
||||
imp = self.make_importer(
|
||||
source_model_class=model.User, source_session=self.session
|
||||
)
|
||||
result = imp.get_source_objects()
|
||||
self.assertIsInstance(result, mod.QueryWrapper)
|
||||
self.assertEqual(len(result), 2)
|
||||
self.assertEqual(list(result), [user1, user2])
|
||||
|
||||
|
||||
class TestFromSqlalchemyMirror(DataTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.setup_db()
|
||||
self.handler = ImportHandler(self.config)
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return mod.FromSqlalchemyMirror(self.config, **kwargs)
|
||||
|
||||
def test_source_model_class(self):
|
||||
model = self.app.model
|
||||
|
||||
# source_model_class will mirror model_class
|
||||
imp = self.make_importer(model_class=model.Upgrade)
|
||||
self.assertIs(imp.model_class, model.Upgrade)
|
||||
self.assertIs(imp.source_model_class, model.Upgrade)
|
||||
|
||||
def test_normalize_source_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Upgrade)
|
||||
upgrade = model.Upgrade()
|
||||
|
||||
# normalize_source_object() should invoke normalize_target_object()
|
||||
with patch.object(imp, "normalize_target_object") as normalize_target_object:
|
||||
normalize_target_object.return_value = 42
|
||||
result = imp.normalize_source_object(upgrade)
|
||||
self.assertEqual(result, 42)
|
||||
normalize_target_object.assert_called_once_with(upgrade)
|
||||
|
||||
|
||||
class TestToSqlalchemy(DataTestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
|
@ -371,67 +783,89 @@ class TestToSqlalchemy(DataTestCase):
|
|||
self.handler = ImportHandler(self.config)
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
kwargs.setdefault('handler', self.handler)
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return mod.ToSqlalchemy(self.config, **kwargs)
|
||||
|
||||
def test_get_target_object(self):
|
||||
model = self.app.model
|
||||
setting = model.Setting(name='foo', value='bar')
|
||||
|
||||
# nb. must mock up a target cache for this one
|
||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||
imp.cached_target = {
|
||||
('foo',): {
|
||||
'object': setting,
|
||||
'data': {'name': 'foo', 'value': 'bar'},
|
||||
},
|
||||
}
|
||||
|
||||
# returns same object
|
||||
result = imp.get_target_object(('foo',))
|
||||
self.assertIs(result, setting)
|
||||
|
||||
# and one more time just for kicks
|
||||
result = imp.get_target_object(('foo',))
|
||||
self.assertIs(result, setting)
|
||||
|
||||
# now let's put a 2nd setting in the db
|
||||
setting2 = model.Setting(name='foo2', value='bar2')
|
||||
self.session.add(setting2)
|
||||
self.session.commit()
|
||||
|
||||
# then we should be able to fetch that via query
|
||||
imp.target_session = self.session
|
||||
result = imp.get_target_object(('foo2',))
|
||||
self.assertIsInstance(result, model.Setting)
|
||||
self.assertIs(result, setting2)
|
||||
|
||||
# but sometimes it will not be found
|
||||
result = imp.get_target_object(('foo3',))
|
||||
self.assertIsNone(result)
|
||||
|
||||
def test_create_target_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
||||
setting = model.Setting(name='foo', value='bar')
|
||||
|
||||
# new object is added to session
|
||||
setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar'})
|
||||
self.assertIsInstance(setting, model.Setting)
|
||||
self.assertEqual(setting.name, 'foo')
|
||||
self.assertEqual(setting.value, 'bar')
|
||||
self.assertIn(setting, self.session)
|
||||
|
||||
def test_get_target_objects(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
||||
|
||||
setting1 = model.Setting(name='foo', value='bar')
|
||||
setting1 = model.Setting(name="foo", value="bar")
|
||||
self.session.add(setting1)
|
||||
setting2 = model.Setting(name='foo2', value='bar2')
|
||||
setting2 = model.Setting(name="foo2", value="bar2")
|
||||
self.session.add(setting2)
|
||||
self.session.commit()
|
||||
|
||||
result = imp.get_target_objects()
|
||||
self.assertEqual(len(result), 2)
|
||||
self.assertEqual(set(result), {setting1, setting2})
|
||||
|
||||
def test_get_target_object(self):
|
||||
model = self.app.model
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
|
||||
# nb. must mock up a target cache for this one
|
||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||
imp.cached_target = {
|
||||
("foo",): {
|
||||
"object": setting,
|
||||
"data": {"name": "foo", "value": "bar"},
|
||||
},
|
||||
}
|
||||
|
||||
# returns same object
|
||||
result = imp.get_target_object(("foo",))
|
||||
self.assertIs(result, setting)
|
||||
|
||||
# and one more time just for kicks
|
||||
result = imp.get_target_object(("foo",))
|
||||
self.assertIs(result, setting)
|
||||
|
||||
# now let's put a 2nd setting in the db
|
||||
setting2 = model.Setting(name="foo2", value="bar2")
|
||||
self.session.add(setting2)
|
||||
self.session.commit()
|
||||
|
||||
# nb. disable target cache
|
||||
with patch.multiple(
|
||||
imp, create=True, target_session=self.session, caches_target=False
|
||||
):
|
||||
|
||||
# now we should be able to fetch that via query
|
||||
result = imp.get_target_object(("foo2",))
|
||||
self.assertIsInstance(result, model.Setting)
|
||||
self.assertIs(result, setting2)
|
||||
|
||||
# but sometimes it will not be found
|
||||
result = imp.get_target_object(("foo3",))
|
||||
self.assertIsNone(result)
|
||||
|
||||
def test_create_target_object(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
|
||||
# normal; new object is added to session
|
||||
setting = imp.create_target_object(("foo",), {"name": "foo", "value": "bar"})
|
||||
self.assertIsInstance(setting, model.Setting)
|
||||
self.assertEqual(setting.name, "foo")
|
||||
self.assertEqual(setting.value, "bar")
|
||||
self.assertIn(setting, self.session)
|
||||
|
||||
# unwanted; parent class does not create the object
|
||||
with patch.object(mod.Importer, "create_target_object", return_value=None):
|
||||
setting = imp.create_target_object(
|
||||
("foo",), {"name": "foo", "value": "bar"}
|
||||
)
|
||||
self.assertIsNone(setting)
|
||||
|
||||
def test_delete_target_object(self):
|
||||
model = self.app.model
|
||||
|
||||
setting = model.Setting(name="foo", value="bar")
|
||||
self.session.add(setting)
|
||||
|
||||
self.assertEqual(self.session.query(model.Setting).count(), 1)
|
||||
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
||||
imp.delete_target_object(setting)
|
||||
self.assertEqual(self.session.query(model.Setting).count(), 0)
|
||||
|
|
|
|||
|
|
@ -1,11 +1,23 @@
|
|||
#-*- coding: utf-8; -*-
|
||||
# -*- coding: utf-8; -*-
|
||||
|
||||
import csv
|
||||
import datetime
|
||||
import decimal
|
||||
import uuid as _uuid
|
||||
from unittest import TestCase
|
||||
from unittest.mock import patch
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
|
||||
from wuttjamaican.testing import DataTestCase
|
||||
|
||||
from wuttasync.importing import csv as mod, ImportHandler, ToSqlalchemyHandler, ToSqlalchemy
|
||||
from wuttasync.importing import (
|
||||
csv as mod,
|
||||
ImportHandler,
|
||||
ToSqlalchemyHandler,
|
||||
ToSqlalchemy,
|
||||
)
|
||||
|
||||
|
||||
class TestFromCsv(DataTestCase):
|
||||
|
|
@ -14,8 +26,17 @@ class TestFromCsv(DataTestCase):
|
|||
self.setup_db()
|
||||
self.handler = ImportHandler(self.config)
|
||||
|
||||
self.data_path = self.write_file(
|
||||
"data.txt",
|
||||
"""\
|
||||
name,value
|
||||
foo,bar
|
||||
foo2,bar2
|
||||
""",
|
||||
)
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
kwargs.setdefault('handler', self.handler)
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return mod.FromCsv(self.config, **kwargs)
|
||||
|
||||
def test_get_input_file_name(self):
|
||||
|
|
@ -23,85 +44,206 @@ class TestFromCsv(DataTestCase):
|
|||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
# name can be guessed
|
||||
self.assertEqual(imp.get_input_file_name(), 'Setting.csv')
|
||||
self.assertEqual(imp.get_input_file_name(), "Setting.csv")
|
||||
|
||||
# name can be explicitly set
|
||||
imp.input_file_name = 'data.txt'
|
||||
self.assertEqual(imp.get_input_file_name(), 'data.txt')
|
||||
imp.input_file_name = "data.txt"
|
||||
self.assertEqual(imp.get_input_file_name(), "data.txt")
|
||||
|
||||
def test_open_input_file(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
path = self.write_file('data.txt', '')
|
||||
imp.input_file_path = path
|
||||
# normal operation, input file includes all fields
|
||||
imp = self.make_importer(
|
||||
model_class=model.Setting, input_file_path=self.data_path
|
||||
)
|
||||
self.assertEqual(imp.fields, ["name", "value"])
|
||||
imp.open_input_file()
|
||||
self.assertEqual(imp.input_file.name, path)
|
||||
self.assertEqual(imp.input_file.name, self.data_path)
|
||||
self.assertIsInstance(imp.input_reader, csv.DictReader)
|
||||
self.assertEqual(imp.fields, ["name", "value"])
|
||||
imp.input_file.close()
|
||||
|
||||
# this file is missing a field, plus we'll pretend more are
|
||||
# supported - but should wind up with just the one field
|
||||
missing = self.write_file("missing.txt", "name")
|
||||
imp = self.make_importer(model_class=model.Setting, input_file_path=missing)
|
||||
imp.fields.extend(["lots", "more"])
|
||||
self.assertEqual(imp.fields, ["name", "value", "lots", "more"])
|
||||
imp.open_input_file()
|
||||
self.assertEqual(imp.fields, ["name"])
|
||||
imp.input_file.close()
|
||||
|
||||
# and what happens when no known fields are found
|
||||
bogus = self.write_file("bogus.txt", "blarg")
|
||||
imp = self.make_importer(model_class=model.Setting, input_file_path=bogus)
|
||||
self.assertEqual(imp.fields, ["name", "value"])
|
||||
self.assertRaises(ValueError, imp.open_input_file)
|
||||
|
||||
def test_close_input_file(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
path = self.write_file('data.txt', '')
|
||||
imp.input_file_path = path
|
||||
imp.input_file_path = self.data_path
|
||||
imp.open_input_file()
|
||||
imp.close_input_file()
|
||||
self.assertFalse(hasattr(imp, 'input_reader'))
|
||||
self.assertFalse(hasattr(imp, 'input_file'))
|
||||
self.assertIsNone(imp.input_reader)
|
||||
self.assertIsNone(imp.input_file)
|
||||
|
||||
def test_get_source_objects(self):
|
||||
model = self.app.model
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
|
||||
path = self.write_file('data.csv', """\
|
||||
name,value
|
||||
foo,bar
|
||||
foo2,bar2
|
||||
""")
|
||||
imp.input_file_path = path
|
||||
imp.input_file_path = self.data_path
|
||||
imp.open_input_file()
|
||||
objects = imp.get_source_objects()
|
||||
imp.close_input_file()
|
||||
self.assertEqual(len(objects), 2)
|
||||
self.assertEqual(objects[0], {'name': 'foo', 'value': 'bar'})
|
||||
self.assertEqual(objects[1], {'name': 'foo2', 'value': 'bar2'})
|
||||
self.assertEqual(objects[0], {"name": "foo", "value": "bar"})
|
||||
self.assertEqual(objects[1], {"name": "foo2", "value": "bar2"})
|
||||
|
||||
|
||||
class MockMixinHandler(mod.FromCsvToSqlalchemyMixin, ToSqlalchemyHandler):
|
||||
ToImporterBase = ToSqlalchemy
|
||||
class MockMixinImporter(mod.FromCsvToSqlalchemyMixin, mod.FromCsv, ToSqlalchemy):
|
||||
pass
|
||||
|
||||
|
||||
class TestFromCsvToSqlalchemyMixin(DataTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.setup_db()
|
||||
self.handler = ImportHandler(self.config)
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
kwargs.setdefault("handler", self.handler)
|
||||
return MockMixinImporter(self.config, **kwargs)
|
||||
|
||||
def test_constructor(self):
|
||||
model = self.app.model
|
||||
|
||||
# no coercers
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
self.assertEqual(imp.coercers, {})
|
||||
|
||||
# typical
|
||||
imp = self.make_importer(
|
||||
model_class=model.Upgrade, coercers=mod.make_coercers(model.Setting)
|
||||
)
|
||||
self.assertEqual(len(imp.coercers), 2)
|
||||
|
||||
def test_normalize_source_object(self):
|
||||
model = self.app.model
|
||||
|
||||
# no uuid keys
|
||||
imp = self.make_importer(model_class=model.Setting)
|
||||
result = imp.normalize_source_object({"name": "foo", "value": "bar"})
|
||||
self.assertEqual(result, {"name": "foo", "value": "bar"})
|
||||
|
||||
# source has proper UUID
|
||||
imp = self.make_importer(
|
||||
model_class=model.Upgrade,
|
||||
fields=["uuid", "description"],
|
||||
coercers=mod.make_coercers(model.Upgrade),
|
||||
)
|
||||
result = imp.normalize_source_object(
|
||||
{
|
||||
"uuid": "06753693-d892-77f0-8000-ce71bf7ebbba",
|
||||
"description": "testing",
|
||||
}
|
||||
)
|
||||
self.assertEqual(
|
||||
result,
|
||||
{
|
||||
"uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
|
||||
"description": "testing",
|
||||
},
|
||||
)
|
||||
|
||||
# source has string uuid
|
||||
imp = self.make_importer(
|
||||
model_class=model.Upgrade,
|
||||
fields=["uuid", "description"],
|
||||
coercers=mod.make_coercers(model.Upgrade),
|
||||
)
|
||||
result = imp.normalize_source_object(
|
||||
{"uuid": "06753693d89277f08000ce71bf7ebbba", "description": "testing"}
|
||||
)
|
||||
self.assertEqual(
|
||||
result,
|
||||
{
|
||||
"uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
|
||||
"description": "testing",
|
||||
},
|
||||
)
|
||||
|
||||
# source has boolean true/false
|
||||
imp = self.make_importer(
|
||||
model_class=model.Upgrade,
|
||||
fields=["uuid", "executing"],
|
||||
coercers=mod.make_coercers(model.Upgrade),
|
||||
)
|
||||
result = imp.normalize_source_object(
|
||||
{"uuid": "06753693d89277f08000ce71bf7ebbba", "executing": "True"}
|
||||
)
|
||||
self.assertEqual(
|
||||
result,
|
||||
{
|
||||
"uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
|
||||
"executing": True,
|
||||
},
|
||||
)
|
||||
result = imp.normalize_source_object(
|
||||
{"uuid": "06753693d89277f08000ce71bf7ebbba", "executing": "false"}
|
||||
)
|
||||
self.assertEqual(
|
||||
result,
|
||||
{
|
||||
"uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
|
||||
"executing": False,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class MockMixinHandler(mod.FromCsvToSqlalchemyHandlerMixin, ToSqlalchemyHandler):
|
||||
ToImporterBase = ToSqlalchemy
|
||||
|
||||
|
||||
class TestFromCsvToSqlalchemyHandlerMixin(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return MockMixinHandler(self.config, **kwargs)
|
||||
|
||||
def test_get_target_model(self):
|
||||
with patch.object(mod.FromCsvToSqlalchemyMixin, 'define_importers', return_value={}):
|
||||
with patch.object(
|
||||
mod.FromCsvToSqlalchemyHandlerMixin, "define_importers", return_value={}
|
||||
):
|
||||
handler = self.make_handler()
|
||||
self.assertRaises(NotImplementedError, handler.get_target_model)
|
||||
|
||||
def test_define_importers(self):
|
||||
model = self.app.model
|
||||
with patch.object(mod.FromCsvToSqlalchemyMixin, 'get_target_model', return_value=model):
|
||||
with patch.object(
|
||||
mod.FromCsvToSqlalchemyHandlerMixin, "get_target_model", return_value=model
|
||||
):
|
||||
handler = self.make_handler()
|
||||
importers = handler.define_importers()
|
||||
self.assertIn('Setting', importers)
|
||||
self.assertTrue(issubclass(importers['Setting'], mod.FromCsv))
|
||||
self.assertTrue(issubclass(importers['Setting'], ToSqlalchemy))
|
||||
self.assertIn('User', importers)
|
||||
self.assertIn('Person', importers)
|
||||
self.assertIn('Role', importers)
|
||||
self.assertIn("Setting", importers)
|
||||
self.assertTrue(issubclass(importers["Setting"], mod.FromCsv))
|
||||
self.assertTrue(issubclass(importers["Setting"], ToSqlalchemy))
|
||||
self.assertIn("User", importers)
|
||||
self.assertIn("Person", importers)
|
||||
self.assertIn("Role", importers)
|
||||
|
||||
def test_make_importer_factory(self):
|
||||
model = self.app.model
|
||||
with patch.object(mod.FromCsvToSqlalchemyMixin, 'define_importers', return_value={}):
|
||||
with patch.object(
|
||||
mod.FromCsvToSqlalchemyHandlerMixin, "define_importers", return_value={}
|
||||
):
|
||||
handler = self.make_handler()
|
||||
factory = handler.make_importer_factory(model.Setting, 'Setting')
|
||||
factory = handler.make_importer_factory(model.Setting, "Setting")
|
||||
self.assertTrue(issubclass(factory, mod.FromCsv))
|
||||
self.assertTrue(issubclass(factory, ToSqlalchemy))
|
||||
self.assertTrue(isinstance(factory.coercers, dict))
|
||||
|
||||
|
||||
class TestFromCsvToWutta(DataTestCase):
|
||||
|
|
@ -112,3 +254,203 @@ class TestFromCsvToWutta(DataTestCase):
|
|||
def test_get_target_model(self):
|
||||
handler = self.make_handler()
|
||||
self.assertIs(handler.get_target_model(), self.app.model)
|
||||
|
||||
|
||||
Base = orm.declarative_base()
|
||||
|
||||
|
||||
class Example(Base):
|
||||
__tablename__ = "example"
|
||||
|
||||
id = sa.Column(sa.Integer(), primary_key=True, nullable=False)
|
||||
optional_id = sa.Column(sa.Integer(), nullable=True)
|
||||
|
||||
name = sa.Column(sa.String(length=100), nullable=False)
|
||||
optional_name = sa.Column(sa.String(length=100), nullable=True)
|
||||
|
||||
flag = sa.Column(sa.Boolean(), nullable=False)
|
||||
optional_flag = sa.Column(sa.Boolean(), nullable=True)
|
||||
|
||||
date = sa.Column(sa.Date(), nullable=False)
|
||||
optional_date = sa.Column(sa.Date(), nullable=True)
|
||||
|
||||
dt = sa.Column(sa.DateTime(), nullable=False)
|
||||
optional_dt = sa.Column(sa.DateTime(), nullable=True)
|
||||
|
||||
dec = sa.Column(sa.Numeric(scale=8, precision=2), nullable=False)
|
||||
optional_dec = sa.Column(sa.Numeric(scale=8, precision=2), nullable=True)
|
||||
|
||||
flt = sa.Column(sa.Float(), nullable=False)
|
||||
optional_flt = sa.Column(sa.Float(), nullable=True)
|
||||
|
||||
|
||||
class TestMakeCoercers(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
coercers = mod.make_coercers(Example)
|
||||
self.assertEqual(len(coercers), 14)
|
||||
|
||||
self.assertIs(coercers["id"], mod.coerce_integer)
|
||||
self.assertIs(coercers["optional_id"], mod.coerce_integer)
|
||||
self.assertIs(coercers["name"], mod.coerce_noop)
|
||||
self.assertIs(coercers["optional_name"], mod.coerce_string_nullable)
|
||||
self.assertIs(coercers["flag"], mod.coerce_boolean)
|
||||
self.assertIs(coercers["optional_flag"], mod.coerce_boolean_nullable)
|
||||
self.assertIs(coercers["date"], mod.coerce_date)
|
||||
self.assertIs(coercers["optional_date"], mod.coerce_date)
|
||||
self.assertIs(coercers["dt"], mod.coerce_datetime)
|
||||
self.assertIs(coercers["optional_dt"], mod.coerce_datetime)
|
||||
self.assertIs(coercers["dec"], mod.coerce_decimal)
|
||||
self.assertIs(coercers["optional_dec"], mod.coerce_decimal)
|
||||
self.assertIs(coercers["flt"], mod.coerce_float)
|
||||
self.assertIs(coercers["optional_flt"], mod.coerce_float)
|
||||
|
||||
|
||||
class TestMakeCoercer(TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
func = mod.make_coercer(Example.id)
|
||||
self.assertIs(func, mod.coerce_integer)
|
||||
|
||||
func = mod.make_coercer(Example.optional_id)
|
||||
self.assertIs(func, mod.coerce_integer)
|
||||
|
||||
func = mod.make_coercer(Example.name)
|
||||
self.assertIs(func, mod.coerce_noop)
|
||||
|
||||
func = mod.make_coercer(Example.optional_name)
|
||||
self.assertIs(func, mod.coerce_string_nullable)
|
||||
|
||||
func = mod.make_coercer(Example.flag)
|
||||
self.assertIs(func, mod.coerce_boolean)
|
||||
|
||||
func = mod.make_coercer(Example.optional_flag)
|
||||
self.assertIs(func, mod.coerce_boolean_nullable)
|
||||
|
||||
func = mod.make_coercer(Example.date)
|
||||
self.assertIs(func, mod.coerce_date)
|
||||
|
||||
func = mod.make_coercer(Example.optional_date)
|
||||
self.assertIs(func, mod.coerce_date)
|
||||
|
||||
func = mod.make_coercer(Example.dt)
|
||||
self.assertIs(func, mod.coerce_datetime)
|
||||
|
||||
func = mod.make_coercer(Example.optional_dt)
|
||||
self.assertIs(func, mod.coerce_datetime)
|
||||
|
||||
func = mod.make_coercer(Example.dec)
|
||||
self.assertIs(func, mod.coerce_decimal)
|
||||
|
||||
func = mod.make_coercer(Example.optional_dec)
|
||||
self.assertIs(func, mod.coerce_decimal)
|
||||
|
||||
func = mod.make_coercer(Example.flt)
|
||||
self.assertIs(func, mod.coerce_float)
|
||||
|
||||
func = mod.make_coercer(Example.optional_flt)
|
||||
self.assertIs(func, mod.coerce_float)
|
||||
|
||||
|
||||
class TestCoercers(TestCase):
|
||||
|
||||
def test_coerce_boolean(self):
|
||||
self.assertTrue(mod.coerce_boolean("true"))
|
||||
self.assertTrue(mod.coerce_boolean("1"))
|
||||
self.assertTrue(mod.coerce_boolean("yes"))
|
||||
|
||||
self.assertFalse(mod.coerce_boolean("false"))
|
||||
self.assertFalse(mod.coerce_boolean("0"))
|
||||
self.assertFalse(mod.coerce_boolean("no"))
|
||||
|
||||
self.assertFalse(mod.coerce_boolean(""))
|
||||
|
||||
def test_coerce_boolean_nullable(self):
|
||||
self.assertTrue(mod.coerce_boolean_nullable("true"))
|
||||
self.assertTrue(mod.coerce_boolean_nullable("1"))
|
||||
self.assertTrue(mod.coerce_boolean_nullable("yes"))
|
||||
|
||||
self.assertFalse(mod.coerce_boolean_nullable("false"))
|
||||
self.assertFalse(mod.coerce_boolean_nullable("0"))
|
||||
self.assertFalse(mod.coerce_boolean_nullable("no"))
|
||||
|
||||
self.assertIsNone(mod.coerce_boolean_nullable(""))
|
||||
|
||||
def test_coerce_date(self):
|
||||
self.assertIsNone(mod.coerce_date(""))
|
||||
|
||||
value = mod.coerce_date("2025-10-19")
|
||||
self.assertIsInstance(value, datetime.date)
|
||||
self.assertEqual(value, datetime.date(2025, 10, 19))
|
||||
|
||||
self.assertRaises(ValueError, mod.coerce_date, "XXX")
|
||||
|
||||
def test_coerce_datetime(self):
|
||||
self.assertIsNone(mod.coerce_datetime(""))
|
||||
|
||||
value = mod.coerce_datetime("2025-10-19 20:56:00")
|
||||
self.assertIsInstance(value, datetime.datetime)
|
||||
self.assertEqual(value, datetime.datetime(2025, 10, 19, 20, 56))
|
||||
|
||||
value = mod.coerce_datetime("2025-10-19 20:56:00.1234")
|
||||
self.assertIsInstance(value, datetime.datetime)
|
||||
self.assertEqual(value, datetime.datetime(2025, 10, 19, 20, 56, 0, 123400))
|
||||
|
||||
self.assertRaises(ValueError, mod.coerce_datetime, "XXX")
|
||||
|
||||
def test_coerce_decimal(self):
|
||||
self.assertIsNone(mod.coerce_decimal(""))
|
||||
|
||||
value = mod.coerce_decimal("42")
|
||||
self.assertIsInstance(value, decimal.Decimal)
|
||||
self.assertEqual(value, decimal.Decimal("42.0"))
|
||||
self.assertEqual(value, 42)
|
||||
|
||||
value = mod.coerce_decimal("42.0")
|
||||
self.assertIsInstance(value, decimal.Decimal)
|
||||
self.assertEqual(value, decimal.Decimal("42.0"))
|
||||
self.assertEqual(value, 42)
|
||||
|
||||
self.assertRaises(decimal.InvalidOperation, mod.coerce_decimal, "XXX")
|
||||
|
||||
def test_coerce_float(self):
|
||||
self.assertEqual(mod.coerce_float("42"), 42.0)
|
||||
self.assertEqual(mod.coerce_float("42.0"), 42.0)
|
||||
|
||||
self.assertIsNone(mod.coerce_float(""))
|
||||
|
||||
self.assertRaises(ValueError, mod.coerce_float, "XXX")
|
||||
|
||||
def test_coerce_integer(self):
|
||||
self.assertEqual(mod.coerce_integer("42"), 42)
|
||||
self.assertRaises(ValueError, mod.coerce_integer, "42.0")
|
||||
|
||||
self.assertIsNone(mod.coerce_integer(""))
|
||||
|
||||
self.assertRaises(ValueError, mod.coerce_integer, "XXX")
|
||||
|
||||
def test_coerce_noop(self):
|
||||
self.assertEqual(mod.coerce_noop(""), "")
|
||||
|
||||
self.assertEqual(mod.coerce_noop("42"), "42")
|
||||
self.assertEqual(mod.coerce_noop("XXX"), "XXX")
|
||||
|
||||
def test_coerce_string_nullable(self):
|
||||
self.assertIsNone(mod.coerce_string_nullable(""))
|
||||
|
||||
self.assertEqual(mod.coerce_string_nullable("42"), "42")
|
||||
self.assertEqual(mod.coerce_string_nullable("XXX"), "XXX")
|
||||
|
||||
def test_coerce_uuid(self):
|
||||
self.assertIsNone(mod.coerce_uuid(""))
|
||||
|
||||
uuid = mod.coerce_uuid("06753693d89277f08000ce71bf7ebbba")
|
||||
self.assertIsInstance(uuid, _uuid.UUID)
|
||||
self.assertEqual(uuid, _uuid.UUID("06753693d89277f08000ce71bf7ebbba"))
|
||||
self.assertEqual(uuid.hex, "06753693d89277f08000ce71bf7ebbba")
|
||||
|
||||
uuid = mod.coerce_uuid("06753693-d892-77f0-8000-ce71bf7ebbba")
|
||||
self.assertIsInstance(uuid, _uuid.UUID)
|
||||
self.assertEqual(uuid, _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"))
|
||||
self.assertEqual(str(uuid), "06753693-d892-77f0-8000-ce71bf7ebbba")
|
||||
self.assertEqual(uuid.hex, "06753693d89277f08000ce71bf7ebbba")
|
||||
|
|
|
|||
|
|
@ -1,11 +1,18 @@
|
|||
#-*- coding: utf-8; -*-
|
||||
# -*- coding: utf-8; -*-
|
||||
|
||||
from collections import OrderedDict
|
||||
from unittest.mock import patch
|
||||
from uuid import UUID
|
||||
|
||||
from wuttjamaican.testing import DataTestCase
|
||||
|
||||
from wuttasync.importing import handlers as mod, Importer, ToSqlalchemy
|
||||
from wuttasync.importing.wutta import FromWuttaToWuttaImport
|
||||
|
||||
|
||||
class FromFooToBar(mod.ImportHandler):
|
||||
source_key = "foo"
|
||||
target_key = "bar"
|
||||
|
||||
|
||||
class TestImportHandler(DataTestCase):
|
||||
|
|
@ -13,38 +20,58 @@ class TestImportHandler(DataTestCase):
|
|||
def make_handler(self, **kwargs):
|
||||
return mod.ImportHandler(self.config, **kwargs)
|
||||
|
||||
def test_constructor(self):
|
||||
|
||||
# attr missing by default
|
||||
handler = self.make_handler()
|
||||
self.assertFalse(hasattr(handler, "some_foo_attr"))
|
||||
|
||||
# but constructor can set it
|
||||
handler = self.make_handler(some_foo_attr="bar")
|
||||
self.assertTrue(hasattr(handler, "some_foo_attr"))
|
||||
self.assertEqual(handler.some_foo_attr, "bar")
|
||||
|
||||
def test_str(self):
|
||||
handler = self.make_handler()
|
||||
self.assertEqual(str(handler), "None → None")
|
||||
|
||||
handler.source_title = 'CSV'
|
||||
handler.target_title = 'Wutta'
|
||||
handler.source_title = "CSV"
|
||||
handler.target_title = "Wutta"
|
||||
self.assertEqual(str(handler), "CSV → Wutta")
|
||||
|
||||
def test_actioner(self):
|
||||
handler = self.make_handler()
|
||||
self.assertEqual(handler.actioner, "importer")
|
||||
|
||||
handler.orientation = mod.Orientation.EXPORT
|
||||
self.assertEqual(handler.actioner, "exporter")
|
||||
|
||||
def test_actioning(self):
|
||||
handler = self.make_handler()
|
||||
self.assertEqual(handler.actioning, 'importing')
|
||||
self.assertEqual(handler.actioning, "importing")
|
||||
|
||||
handler.orientation = mod.Orientation.EXPORT
|
||||
self.assertEqual(handler.actioning, 'exporting')
|
||||
self.assertEqual(handler.actioning, "exporting")
|
||||
|
||||
def test_get_key(self):
|
||||
handler = self.make_handler()
|
||||
self.assertEqual(handler.get_key(), 'to_None.from_None.import')
|
||||
self.assertEqual(handler.get_key(), "import.to_None.from_None")
|
||||
|
||||
with patch.multiple(mod.ImportHandler, source_key='csv', target_key='wutta'):
|
||||
self.assertEqual(handler.get_key(), 'to_wutta.from_csv.import')
|
||||
with patch.multiple(mod.ImportHandler, source_key="csv", target_key="wutta"):
|
||||
self.assertEqual(handler.get_key(), "import.to_wutta.from_csv")
|
||||
|
||||
def test_get_spec(self):
|
||||
handler = self.make_handler()
|
||||
self.assertEqual(handler.get_spec(), 'wuttasync.importing.handlers:ImportHandler')
|
||||
self.assertEqual(
|
||||
handler.get_spec(), "wuttasync.importing.handlers:ImportHandler"
|
||||
)
|
||||
|
||||
def test_get_title(self):
|
||||
handler = self.make_handler()
|
||||
self.assertEqual(handler.get_title(), "None → None")
|
||||
|
||||
handler.source_title = 'CSV'
|
||||
handler.target_title = 'Wutta'
|
||||
handler.source_title = "CSV"
|
||||
handler.target_title = "Wutta"
|
||||
self.assertEqual(handler.get_title(), "CSV → Wutta")
|
||||
|
||||
def test_get_source_title(self):
|
||||
|
|
@ -54,16 +81,16 @@ class TestImportHandler(DataTestCase):
|
|||
self.assertIsNone(handler.get_source_title())
|
||||
|
||||
# which is really using source_key as fallback
|
||||
handler.source_key = 'csv'
|
||||
self.assertEqual(handler.get_source_title(), 'csv')
|
||||
handler.source_key = "csv"
|
||||
self.assertEqual(handler.get_source_title(), "csv")
|
||||
|
||||
# can also use (defined) generic fallback
|
||||
handler.generic_source_title = 'CSV'
|
||||
self.assertEqual(handler.get_source_title(), 'CSV')
|
||||
handler.generic_source_title = "CSV"
|
||||
self.assertEqual(handler.get_source_title(), "CSV")
|
||||
|
||||
# or can set explicitly
|
||||
handler.source_title = 'XXX'
|
||||
self.assertEqual(handler.get_source_title(), 'XXX')
|
||||
handler.source_title = "XXX"
|
||||
self.assertEqual(handler.get_source_title(), "XXX")
|
||||
|
||||
def test_get_target_title(self):
|
||||
handler = self.make_handler()
|
||||
|
|
@ -72,23 +99,23 @@ class TestImportHandler(DataTestCase):
|
|||
self.assertIsNone(handler.get_target_title())
|
||||
|
||||
# which is really using target_key as fallback
|
||||
handler.target_key = 'wutta'
|
||||
self.assertEqual(handler.get_target_title(), 'wutta')
|
||||
handler.target_key = "wutta"
|
||||
self.assertEqual(handler.get_target_title(), "wutta")
|
||||
|
||||
# can also use (defined) generic fallback
|
||||
handler.generic_target_title = 'Wutta'
|
||||
self.assertEqual(handler.get_target_title(), 'Wutta')
|
||||
handler.generic_target_title = "Wutta"
|
||||
self.assertEqual(handler.get_target_title(), "Wutta")
|
||||
|
||||
# or can set explicitly
|
||||
handler.target_title = 'XXX'
|
||||
self.assertEqual(handler.get_target_title(), 'XXX')
|
||||
handler.target_title = "XXX"
|
||||
self.assertEqual(handler.get_target_title(), "XXX")
|
||||
|
||||
def test_process_data(self):
|
||||
model = self.app.model
|
||||
handler = self.make_handler()
|
||||
|
||||
# empy/no-op should commit (not fail)
|
||||
with patch.object(handler, 'commit_transaction') as commit_transaction:
|
||||
with patch.object(handler, "commit_transaction") as commit_transaction:
|
||||
handler.process_data()
|
||||
commit_transaction.assert_called_once_with()
|
||||
|
||||
|
|
@ -96,8 +123,8 @@ class TestImportHandler(DataTestCase):
|
|||
handler.process_data()
|
||||
|
||||
# dry-run should rollback
|
||||
with patch.object(handler, 'commit_transaction') as commit_transaction:
|
||||
with patch.object(handler, 'rollback_transaction') as rollback_transaction:
|
||||
with patch.object(handler, "commit_transaction") as commit_transaction:
|
||||
with patch.object(handler, "rollback_transaction") as rollback_transaction:
|
||||
handler.process_data(dry_run=True)
|
||||
self.assertFalse(commit_transaction.called)
|
||||
rollback_transaction.assert_called_once_with()
|
||||
|
|
@ -106,36 +133,38 @@ class TestImportHandler(DataTestCase):
|
|||
handler.process_data(dry_run=True)
|
||||
|
||||
# outright error should cause rollback
|
||||
with patch.object(handler, 'commit_transaction') as commit_transaction:
|
||||
with patch.object(handler, 'rollback_transaction') as rollback_transaction:
|
||||
with patch.object(handler, 'get_importer', side_effect=RuntimeError):
|
||||
self.assertRaises(RuntimeError, handler.process_data, 'BlahBlah')
|
||||
with patch.object(handler, "commit_transaction") as commit_transaction:
|
||||
with patch.object(handler, "rollback_transaction") as rollback_transaction:
|
||||
with patch.object(handler, "get_importer", side_effect=RuntimeError):
|
||||
self.assertRaises(RuntimeError, handler.process_data, "BlahBlah")
|
||||
self.assertFalse(commit_transaction.called)
|
||||
rollback_transaction.assert_called_once_with()
|
||||
|
||||
# fake importer class/data
|
||||
mock_source_objects = [{'name': 'foo', 'value': 'bar'}]
|
||||
mock_source_objects = [{"name": "foo", "value": "bar"}]
|
||||
|
||||
class SettingImporter(ToSqlalchemy):
|
||||
model_class = model.Setting
|
||||
target_session = self.session
|
||||
|
||||
def get_source_objects(self):
|
||||
return mock_source_objects
|
||||
|
||||
# now for a "normal" one
|
||||
handler.importers['Setting'] = SettingImporter
|
||||
handler.importers["Setting"] = SettingImporter
|
||||
self.assertEqual(self.session.query(model.Setting).count(), 0)
|
||||
handler.process_data('Setting')
|
||||
handler.process_data("Setting")
|
||||
self.assertEqual(self.session.query(model.Setting).count(), 1)
|
||||
|
||||
# then add another mock record
|
||||
mock_source_objects.append({'name': 'foo2', 'value': 'bar2'})
|
||||
handler.process_data('Setting')
|
||||
mock_source_objects.append({"name": "foo2", "value": "bar2"})
|
||||
handler.process_data("Setting")
|
||||
self.assertEqual(self.session.query(model.Setting).count(), 2)
|
||||
|
||||
# nb. even if dry-run, record is added
|
||||
# (rollback would happen later in that case)
|
||||
mock_source_objects.append({'name': 'foo3', 'value': 'bar3'})
|
||||
handler.process_data('Setting', dry_run=True)
|
||||
mock_source_objects.append({"name": "foo3", "value": "bar3"})
|
||||
handler.process_data("Setting", dry_run=True)
|
||||
self.assertEqual(self.session.query(model.Setting).count(), 3)
|
||||
|
||||
def test_consume_kwargs(self):
|
||||
|
|
@ -145,15 +174,57 @@ class TestImportHandler(DataTestCase):
|
|||
kw = {}
|
||||
result = handler.consume_kwargs(kw)
|
||||
self.assertIs(result, kw)
|
||||
self.assertEqual(result, {})
|
||||
|
||||
# captures dry-run flag
|
||||
# dry_run (not consumed)
|
||||
self.assertFalse(handler.dry_run)
|
||||
kw['dry_run'] = True
|
||||
kw["dry_run"] = True
|
||||
result = handler.consume_kwargs(kw)
|
||||
self.assertIs(result, kw)
|
||||
self.assertTrue(kw['dry_run'])
|
||||
self.assertIn("dry_run", kw)
|
||||
self.assertTrue(kw["dry_run"])
|
||||
self.assertTrue(handler.dry_run)
|
||||
|
||||
# warnings (consumed)
|
||||
self.assertFalse(handler.warnings)
|
||||
kw["warnings"] = True
|
||||
result = handler.consume_kwargs(kw)
|
||||
self.assertIs(result, kw)
|
||||
self.assertNotIn("warnings", kw)
|
||||
self.assertTrue(handler.warnings)
|
||||
|
||||
# warnings_recipients (consumed)
|
||||
self.assertIsNone(handler.warnings_recipients)
|
||||
kw["warnings_recipients"] = "bob@example.com"
|
||||
result = handler.consume_kwargs(kw)
|
||||
self.assertIs(result, kw)
|
||||
self.assertNotIn("warnings_recipients", kw)
|
||||
self.assertEqual(handler.warnings_recipients, ["bob@example.com"])
|
||||
|
||||
# warnings_max_diffs (consumed)
|
||||
self.assertEqual(handler.warnings_max_diffs, 15)
|
||||
kw["warnings_max_diffs"] = 30
|
||||
result = handler.consume_kwargs(kw)
|
||||
self.assertIs(result, kw)
|
||||
self.assertNotIn("warnings_max_diffs", kw)
|
||||
self.assertEqual(handler.warnings_max_diffs, 30)
|
||||
|
||||
# runas_username (consumed)
|
||||
self.assertIsNone(handler.runas_username)
|
||||
kw["runas_username"] = "fred"
|
||||
result = handler.consume_kwargs(kw)
|
||||
self.assertIs(result, kw)
|
||||
self.assertNotIn("runas_username", kw)
|
||||
self.assertEqual(handler.runas_username, "fred")
|
||||
|
||||
# transaction_comment (consumed)
|
||||
self.assertIsNone(handler.transaction_comment)
|
||||
kw["transaction_comment"] = "hello world"
|
||||
result = handler.consume_kwargs(kw)
|
||||
self.assertIs(result, kw)
|
||||
self.assertNotIn("transaction_comment", kw)
|
||||
self.assertEqual(handler.transaction_comment, "hello world")
|
||||
|
||||
def test_define_importers(self):
|
||||
handler = self.make_handler()
|
||||
importers = handler.define_importers()
|
||||
|
|
@ -165,12 +236,246 @@ class TestImportHandler(DataTestCase):
|
|||
handler = self.make_handler()
|
||||
|
||||
# normal
|
||||
handler.importers['Setting'] = Importer
|
||||
importer = handler.get_importer('Setting', model_class=model.Setting)
|
||||
handler.importers["Setting"] = Importer
|
||||
importer = handler.get_importer("Setting", model_class=model.Setting)
|
||||
self.assertIsInstance(importer, Importer)
|
||||
|
||||
# specifying empty keys
|
||||
handler.importers["Setting"] = Importer
|
||||
importer = handler.get_importer("Setting", model_class=model.Setting, keys=None)
|
||||
self.assertIsInstance(importer, Importer)
|
||||
importer = handler.get_importer("Setting", model_class=model.Setting, keys="")
|
||||
self.assertIsInstance(importer, Importer)
|
||||
importer = handler.get_importer("Setting", model_class=model.Setting, keys=[])
|
||||
self.assertIsInstance(importer, Importer)
|
||||
|
||||
# key not found
|
||||
self.assertRaises(KeyError, handler.get_importer, 'BunchOfNonsense', model_class=model.Setting)
|
||||
self.assertRaises(
|
||||
KeyError, handler.get_importer, "BunchOfNonsense", model_class=model.Setting
|
||||
)
|
||||
|
||||
def test_is_default(self):
|
||||
handler = self.make_handler()
|
||||
# nb. anything is considered default, by default
|
||||
self.assertTrue(handler.is_default("there_is_no_way_this_is_valid"))
|
||||
|
||||
def test_get_default_importer_keys(self):
|
||||
|
||||
# use handler which already has some non/default keys
|
||||
handler = FromWuttaToWuttaImport(self.config)
|
||||
|
||||
# it supports many importers
|
||||
self.assertIn("Person", handler.importers)
|
||||
self.assertIn("User", handler.importers)
|
||||
self.assertIn("Setting", handler.importers)
|
||||
|
||||
# but only Person is default
|
||||
keys = handler.get_default_importer_keys()
|
||||
self.assertEqual(keys, ["Person"])
|
||||
|
||||
def test_get_warnings_email_key(self):
|
||||
handler = FromFooToBar(self.config)
|
||||
|
||||
# default
|
||||
key = handler.get_warnings_email_key()
|
||||
self.assertEqual(key, "import_to_bar_from_foo_warning")
|
||||
|
||||
# override
|
||||
handler.warnings_email_key = "from_foo_to_bar"
|
||||
key = handler.get_warnings_email_key()
|
||||
self.assertEqual(key, "from_foo_to_bar")
|
||||
|
||||
def test_process_changes(self):
|
||||
model = self.app.model
|
||||
handler = self.make_handler()
|
||||
email_handler = self.app.get_email_handler()
|
||||
|
||||
handler.process_started = self.app.localtime()
|
||||
|
||||
alice = model.User(username="alice")
|
||||
bob = model.User(username="bob")
|
||||
charlie = model.User(username="charlie")
|
||||
|
||||
changes = {
|
||||
"User": (
|
||||
[
|
||||
(
|
||||
alice,
|
||||
{
|
||||
"uuid": UUID("06946d64-1ebf-79db-8000-ce40345044fe"),
|
||||
"username": "alice",
|
||||
},
|
||||
),
|
||||
],
|
||||
[
|
||||
(
|
||||
bob,
|
||||
{
|
||||
"uuid": UUID("06946d64-1ebf-7a8c-8000-05d78792b084"),
|
||||
"username": "bob",
|
||||
},
|
||||
{
|
||||
"uuid": UUID("06946d64-1ebf-7a8c-8000-05d78792b084"),
|
||||
"username": "bobbie",
|
||||
},
|
||||
),
|
||||
],
|
||||
[
|
||||
(
|
||||
charlie,
|
||||
{
|
||||
"uuid": UUID("06946d64-1ebf-7ad4-8000-1ba52f720c48"),
|
||||
"username": "charlie",
|
||||
},
|
||||
),
|
||||
],
|
||||
),
|
||||
}
|
||||
|
||||
# no email if not in warnings mode
|
||||
self.assertFalse(handler.warnings)
|
||||
with patch.object(self.app, "send_email") as send_email:
|
||||
handler.process_changes(changes)
|
||||
send_email.assert_not_called()
|
||||
|
||||
# email sent (to default recip) if in warnings mode
|
||||
handler.warnings = True
|
||||
self.config.setdefault("wutta.email.default.to", "admin@example.com")
|
||||
with patch.object(email_handler, "deliver_message") as deliver_message:
|
||||
handler.process_changes(changes)
|
||||
deliver_message.assert_called_once()
|
||||
args, kwargs = deliver_message.call_args
|
||||
self.assertEqual(kwargs, {"recips": None})
|
||||
self.assertEqual(len(args), 1)
|
||||
msg = args[0]
|
||||
self.assertEqual(msg.to, ["admin@example.com"])
|
||||
|
||||
# can override email recip
|
||||
handler.warnings_recipients = ["bob@example.com"]
|
||||
with patch.object(email_handler, "deliver_message") as deliver_message:
|
||||
handler.process_changes(changes)
|
||||
deliver_message.assert_called_once()
|
||||
args, kwargs = deliver_message.call_args
|
||||
self.assertEqual(kwargs, {"recips": None})
|
||||
self.assertEqual(len(args), 1)
|
||||
msg = args[0]
|
||||
self.assertEqual(msg.to, ["bob@example.com"])
|
||||
|
||||
|
||||
class TestFromFileHandler(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.FromFileHandler(self.config, **kwargs)
|
||||
|
||||
def test_process_data(self):
|
||||
handler = self.make_handler()
|
||||
path = self.write_file("data.txt", "")
|
||||
with patch.object(mod.ImportHandler, "process_data") as process_data:
|
||||
|
||||
# bare
|
||||
handler.process_data()
|
||||
process_data.assert_called_once_with()
|
||||
|
||||
# with file path
|
||||
process_data.reset_mock()
|
||||
handler.process_data(input_file_path=path)
|
||||
process_data.assert_called_once_with(input_file_path=path)
|
||||
|
||||
# with folder
|
||||
process_data.reset_mock()
|
||||
handler.process_data(input_file_path=self.tempdir)
|
||||
process_data.assert_called_once_with(input_file_dir=self.tempdir)
|
||||
|
||||
|
||||
class TestFromSqlalchemyHandler(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.FromSqlalchemyHandler(self.config, **kwargs)
|
||||
|
||||
def test_make_source_session(self):
|
||||
handler = self.make_handler()
|
||||
self.assertRaises(NotImplementedError, handler.make_source_session)
|
||||
|
||||
def test_begin_source_transaction(self):
|
||||
handler = self.make_handler()
|
||||
self.assertIsNone(handler.source_session)
|
||||
with patch.object(handler, "make_source_session", return_value=self.session):
|
||||
handler.begin_source_transaction()
|
||||
self.assertIs(handler.source_session, self.session)
|
||||
|
||||
def test_commit_source_transaction(self):
|
||||
model = self.app.model
|
||||
handler = self.make_handler()
|
||||
handler.source_session = self.session
|
||||
self.assertEqual(self.session.query(model.User).count(), 0)
|
||||
|
||||
# nb. do not commit this yet
|
||||
user = model.User(username="fred")
|
||||
self.session.add(user)
|
||||
|
||||
self.assertTrue(self.session.in_transaction())
|
||||
self.assertIn(user, self.session)
|
||||
handler.commit_source_transaction()
|
||||
self.assertIsNone(handler.source_session)
|
||||
self.assertFalse(self.session.in_transaction())
|
||||
self.assertNotIn(user, self.session) # hm, surprising?
|
||||
self.assertEqual(self.session.query(model.User).count(), 1)
|
||||
|
||||
def test_rollback_source_transaction(self):
|
||||
model = self.app.model
|
||||
handler = self.make_handler()
|
||||
handler.source_session = self.session
|
||||
self.assertEqual(self.session.query(model.User).count(), 0)
|
||||
|
||||
# nb. do not commit this yet
|
||||
user = model.User(username="fred")
|
||||
self.session.add(user)
|
||||
|
||||
self.assertTrue(self.session.in_transaction())
|
||||
self.assertIn(user, self.session)
|
||||
handler.rollback_source_transaction()
|
||||
self.assertIsNone(handler.source_session)
|
||||
self.assertFalse(self.session.in_transaction())
|
||||
self.assertNotIn(user, self.session)
|
||||
self.assertEqual(self.session.query(model.User).count(), 0)
|
||||
|
||||
def test_get_importer_kwargs(self):
|
||||
handler = self.make_handler()
|
||||
handler.source_session = self.session
|
||||
kw = handler.get_importer_kwargs("User")
|
||||
self.assertIn("source_session", kw)
|
||||
self.assertIs(kw["source_session"], self.session)
|
||||
|
||||
|
||||
class TestFromWuttaHandler(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.FromWuttaHandler(self.config, **kwargs)
|
||||
|
||||
def test_get_source_title(self):
|
||||
handler = self.make_handler()
|
||||
|
||||
# uses app title by default
|
||||
self.config.setdefault("wutta.app_title", "What About This")
|
||||
self.assertEqual(handler.get_source_title(), "What About This")
|
||||
|
||||
# or generic default if present
|
||||
handler.generic_source_title = "WHATABOUTTHIS"
|
||||
self.assertEqual(handler.get_source_title(), "WHATABOUTTHIS")
|
||||
|
||||
# but prefer specific title if present
|
||||
handler.source_title = "what_about_this"
|
||||
self.assertEqual(handler.get_source_title(), "what_about_this")
|
||||
|
||||
def test_make_source_session(self):
|
||||
handler = self.make_handler()
|
||||
|
||||
# makes "new" (mocked in our case) app session
|
||||
with patch.object(self.app, "make_session") as make_session:
|
||||
make_session.return_value = self.session
|
||||
session = handler.make_source_session()
|
||||
make_session.assert_called_once_with()
|
||||
self.assertIs(session, self.session)
|
||||
|
||||
|
||||
class TestToSqlalchemyHandler(DataTestCase):
|
||||
|
|
@ -180,7 +485,7 @@ class TestToSqlalchemyHandler(DataTestCase):
|
|||
|
||||
def test_begin_target_transaction(self):
|
||||
handler = self.make_handler()
|
||||
with patch.object(handler, 'make_target_session') as make_target_session:
|
||||
with patch.object(handler, "make_target_session") as make_target_session:
|
||||
make_target_session.return_value = self.session
|
||||
self.assertIsNone(handler.target_session)
|
||||
handler.begin_target_transaction()
|
||||
|
|
@ -188,7 +493,7 @@ class TestToSqlalchemyHandler(DataTestCase):
|
|||
|
||||
def test_rollback_target_transaction(self):
|
||||
handler = self.make_handler()
|
||||
with patch.object(handler, 'make_target_session') as make_target_session:
|
||||
with patch.object(handler, "make_target_session") as make_target_session:
|
||||
make_target_session.return_value = self.session
|
||||
self.assertIsNone(handler.target_session)
|
||||
handler.begin_target_transaction()
|
||||
|
|
@ -198,7 +503,7 @@ class TestToSqlalchemyHandler(DataTestCase):
|
|||
|
||||
def test_commit_target_transaction(self):
|
||||
handler = self.make_handler()
|
||||
with patch.object(handler, 'make_target_session') as make_target_session:
|
||||
with patch.object(handler, "make_target_session") as make_target_session:
|
||||
make_target_session.return_value = self.session
|
||||
self.assertIsNone(handler.target_session)
|
||||
handler.begin_target_transaction()
|
||||
|
|
@ -213,6 +518,67 @@ class TestToSqlalchemyHandler(DataTestCase):
|
|||
def test_get_importer_kwargs(self):
|
||||
handler = self.make_handler()
|
||||
handler.target_session = self.session
|
||||
kw = handler.get_importer_kwargs('Setting')
|
||||
self.assertIn('target_session', kw)
|
||||
self.assertIs(kw['target_session'], self.session)
|
||||
kw = handler.get_importer_kwargs("Setting")
|
||||
self.assertIn("target_session", kw)
|
||||
self.assertIs(kw["target_session"], self.session)
|
||||
|
||||
|
||||
class TestToWuttaHandler(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.ToWuttaHandler(self.config, **kwargs)
|
||||
|
||||
def test_get_target_title(self):
|
||||
handler = self.make_handler()
|
||||
|
||||
# uses app title by default
|
||||
self.config.setdefault("wutta.app_title", "What About This")
|
||||
self.assertEqual(handler.get_target_title(), "What About This")
|
||||
|
||||
# or generic default if present
|
||||
handler.generic_target_title = "WHATABOUTTHIS"
|
||||
self.assertEqual(handler.get_target_title(), "WHATABOUTTHIS")
|
||||
|
||||
# but prefer specific title if present
|
||||
handler.target_title = "what_about_this"
|
||||
self.assertEqual(handler.get_target_title(), "what_about_this")
|
||||
|
||||
def test_make_target_session(self):
|
||||
model = self.app.model
|
||||
handler = self.make_handler()
|
||||
|
||||
fred = model.User(username="fred")
|
||||
self.session.add(fred)
|
||||
self.session.commit()
|
||||
|
||||
# makes "new" (mocked in our case) app session, with no runas
|
||||
# username set by default
|
||||
with patch.object(self.app, "make_session") as make_session:
|
||||
make_session.return_value = self.session
|
||||
session = handler.make_target_session()
|
||||
make_session.assert_called_once_with()
|
||||
self.assertIs(session, self.session)
|
||||
self.assertNotIn("continuum_user_id", session.info)
|
||||
self.assertNotIn("continuum_user_id", self.session.info)
|
||||
|
||||
# runas user also should not be set, if username is not valid
|
||||
handler.runas_username = "freddie"
|
||||
with patch.object(self.app, "make_session") as make_session:
|
||||
make_session.return_value = self.session
|
||||
session = handler.make_target_session()
|
||||
make_session.assert_called_once_with()
|
||||
self.assertIs(session, self.session)
|
||||
self.assertNotIn("continuum_user_id", session.info)
|
||||
self.assertNotIn("continuum_user_id", self.session.info)
|
||||
|
||||
# this time we should have runas user properly set
|
||||
handler.runas_username = "fred"
|
||||
with patch.object(self.app, "make_session") as make_session:
|
||||
make_session.return_value = self.session
|
||||
session = handler.make_target_session()
|
||||
make_session.assert_called_once_with()
|
||||
self.assertIs(session, self.session)
|
||||
self.assertIn("continuum_user_id", session.info)
|
||||
self.assertEqual(session.info["continuum_user_id"], fred.uuid)
|
||||
self.assertIn("continuum_user_id", self.session.info)
|
||||
self.assertEqual(self.session.info["continuum_user_id"], fred.uuid)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
#-*- coding: utf-8; -*-
|
||||
# -*- coding: utf-8; -*-
|
||||
|
||||
from wuttasync.importing import model as mod
|
||||
|
|
|
|||
248
tests/importing/test_versions.py
Normal file
248
tests/importing/test_versions.py
Normal file
|
|
@ -0,0 +1,248 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from sqlalchemy import orm
|
||||
import sqlalchemy_continuum as continuum
|
||||
|
||||
from wuttjamaican.util import make_true_uuid
|
||||
from wutta_continuum.testing import VersionTestCase
|
||||
|
||||
from wuttasync.importing import versions as mod, Importer
|
||||
|
||||
|
||||
class TestFromWuttaToVersions(VersionTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.FromWuttaToVersions(self.config, **kwargs)
|
||||
|
||||
def test_begin_target_transaction(self):
|
||||
model = self.app.model
|
||||
txncls = continuum.transaction_class(model.User)
|
||||
|
||||
# basic / defaults
|
||||
handler = self.make_handler()
|
||||
self.assertIsNone(handler.continuum_uow)
|
||||
self.assertIsNone(handler.continuum_txn)
|
||||
handler.begin_target_transaction()
|
||||
self.assertIsInstance(handler.continuum_uow, continuum.UnitOfWork)
|
||||
self.assertIsInstance(handler.continuum_txn, txncls)
|
||||
# nb. no comment
|
||||
self.assertIsNone(handler.continuum_txn.meta.get("comment"))
|
||||
|
||||
# with comment
|
||||
handler = self.make_handler()
|
||||
handler.transaction_comment = "yeehaw"
|
||||
handler.begin_target_transaction()
|
||||
self.assertIn("comment", handler.continuum_txn.meta)
|
||||
self.assertEqual(handler.continuum_txn.meta["comment"], "yeehaw")
|
||||
|
||||
def test_get_importer_kwargs(self):
|
||||
handler = self.make_handler()
|
||||
handler.begin_target_transaction()
|
||||
|
||||
kw = handler.get_importer_kwargs("User")
|
||||
self.assertIn("continuum_txn", kw)
|
||||
self.assertIs(kw["continuum_txn"], handler.continuum_txn)
|
||||
|
||||
def test_make_importer_factory(self):
|
||||
model = self.app.model
|
||||
handler = self.make_handler()
|
||||
|
||||
# versioned class
|
||||
factory = handler.make_importer_factory(model.User, "User")
|
||||
self.assertTrue(issubclass(factory, mod.FromWuttaToVersionBase))
|
||||
self.assertIs(factory.source_model_class, model.User)
|
||||
self.assertIs(factory.model_class, continuum.version_class(model.User))
|
||||
|
||||
# non-versioned
|
||||
factory = handler.make_importer_factory(model.Upgrade, "Upgrade")
|
||||
self.assertIsNone(factory)
|
||||
|
||||
def test_define_importers(self):
|
||||
handler = self.make_handler()
|
||||
|
||||
importers = handler.define_importers()
|
||||
self.assertIn("User", importers)
|
||||
self.assertIn("Person", importers)
|
||||
self.assertNotIn("Upgrade", importers)
|
||||
|
||||
|
||||
class TestFromWuttaToVersionBase(VersionTestCase):
|
||||
|
||||
def make_importer(self, model_class=None, **kwargs):
|
||||
imp = mod.FromWuttaToVersionBase(self.config, **kwargs)
|
||||
if model_class:
|
||||
imp.model_class = model_class
|
||||
return imp
|
||||
|
||||
def test_get_simple_fields(self):
|
||||
model = self.app.model
|
||||
vercls = continuum.version_class(model.User)
|
||||
|
||||
# first confirm what a "normal" importer would do
|
||||
imp = Importer(self.config, model_class=vercls)
|
||||
fields = imp.get_simple_fields()
|
||||
self.assertIn("username", fields)
|
||||
self.assertIn("person_uuid", fields)
|
||||
self.assertIn("transaction_id", fields)
|
||||
self.assertIn("operation_type", fields)
|
||||
self.assertIn("end_transaction_id", fields)
|
||||
|
||||
# now test what the "version" importer does
|
||||
imp = self.make_importer(model_class=vercls)
|
||||
fields = imp.get_simple_fields()
|
||||
self.assertIn("username", fields)
|
||||
self.assertIn("person_uuid", fields)
|
||||
self.assertNotIn("transaction_id", fields)
|
||||
self.assertNotIn("operation_type", fields)
|
||||
self.assertNotIn("end_transaction_id", fields)
|
||||
|
||||
def test_get_target_query(self):
|
||||
model = self.app.model
|
||||
vercls = continuum.version_class(model.User)
|
||||
imp = self.make_importer(model_class=vercls, target_session=self.session)
|
||||
|
||||
# TODO: not sure what else to test here..
|
||||
query = imp.get_target_query()
|
||||
self.assertIsInstance(query, orm.Query)
|
||||
|
||||
def test_normalize_target_object(self):
|
||||
model = self.app.model
|
||||
vercls = continuum.version_class(model.User)
|
||||
imp = self.make_importer(model_class=vercls)
|
||||
|
||||
user = model.User(username="fred")
|
||||
self.session.add(user)
|
||||
self.session.commit()
|
||||
version = user.versions[0]
|
||||
|
||||
# version object should be embedded in data dict
|
||||
data = imp.normalize_target_object(version)
|
||||
self.assertIsInstance(data, dict)
|
||||
self.assertIn("_objref", data)
|
||||
self.assertIs(data["_objref"], version)
|
||||
|
||||
# but normal object is not embedded
|
||||
data = imp.normalize_target_object(user)
|
||||
self.assertIsInstance(data, dict)
|
||||
self.assertNotIn("_version", data)
|
||||
|
||||
def test_make_version(self):
|
||||
model = self.app.model
|
||||
vercls = continuum.version_class(model.User)
|
||||
|
||||
user = model.User(username="fred")
|
||||
self.session.add(user)
|
||||
self.session.commit()
|
||||
|
||||
handler = mod.FromWuttaToVersions(self.config)
|
||||
handler.begin_target_transaction()
|
||||
handler.target_session.close()
|
||||
handler.target_session = self.session
|
||||
|
||||
imp = self.make_importer(
|
||||
model_class=vercls,
|
||||
fields=["uuid", "username"],
|
||||
keys=("uuid",),
|
||||
target_session=self.session,
|
||||
continuum_txn=handler.continuum_txn,
|
||||
)
|
||||
|
||||
data = {"uuid": user.uuid, "username": "freddie"}
|
||||
version = imp.make_version(data, continuum.Operation.UPDATE)
|
||||
self.assertIsInstance(version, vercls)
|
||||
self.assertEqual(version.uuid, user.uuid)
|
||||
self.assertEqual(version.username, "freddie")
|
||||
self.assertIn(version, self.session)
|
||||
self.assertIs(version.transaction, imp.continuum_txn)
|
||||
self.assertEqual(version.operation_type, continuum.Operation.UPDATE)
|
||||
|
||||
def test_create_target_object(self):
|
||||
model = self.app.model
|
||||
vercls = continuum.version_class(model.User)
|
||||
|
||||
handler = mod.FromWuttaToVersions(self.config)
|
||||
handler.begin_target_transaction()
|
||||
handler.target_session.close()
|
||||
handler.target_session = self.session
|
||||
|
||||
imp = self.make_importer(
|
||||
model_class=vercls,
|
||||
fields=["uuid", "username"],
|
||||
keys=("uuid",),
|
||||
target_session=self.session,
|
||||
continuum_txn=handler.continuum_txn,
|
||||
)
|
||||
|
||||
source_data = {"uuid": make_true_uuid(), "username": "bettie"}
|
||||
self.assertEqual(self.session.query(vercls).count(), 0)
|
||||
version = imp.create_target_object((source_data["uuid"], 1), source_data)
|
||||
self.assertEqual(self.session.query(vercls).count(), 1)
|
||||
self.assertEqual(version.transaction_id, imp.continuum_txn.id)
|
||||
self.assertEqual(version.operation_type, continuum.Operation.INSERT)
|
||||
self.assertIsNone(version.end_transaction_id)
|
||||
|
||||
def test_update_target_object(self):
|
||||
model = self.app.model
|
||||
vercls = continuum.version_class(model.User)
|
||||
|
||||
user = model.User(username="fred")
|
||||
self.session.add(user)
|
||||
self.session.commit()
|
||||
version1 = user.versions[0]
|
||||
|
||||
handler = mod.FromWuttaToVersions(self.config)
|
||||
handler.begin_target_transaction()
|
||||
handler.target_session.close()
|
||||
handler.target_session = self.session
|
||||
|
||||
imp = self.make_importer(
|
||||
model_class=vercls,
|
||||
fields=["uuid", "username"],
|
||||
keys=("uuid",),
|
||||
target_session=self.session,
|
||||
continuum_txn=handler.continuum_txn,
|
||||
)
|
||||
|
||||
source_data = {"uuid": user.uuid, "username": "freddie"}
|
||||
target_data = imp.normalize_target_object(version1)
|
||||
self.assertEqual(self.session.query(vercls).count(), 1)
|
||||
self.assertIsNone(version1.end_transaction_id)
|
||||
version2 = imp.update_target_object(
|
||||
version1, source_data, target_data=target_data
|
||||
)
|
||||
self.assertEqual(self.session.query(vercls).count(), 2)
|
||||
self.assertEqual(version1.end_transaction_id, imp.continuum_txn.id)
|
||||
self.assertEqual(version2.transaction_id, imp.continuum_txn.id)
|
||||
self.assertEqual(version2.operation_type, continuum.Operation.UPDATE)
|
||||
self.assertIsNone(version2.end_transaction_id)
|
||||
|
||||
def test_delete_target_object(self):
|
||||
model = self.app.model
|
||||
vercls = continuum.version_class(model.User)
|
||||
|
||||
user = model.User(username="fred")
|
||||
self.session.add(user)
|
||||
self.session.commit()
|
||||
version1 = user.versions[0]
|
||||
|
||||
handler = mod.FromWuttaToVersions(self.config)
|
||||
handler.begin_target_transaction()
|
||||
handler.target_session.close()
|
||||
handler.target_session = self.session
|
||||
|
||||
imp = self.make_importer(
|
||||
model_class=vercls,
|
||||
fields=["uuid", "username"],
|
||||
keys=("uuid",),
|
||||
target_session=self.session,
|
||||
continuum_txn=handler.continuum_txn,
|
||||
)
|
||||
|
||||
self.assertEqual(self.session.query(vercls).count(), 1)
|
||||
self.assertIsNone(version1.end_transaction_id)
|
||||
version2 = imp.delete_target_object(version1)
|
||||
self.assertEqual(self.session.query(vercls).count(), 2)
|
||||
self.assertEqual(version1.end_transaction_id, imp.continuum_txn.id)
|
||||
self.assertEqual(version2.transaction_id, imp.continuum_txn.id)
|
||||
self.assertEqual(version2.operation_type, continuum.Operation.DELETE)
|
||||
self.assertIsNone(version2.end_transaction_id)
|
||||
|
|
@ -1,38 +1,134 @@
|
|||
#-*- coding: utf-8; -*-
|
||||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from wuttjamaican.testing import DataTestCase
|
||||
|
||||
from wuttasync.importing import wutta as mod
|
||||
from wuttasync.importing import ToWutta
|
||||
|
||||
|
||||
class TestToWuttaHandler(DataTestCase):
|
||||
class TestFromWuttaMirror(DataTestCase):
|
||||
|
||||
def make_importer(self, **kwargs):
|
||||
return mod.FromWuttaMirror(self.config, **kwargs)
|
||||
|
||||
def test_basic(self):
|
||||
importer = self.make_importer()
|
||||
self.assertIsInstance(importer, mod.FromWuttaMirror)
|
||||
|
||||
|
||||
class TestFromWuttaToWuttaBase(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.ToWuttaHandler(self.config, **kwargs)
|
||||
return mod.FromWuttaToWuttaBase(self.config, **kwargs)
|
||||
|
||||
def test_get_target_title(self):
|
||||
def test_dbkey(self):
|
||||
|
||||
# null by default
|
||||
handler = self.make_handler()
|
||||
self.assertIsNone(handler.dbkey)
|
||||
|
||||
# but caller can specify
|
||||
handler = self.make_handler(dbkey="another")
|
||||
self.assertEqual(handler.dbkey, "another")
|
||||
|
||||
def test_make_importer_factory(self):
|
||||
model = self.app.model
|
||||
handler = self.make_handler()
|
||||
|
||||
# uses app title by default
|
||||
self.config.setdefault('wutta.app_title', "What About This")
|
||||
self.assertEqual(handler.get_target_title(), 'What About This')
|
||||
# returns a typical importer
|
||||
factory = handler.make_importer_factory(model.User, "User")
|
||||
self.assertTrue(issubclass(factory, mod.FromWuttaMirror))
|
||||
self.assertTrue(issubclass(factory, ToWutta))
|
||||
self.assertIs(factory.model_class, model.User)
|
||||
self.assertEqual(factory.__name__, "UserImporter")
|
||||
|
||||
# or generic default if present
|
||||
handler.generic_target_title = "WHATABOUTTHIS"
|
||||
self.assertEqual(handler.get_target_title(), 'WHATABOUTTHIS')
|
||||
def test_define_importers(self):
|
||||
handler = self.make_handler()
|
||||
|
||||
# but prefer specific title if present
|
||||
handler.target_title = "what_about_this"
|
||||
self.assertEqual(handler.get_target_title(), 'what_about_this')
|
||||
# all models are included
|
||||
importers = handler.define_importers()
|
||||
self.assertIn("Setting", importers)
|
||||
self.assertIn("Person", importers)
|
||||
self.assertIn("Role", importers)
|
||||
self.assertIn("Permission", importers)
|
||||
self.assertIn("User", importers)
|
||||
self.assertIn("UserRole", importers)
|
||||
self.assertIn("UserAPIToken", importers)
|
||||
self.assertIn("Upgrade", importers)
|
||||
self.assertNotIn("BatchMixin", importers)
|
||||
self.assertNotIn("BatchRowMixin", importers)
|
||||
self.assertNotIn("Base", importers)
|
||||
|
||||
# also, dependencies are implied by sort order
|
||||
models = list(importers)
|
||||
self.assertLess(models.index("Person"), models.index("User"))
|
||||
self.assertLess(models.index("User"), models.index("UserRole"))
|
||||
self.assertLess(models.index("User"), models.index("Upgrade"))
|
||||
|
||||
|
||||
class TestFromWuttaToWuttaImport(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.FromWuttaToWuttaImport(self.config, **kwargs)
|
||||
|
||||
def test_make_source_session(self):
|
||||
|
||||
# error if null dbkey
|
||||
handler = self.make_handler()
|
||||
self.assertIsNone(handler.dbkey)
|
||||
self.assertRaises(ValueError, handler.make_source_session)
|
||||
|
||||
# error if dbkey not found
|
||||
handler = self.make_handler(dbkey="another")
|
||||
self.assertEqual(handler.dbkey, "another")
|
||||
self.assertNotIn("another", self.config.appdb_engines)
|
||||
self.assertRaises(ValueError, handler.make_source_session)
|
||||
|
||||
# error if dbkey is 'default'
|
||||
handler = self.make_handler(dbkey="default")
|
||||
self.assertEqual(handler.dbkey, "default")
|
||||
self.assertIn("default", self.config.appdb_engines)
|
||||
self.assertRaises(ValueError, handler.make_source_session)
|
||||
|
||||
# expected behavior
|
||||
another_engine = sa.create_engine("sqlite://")
|
||||
handler = self.make_handler(dbkey="another")
|
||||
with patch.dict(self.config.appdb_engines, {"another": another_engine}):
|
||||
session = handler.make_source_session()
|
||||
self.assertIs(session.bind, another_engine)
|
||||
|
||||
|
||||
class TestFromWuttaToWuttaExport(DataTestCase):
|
||||
|
||||
def make_handler(self, **kwargs):
|
||||
return mod.FromWuttaToWuttaExport(self.config, **kwargs)
|
||||
|
||||
def test_make_target_session(self):
|
||||
handler = self.make_handler()
|
||||
|
||||
# makes "new" (mocked in our case) app session
|
||||
with patch.object(self.app, 'make_session') as make_session:
|
||||
make_session.return_value = self.session
|
||||
# error if null dbkey
|
||||
handler = self.make_handler()
|
||||
self.assertIsNone(handler.dbkey)
|
||||
self.assertRaises(ValueError, handler.make_target_session)
|
||||
|
||||
# error if dbkey not found
|
||||
handler = self.make_handler(dbkey="another")
|
||||
self.assertEqual(handler.dbkey, "another")
|
||||
self.assertNotIn("another", self.config.appdb_engines)
|
||||
self.assertRaises(ValueError, handler.make_target_session)
|
||||
|
||||
# error if dbkey is 'default'
|
||||
handler = self.make_handler(dbkey="default")
|
||||
self.assertEqual(handler.dbkey, "default")
|
||||
self.assertIn("default", self.config.appdb_engines)
|
||||
self.assertRaises(ValueError, handler.make_target_session)
|
||||
|
||||
# expected behavior
|
||||
another_engine = sa.create_engine("sqlite://")
|
||||
handler = self.make_handler(dbkey="another")
|
||||
with patch.dict(self.config.appdb_engines, {"another": another_engine}):
|
||||
session = handler.make_target_session()
|
||||
make_session.assert_called_once_with()
|
||||
self.assertIs(session, self.session)
|
||||
self.assertIs(session.bind, another_engine)
|
||||
|
|
|
|||
170
tests/test_app.py
Normal file
170
tests/test_app.py
Normal file
|
|
@ -0,0 +1,170 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from wuttjamaican.testing import ConfigTestCase
|
||||
|
||||
from wuttasync import app as mod
|
||||
from wuttasync.importing import ImportHandler
|
||||
from wuttasync.importing.csv import FromCsvToWutta
|
||||
|
||||
|
||||
class FromFooToBar(ImportHandler):
|
||||
source_key = "foo"
|
||||
target_key = "bar"
|
||||
|
||||
|
||||
class FromCsvToPoser(FromCsvToWutta):
|
||||
pass
|
||||
|
||||
|
||||
class FromFooToBaz1(ImportHandler):
|
||||
source_key = "foo"
|
||||
target_key = "baz"
|
||||
|
||||
|
||||
class FromFooToBaz2(ImportHandler):
|
||||
source_key = "foo"
|
||||
target_key = "baz"
|
||||
|
||||
|
||||
class TestWuttaSyncAppProvider(ConfigTestCase):
|
||||
|
||||
def test_get_all_import_handlers(self):
|
||||
|
||||
# by default our custom handler is not found
|
||||
handlers = self.app.get_all_import_handlers()
|
||||
self.assertIn(FromCsvToWutta, handlers)
|
||||
self.assertNotIn(FromFooToBar, handlers)
|
||||
|
||||
# make sure if we configure a custom handler, it is found
|
||||
self.config.setdefault(
|
||||
"wuttasync.importing.import.to_wutta.from_csv.handler",
|
||||
"tests.test_app:FromFooToBar",
|
||||
)
|
||||
handlers = self.app.get_all_import_handlers()
|
||||
self.assertIn(FromCsvToWutta, handlers)
|
||||
self.assertIn(FromFooToBar, handlers)
|
||||
|
||||
# now for something completely different..here we pretend there
|
||||
# are multiple handler entry points with same key. all should
|
||||
# be returned, including both which share the key.
|
||||
entry_points = {
|
||||
"import.to_baz.from_foo": [FromFooToBaz1, FromFooToBaz2],
|
||||
}
|
||||
with patch.object(mod, "load_entry_points", return_value=entry_points):
|
||||
handlers = self.app.get_all_import_handlers()
|
||||
self.assertEqual(len(handlers), 2)
|
||||
self.assertIn(FromFooToBaz1, handlers)
|
||||
self.assertIn(FromFooToBaz2, handlers)
|
||||
|
||||
def test_get_designated_import_handler_spec(self):
|
||||
|
||||
# fetch of unknown key returns none
|
||||
spec = self.app.get_designated_import_handler_spec("test01")
|
||||
self.assertIsNone(spec)
|
||||
|
||||
# unless we require it, in which case, error
|
||||
self.assertRaises(
|
||||
ValueError,
|
||||
self.app.get_designated_import_handler_spec,
|
||||
"test01",
|
||||
require=True,
|
||||
)
|
||||
|
||||
# we configure one for whatever key we like
|
||||
self.config.setdefault(
|
||||
"wuttasync.importing.test02.handler", "tests.test_app:FromBarToFoo"
|
||||
)
|
||||
spec = self.app.get_designated_import_handler_spec("test02")
|
||||
self.assertEqual(spec, "tests.test_app:FromBarToFoo")
|
||||
|
||||
# we can also define a "default" designated handler
|
||||
self.config.setdefault(
|
||||
"wuttasync.importing.test03.default_handler",
|
||||
"tests.test_app:FromBarToFoo",
|
||||
)
|
||||
spec = self.app.get_designated_import_handler_spec("test03")
|
||||
self.assertEqual(spec, "tests.test_app:FromBarToFoo")
|
||||
|
||||
def test_get_designated_import_handlers(self):
|
||||
|
||||
# some designated handlers exist, but not our custom handler
|
||||
handlers = self.app.get_designated_import_handlers()
|
||||
csv_handlers = [
|
||||
h for h in handlers if h.get_key() == "import.to_wutta.from_csv"
|
||||
]
|
||||
self.assertEqual(len(csv_handlers), 1)
|
||||
csv_handler = csv_handlers[0]
|
||||
self.assertIsInstance(csv_handler, FromCsvToWutta)
|
||||
self.assertFalse(isinstance(csv_handler, FromCsvToPoser))
|
||||
self.assertFalse(
|
||||
any([h.get_key() == "import.to_bar.from_foo" for h in handlers])
|
||||
)
|
||||
self.assertFalse(any([isinstance(h, FromFooToBar) for h in handlers]))
|
||||
self.assertFalse(any([isinstance(h, FromCsvToPoser) for h in handlers]))
|
||||
self.assertTrue(
|
||||
any([h.get_key() == "import.to_versions.from_wutta" for h in handlers])
|
||||
)
|
||||
|
||||
# but we can make custom designated
|
||||
self.config.setdefault(
|
||||
"wuttasync.importing.import.to_wutta.from_csv.handler",
|
||||
"tests.test_app:FromCsvToPoser",
|
||||
)
|
||||
handlers = self.app.get_designated_import_handlers()
|
||||
csv_handlers = [
|
||||
h for h in handlers if h.get_key() == "import.to_wutta.from_csv"
|
||||
]
|
||||
self.assertEqual(len(csv_handlers), 1)
|
||||
csv_handler = csv_handlers[0]
|
||||
self.assertIsInstance(csv_handler, FromCsvToWutta)
|
||||
self.assertIsInstance(csv_handler, FromCsvToPoser)
|
||||
self.assertTrue(
|
||||
any([h.get_key() == "import.to_versions.from_wutta" for h in handlers])
|
||||
)
|
||||
|
||||
# nothing returned if multiple handlers found but none are designated
|
||||
with patch.object(
|
||||
self.app.providers["wuttasync"],
|
||||
"get_all_import_handlers",
|
||||
return_value=[FromFooToBaz1, FromFooToBaz2],
|
||||
):
|
||||
handlers = self.app.get_designated_import_handlers()
|
||||
baz_handlers = [
|
||||
h for h in handlers if h.get_key() == "import.to_baz.from_foo"
|
||||
]
|
||||
self.assertEqual(len(baz_handlers), 0)
|
||||
|
||||
def test_get_import_handler(self):
|
||||
|
||||
# make sure a basic fetch works
|
||||
handler = self.app.get_import_handler("import.to_wutta.from_csv")
|
||||
self.assertIsInstance(handler, FromCsvToWutta)
|
||||
self.assertFalse(isinstance(handler, FromCsvToPoser))
|
||||
|
||||
# and make sure custom override works
|
||||
self.config.setdefault(
|
||||
"wuttasync.importing.import.to_wutta.from_csv.handler",
|
||||
"tests.test_app:FromCsvToPoser",
|
||||
)
|
||||
handler = self.app.get_import_handler("import.to_wutta.from_csv")
|
||||
self.assertIsInstance(handler, FromCsvToWutta)
|
||||
self.assertIsInstance(handler, FromCsvToPoser)
|
||||
self.assertFalse(hasattr(handler, "foo_attr"))
|
||||
|
||||
# can pass extra kwargs
|
||||
handler = self.app.get_import_handler(
|
||||
"import.to_wutta.from_csv", foo_attr="whatever"
|
||||
)
|
||||
self.assertTrue(hasattr(handler, "foo_attr"))
|
||||
self.assertEqual(handler.foo_attr, "whatever")
|
||||
|
||||
# unknown importer cannot be found
|
||||
handler = self.app.get_import_handler("bogus")
|
||||
self.assertIsNone(handler)
|
||||
|
||||
# and if we require it, error will raise
|
||||
self.assertRaises(
|
||||
ValueError, self.app.get_import_handler, "bogus", require=True
|
||||
)
|
||||
39
tests/test_conf.py
Normal file
39
tests/test_conf.py
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from wuttjamaican.testing import ConfigTestCase
|
||||
|
||||
from wuttasync import conf as mod
|
||||
|
||||
|
||||
class TestWuttaSyncConfig(ConfigTestCase):
|
||||
|
||||
def make_extension(self):
|
||||
return mod.WuttaSyncConfig()
|
||||
|
||||
def test_default_import_handlers(self):
|
||||
|
||||
# base config has no default handlers
|
||||
spec = self.config.get(
|
||||
"wuttasync.importing.import.to_wutta.from_wutta.default_handler"
|
||||
)
|
||||
self.assertIsNone(spec)
|
||||
spec = self.config.get(
|
||||
"wuttasync.importing.export.to_wutta.from_wutta.default_handler"
|
||||
)
|
||||
self.assertIsNone(spec)
|
||||
|
||||
# extend config
|
||||
ext = self.make_extension()
|
||||
ext.configure(self.config)
|
||||
|
||||
# config now has default handlers
|
||||
spec = self.config.get(
|
||||
"wuttasync.importing.import.to_wutta.from_wutta.default_handler"
|
||||
)
|
||||
self.assertIsNotNone(spec)
|
||||
self.assertEqual(spec, "wuttasync.importing.wutta:FromWuttaToWuttaImport")
|
||||
spec = self.config.get(
|
||||
"wuttasync.importing.export.to_wutta.from_wutta.default_handler"
|
||||
)
|
||||
self.assertIsNotNone(spec)
|
||||
self.assertEqual(spec, "wuttasync.importing.wutta:FromWuttaToWuttaExport")
|
||||
98
tests/test_emails.py
Normal file
98
tests/test_emails.py
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
# -*- coding: utf-8; -*-
|
||||
|
||||
from wuttjamaican.testing import ConfigTestCase
|
||||
|
||||
from wuttasync import emails as mod
|
||||
from wuttasync.importing import ImportHandler
|
||||
from wuttasync.testing import ImportExportWarningTestCase
|
||||
from wuttasync.conf import WuttaSyncConfig
|
||||
|
||||
|
||||
class FromFooToWutta(ImportHandler):
|
||||
pass
|
||||
|
||||
|
||||
class TestImportExportWarning(ConfigTestCase):
|
||||
|
||||
def make_setting(self, factory=None):
|
||||
if not factory:
|
||||
factory = mod.ImportExportWarning
|
||||
setting = factory(self.config)
|
||||
return setting
|
||||
|
||||
def test_get_description(self):
|
||||
self.config.setdefault("wutta.app_title", "Wutta Poser")
|
||||
setting = self.make_setting()
|
||||
setting.import_handler_key = "import.to_wutta.from_csv"
|
||||
self.assertEqual(
|
||||
setting.get_description(),
|
||||
"Diff warning email for importing CSV → Wutta Poser",
|
||||
)
|
||||
|
||||
def test_get_default_subject(self):
|
||||
self.config.setdefault("wutta.app_title", "Wutta Poser")
|
||||
setting = self.make_setting()
|
||||
setting.import_handler_key = "import.to_wutta.from_csv"
|
||||
self.assertEqual(setting.get_default_subject(), "Changes for CSV → Wutta Poser")
|
||||
|
||||
def test_get_import_handler(self):
|
||||
|
||||
# nb. typical name pattern
|
||||
class import_to_wutta_from_foo_warning(mod.ImportExportWarning):
|
||||
pass
|
||||
|
||||
# nb. name does not match spec pattern
|
||||
class import_to_wutta_from_bar_blah(mod.ImportExportWarning):
|
||||
pass
|
||||
|
||||
# register our import handler
|
||||
self.config.setdefault(
|
||||
"wuttasync.importing.import.to_wutta.from_foo.handler",
|
||||
"tests.test_emails:FromFooToWutta",
|
||||
)
|
||||
|
||||
# error if spec/key not discoverable
|
||||
setting = self.make_setting(import_to_wutta_from_bar_blah)
|
||||
self.assertRaises(ValueError, setting.get_import_handler)
|
||||
|
||||
# can lookup by name (auto-spec)
|
||||
setting = self.make_setting(import_to_wutta_from_foo_warning)
|
||||
handler = setting.get_import_handler()
|
||||
self.assertIsInstance(handler, FromFooToWutta)
|
||||
|
||||
# can lookup by explicit spec
|
||||
setting = self.make_setting(import_to_wutta_from_bar_blah)
|
||||
setting.import_handler_spec = "tests.test_emails:FromFooToWutta"
|
||||
handler = setting.get_import_handler()
|
||||
self.assertIsInstance(handler, FromFooToWutta)
|
||||
|
||||
# can lookup by explicit key
|
||||
setting = self.make_setting(import_to_wutta_from_bar_blah)
|
||||
setting.import_handler_key = "import.to_wutta.from_foo"
|
||||
handler = setting.get_import_handler()
|
||||
self.assertIsInstance(handler, FromFooToWutta)
|
||||
|
||||
|
||||
class TestEmailSettings(ImportExportWarningTestCase):
|
||||
|
||||
def make_config(self, files=None, **kwargs):
|
||||
config = super().make_config(files, **kwargs)
|
||||
|
||||
# need this to ensure default import/export handlers. since
|
||||
# behavior can vary depending on what packages are installed.
|
||||
ext = WuttaSyncConfig()
|
||||
ext.configure(config)
|
||||
|
||||
return config
|
||||
|
||||
def test_export_to_wutta_from_wutta_warning(self):
|
||||
self.do_test_preview("export_to_wutta_from_wutta_warning")
|
||||
|
||||
def test_import_to_versions_from_wutta_warning(self):
|
||||
self.do_test_preview("import_to_versions_from_wutta_warning")
|
||||
|
||||
def test_import_to_wutta_from_csv_warning(self):
|
||||
self.do_test_preview("import_to_wutta_from_csv_warning")
|
||||
|
||||
def test_import_to_wutta_from_wutta_warning(self):
|
||||
self.do_test_preview("import_to_wutta_from_wutta_warning")
|
||||
|
|
@ -8,22 +8,24 @@ from wuttasync import util as mod
|
|||
class TestDataDiffs(TestCase):
|
||||
|
||||
def test_source_missing_field(self):
|
||||
source = {'foo': 'bar'}
|
||||
target = {'baz': 'xyz', 'foo': 'bar'}
|
||||
source = {"foo": "bar"}
|
||||
target = {"baz": "xyz", "foo": "bar"}
|
||||
self.assertRaises(KeyError, mod.data_diffs, source, target)
|
||||
|
||||
def test_target_missing_field(self):
|
||||
source = {'foo': 'bar', 'baz': 'xyz'}
|
||||
target = {'baz': 'xyz'}
|
||||
self.assertRaises(KeyError, mod.data_diffs, source, target, fields=['foo', 'baz'])
|
||||
source = {"foo": "bar", "baz": "xyz"}
|
||||
target = {"baz": "xyz"}
|
||||
self.assertRaises(
|
||||
KeyError, mod.data_diffs, source, target, fields=["foo", "baz"]
|
||||
)
|
||||
|
||||
def test_no_diffs(self):
|
||||
source = {'foo': 'bar', 'baz': 'xyz'}
|
||||
target = {'baz': 'xyz', 'foo': 'bar'}
|
||||
source = {"foo": "bar", "baz": "xyz"}
|
||||
target = {"baz": "xyz", "foo": "bar"}
|
||||
self.assertFalse(mod.data_diffs(source, target))
|
||||
|
||||
def test_with_diffs(self):
|
||||
source = {'foo': 'bar', 'baz': 'xyz'}
|
||||
target = {'baz': 'xyz', 'foo': 'BAR'}
|
||||
source = {"foo": "bar", "baz": "xyz"}
|
||||
target = {"baz": "xyz", "foo": "BAR"}
|
||||
result = mod.data_diffs(source, target)
|
||||
self.assertEqual(result, ['foo'])
|
||||
self.assertEqual(result, ["foo"])
|
||||
|
|
|
|||
4
tox.ini
4
tox.ini
|
|
@ -6,6 +6,10 @@ envlist = py38, py39, py310, py311
|
|||
extras = tests
|
||||
commands = pytest {posargs}
|
||||
|
||||
[testenv:pylint]
|
||||
basepython = python3.11
|
||||
commands = pylint wuttasync
|
||||
|
||||
[testenv:coverage]
|
||||
basepython = python3.11
|
||||
commands = pytest --cov=wuttasync --cov-report=html --cov-fail-under=100
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue