fix: format all code with black
and from now on should not deviate from that...
parent 45dabce956
commit 33ac0e008e
17 changed files with 730 additions and 576 deletions
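
To keep future changes conforming to this commit, a formatting check could be wired into CI or a local task. A minimal sketch, assuming black is installed; this helper script is illustrative and not part of the commit:

    # check_format.py -- hypothetical helper, not included in this commit
    import subprocess
    import sys

    # "black --check ." only reports files that would be reformatted; it does not modify them.
    # Exit code 0 means the tree already matches black's style.
    result = subprocess.run(["black", "--check", "."])
    sys.exit(result.returncode)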
docs/conf.py (34 changes)

@@ -8,35 +8,35 @@
 
 from importlib.metadata import version as get_version
 
-project = 'WuttaSync'
-copyright = '2024, Lance Edgar'
-author = 'Lance Edgar'
-release = get_version('WuttaSync')
+project = "WuttaSync"
+copyright = "2024, Lance Edgar"
+author = "Lance Edgar"
+release = get_version("WuttaSync")
 
 # -- General configuration ---------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
 
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.viewcode',
-    'sphinx.ext.todo',
-    'enum_tools.autoenum',
-    'sphinxcontrib.programoutput',
+    "sphinx.ext.autodoc",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.todo",
+    "enum_tools.autoenum",
+    "sphinxcontrib.programoutput",
 ]
 
-templates_path = ['_templates']
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+templates_path = ["_templates"]
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 intersphinx_mapping = {
-    'python': ('https://docs.python.org/3/', None),
-    'rattail-manual': ('https://docs.wuttaproject.org/rattail-manual/', None),
-    'wuttjamaican': ('https://docs.wuttaproject.org/wuttjamaican/', None),
+    "python": ("https://docs.python.org/3/", None),
+    "rattail-manual": ("https://docs.wuttaproject.org/rattail-manual/", None),
+    "wuttjamaican": ("https://docs.wuttaproject.org/wuttjamaican/", None),
 }
 
 
 # -- Options for HTML output -------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
 
-html_theme = 'furo'
-html_static_path = ['_static']
+html_theme = "furo"
+html_static_path = ["_static"]
@@ -3,4 +3,4 @@
 from importlib.metadata import version
 
 
-__version__ = version('WuttaSync')
+__version__ = version("WuttaSync")
@@ -79,7 +79,7 @@ class ImportCommandHandler(GenericHandler):
             self.import_handler = import_handler
         elif callable(import_handler):
             self.import_handler = import_handler(self.config)
-        else: # spec
+        else:  # spec
             factory = self.app.load_object(import_handler)
             self.import_handler = factory(self.config)
 
@@ -101,20 +101,22 @@ class ImportCommandHandler(GenericHandler):
         """
 
         # maybe just list models and bail
-        if params.get('list_models'):
+        if params.get("list_models"):
             self.list_models(params)
             return
 
         # otherwise process some data
         kw = dict(params)
-        models = kw.pop('models')
+        models = kw.pop("models")
         log.debug("using handler: %s", self.import_handler.get_spec())
         # TODO: need to use all/default models if none specified
         # (and should know models by now for logging purposes)
-        log.debug("running %s %s for: %s",
-                  self.import_handler,
-                  self.import_handler.orientation.value,
-                  ', '.join(models))
+        log.debug(
+            "running %s %s for: %s",
+            self.import_handler,
+            self.import_handler.orientation.value,
+            ", ".join(models),
+        )
         log.debug("params are: %s", kw)
         self.import_handler.process_data(*models, **kw)
 
@ -134,72 +136,93 @@ class ImportCommandHandler(GenericHandler):
|
||||||
|
|
||||||
|
|
||||||
def import_command_template(
|
def import_command_template(
|
||||||
|
models: Annotated[
|
||||||
models: Annotated[
|
Optional[List[str]],
|
||||||
Optional[List[str]],
|
typer.Argument(
|
||||||
typer.Argument(help="Model(s) to process. Can specify one or more, "
|
help="Model(s) to process. Can specify one or more, "
|
||||||
"or omit to process default models.")] = None,
|
"or omit to process default models."
|
||||||
|
),
|
||||||
list_models: Annotated[
|
] = None,
|
||||||
bool,
|
list_models: Annotated[
|
||||||
typer.Option('--list-models', '-l',
|
bool,
|
||||||
help="List available target models and exit.")] = False,
|
typer.Option(
|
||||||
|
"--list-models", "-l", help="List available target models and exit."
|
||||||
create: Annotated[
|
),
|
||||||
bool,
|
] = False,
|
||||||
typer.Option(help="Allow new target records to be created. "
|
create: Annotated[
|
||||||
"See aso --max-create.")] = True,
|
bool,
|
||||||
|
typer.Option(
|
||||||
update: Annotated[
|
help="Allow new target records to be created. " "See aso --max-create."
|
||||||
bool,
|
),
|
||||||
typer.Option(help="Allow existing target records to be updated. "
|
] = True,
|
||||||
"See also --max-update.")] = True,
|
update: Annotated[
|
||||||
|
bool,
|
||||||
delete: Annotated[
|
typer.Option(
|
||||||
bool,
|
help="Allow existing target records to be updated. "
|
||||||
typer.Option(help="Allow existing target records to be deleted. "
|
"See also --max-update."
|
||||||
"See also --max-delete.")] = False,
|
),
|
||||||
|
] = True,
|
||||||
fields: Annotated[
|
delete: Annotated[
|
||||||
str,
|
bool,
|
||||||
typer.Option('--fields',
|
typer.Option(
|
||||||
help="List of fields to process. See also --exclude and --key.")] = None,
|
help="Allow existing target records to be deleted. "
|
||||||
|
"See also --max-delete."
|
||||||
excluded_fields: Annotated[
|
),
|
||||||
str,
|
] = False,
|
||||||
typer.Option('--exclude',
|
fields: Annotated[
|
||||||
help="List of fields *not* to process. See also --fields.")] = None,
|
str,
|
||||||
|
typer.Option(
|
||||||
keys: Annotated[
|
"--fields", help="List of fields to process. See also --exclude and --key."
|
||||||
str,
|
),
|
||||||
typer.Option('--key', '--keys',
|
] = None,
|
||||||
help="List of fields to use as record key/identifier. "
|
excluded_fields: Annotated[
|
||||||
"See also --fields.")] = None,
|
str,
|
||||||
|
typer.Option(
|
||||||
max_create: Annotated[
|
"--exclude", help="List of fields *not* to process. See also --fields."
|
||||||
int,
|
),
|
||||||
typer.Option(help="Max number of target records to create (per model). "
|
] = None,
|
||||||
"See also --create.")] = None,
|
keys: Annotated[
|
||||||
|
str,
|
||||||
max_update: Annotated[
|
typer.Option(
|
||||||
int,
|
"--key",
|
||||||
typer.Option(help="Max number of target records to update (per model). "
|
"--keys",
|
||||||
"See also --update.")] = None,
|
help="List of fields to use as record key/identifier. "
|
||||||
|
"See also --fields.",
|
||||||
max_delete: Annotated[
|
),
|
||||||
int,
|
] = None,
|
||||||
typer.Option(help="Max number of target records to delete (per model). "
|
max_create: Annotated[
|
||||||
"See also --delete.")] = None,
|
int,
|
||||||
|
typer.Option(
|
||||||
max_total: Annotated[
|
help="Max number of target records to create (per model). "
|
||||||
int,
|
"See also --create."
|
||||||
typer.Option(help="Max number of *any* target record changes which may occur (per model).")] = None,
|
),
|
||||||
|
] = None,
|
||||||
dry_run: Annotated[
|
max_update: Annotated[
|
||||||
bool,
|
int,
|
||||||
typer.Option('--dry-run',
|
typer.Option(
|
||||||
help="Go through the motions, but rollback the transaction.")] = False,
|
help="Max number of target records to update (per model). "
|
||||||
|
"See also --update."
|
||||||
|
),
|
||||||
|
] = None,
|
||||||
|
max_delete: Annotated[
|
||||||
|
int,
|
||||||
|
typer.Option(
|
||||||
|
help="Max number of target records to delete (per model). "
|
||||||
|
"See also --delete."
|
||||||
|
),
|
||||||
|
] = None,
|
||||||
|
max_total: Annotated[
|
||||||
|
int,
|
||||||
|
typer.Option(
|
||||||
|
help="Max number of *any* target record changes which may occur (per model)."
|
||||||
|
),
|
||||||
|
] = None,
|
||||||
|
dry_run: Annotated[
|
||||||
|
bool,
|
||||||
|
typer.Option(
|
||||||
|
"--dry-run", help="Go through the motions, but rollback the transaction."
|
||||||
|
),
|
||||||
|
] = False,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Stub function which provides a common param signature; used with
|
Stub function which provides a common param signature; used with
|
||||||
|
@ -248,15 +271,18 @@ def import_command(fn):
|
||||||
|
|
||||||
|
|
||||||
def file_import_command_template(
|
def file_import_command_template(
|
||||||
|
input_file_path: Annotated[
|
||||||
input_file_path: Annotated[
|
Path,
|
||||||
Path,
|
typer.Option(
|
||||||
typer.Option('--input-path',
|
"--input-path",
|
||||||
exists=True, file_okay=True, dir_okay=True,
|
exists=True,
|
||||||
help="Path to input file(s). Can be a folder "
|
file_okay=True,
|
||||||
"if app logic can guess the filename(s); "
|
dir_okay=True,
|
||||||
"otherwise must be complete file path.")] = None,
|
help="Path to input file(s). Can be a folder "
|
||||||
|
"if app logic can guess the filename(s); "
|
||||||
|
"otherwise must be complete file path.",
|
||||||
|
),
|
||||||
|
] = None,
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Stub function to provide signature for import/export commands
|
Stub function to provide signature for import/export commands
|
||||||
|
@@ -278,9 +304,9 @@ def file_import_command(fn):
     original_sig = inspect.signature(fn)
     plain_import_sig = inspect.signature(import_command_template)
     file_import_sig = inspect.signature(file_import_command_template)
-    desired_params = (
-        list(plain_import_sig.parameters.values())
-        + list(file_import_sig.parameters.values()))
+    desired_params = list(plain_import_sig.parameters.values()) + list(
+        file_import_sig.parameters.values()
+    )
 
     params = list(original_sig.parameters.values())
     for i, param in enumerate(desired_params):
@@ -35,14 +35,12 @@ from .base import file_import_command, ImportCommandHandler
 
 @wutta_typer.command()
 @file_import_command
-def import_csv(
-        ctx: typer.Context,
-        **kwargs
-):
+def import_csv(ctx: typer.Context, **kwargs):
     """
     Import data from CSV file(s) to Wutta DB
     """
     config = ctx.parent.wutta_config
     handler = ImportCommandHandler(
-        config, import_handler='wuttasync.importing.csv:FromCsvToWutta')
+        config, import_handler="wuttasync.importing.csv:FromCsvToWutta"
+    )
     handler.run(ctx.params)
@@ -191,12 +191,15 @@ class Importer:
         self.config = config
         self.app = self.config.get_app()
 
-        self.create = kwargs.pop('create',
-                                 kwargs.pop('allow_create', self.allow_create))
-        self.update = kwargs.pop('update',
-                                 kwargs.pop('allow_update', self.allow_update))
-        self.delete = kwargs.pop('delete',
-                                 kwargs.pop('allow_delete', self.allow_delete))
+        self.create = kwargs.pop(
+            "create", kwargs.pop("allow_create", self.allow_create)
+        )
+        self.update = kwargs.pop(
+            "update", kwargs.pop("allow_update", self.allow_update)
+        )
+        self.delete = kwargs.pop(
+            "delete", kwargs.pop("allow_delete", self.allow_delete)
+        )
 
         self.__dict__.update(kwargs)
 
@ -207,12 +210,11 @@ class Importer:
|
||||||
self.fields = self.config.parse_list(self.fields)
|
self.fields = self.config.parse_list(self.fields)
|
||||||
|
|
||||||
# discard any fields caller asked to exclude
|
# discard any fields caller asked to exclude
|
||||||
excluded = getattr(self, 'excluded_fields', None)
|
excluded = getattr(self, "excluded_fields", None)
|
||||||
if excluded:
|
if excluded:
|
||||||
if isinstance(excluded, str):
|
if isinstance(excluded, str):
|
||||||
excluded = self.config.parse_list(excluded)
|
excluded = self.config.parse_list(excluded)
|
||||||
self.fields = [f for f in self.fields
|
self.fields = [f for f in self.fields if f not in excluded]
|
||||||
if f not in excluded]
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def orientation(self):
|
def orientation(self):
|
||||||
|
@ -245,7 +247,7 @@ class Importer:
|
||||||
"""
|
"""
|
||||||
Returns the display title for the target data model.
|
Returns the display title for the target data model.
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'model_title'):
|
if hasattr(self, "model_title"):
|
||||||
return self.model_title
|
return self.model_title
|
||||||
|
|
||||||
# TODO: this will fail if not using a model class, obviously..
|
# TODO: this will fail if not using a model class, obviously..
|
||||||
|
@ -264,7 +266,7 @@ class Importer:
|
||||||
|
|
||||||
:returns: Possibly empty list of "simple" field names.
|
:returns: Possibly empty list of "simple" field names.
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'simple_fields'):
|
if hasattr(self, "simple_fields"):
|
||||||
return self.simple_fields
|
return self.simple_fields
|
||||||
|
|
||||||
fields = get_columns(self.model_class)
|
fields = get_columns(self.model_class)
|
||||||
|
@ -287,7 +289,7 @@ class Importer:
|
||||||
|
|
||||||
:returns: List of all "supported" field names.
|
:returns: List of all "supported" field names.
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'supported_fields'):
|
if hasattr(self, "supported_fields"):
|
||||||
return self.supported_fields
|
return self.supported_fields
|
||||||
|
|
||||||
return self.get_simple_fields()
|
return self.get_simple_fields()
|
||||||
|
@ -306,7 +308,7 @@ class Importer:
|
||||||
|
|
||||||
:returns: List of "effective" field names.
|
:returns: List of "effective" field names.
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'fields') and self.fields is not None:
|
if hasattr(self, "fields") and self.fields is not None:
|
||||||
return self.fields
|
return self.fields
|
||||||
|
|
||||||
return self.get_supported_fields()
|
return self.get_supported_fields()
|
||||||
|
@ -322,9 +324,9 @@ class Importer:
|
||||||
"""
|
"""
|
||||||
keys = None
|
keys = None
|
||||||
# nb. prefer 'keys' but use 'key' as fallback
|
# nb. prefer 'keys' but use 'key' as fallback
|
||||||
if hasattr(self, 'keys'):
|
if hasattr(self, "keys"):
|
||||||
keys = self.keys
|
keys = self.keys
|
||||||
elif hasattr(self, 'key'):
|
elif hasattr(self, "key"):
|
||||||
keys = self.key
|
keys = self.key
|
||||||
if keys:
|
if keys:
|
||||||
if isinstance(keys, str):
|
if isinstance(keys, str):
|
||||||
|
@ -401,7 +403,7 @@ class Importer:
|
||||||
updated = []
|
updated = []
|
||||||
deleted = []
|
deleted = []
|
||||||
|
|
||||||
log.debug("using key fields: %s", ', '.join(self.get_keys()))
|
log.debug("using key fields: %s", ", ".join(self.get_keys()))
|
||||||
|
|
||||||
# get complete set of normalized source data
|
# get complete set of normalized source data
|
||||||
if source_data is None:
|
if source_data is None:
|
||||||
|
@ -411,8 +413,7 @@ class Importer:
|
||||||
source_data, source_keys = self.get_unique_data(source_data)
|
source_data, source_keys = self.get_unique_data(source_data)
|
||||||
|
|
||||||
model_title = self.get_model_title()
|
model_title = self.get_model_title()
|
||||||
log.debug(f"got %s {model_title} records from source",
|
log.debug(f"got %s {model_title} records from source", len(source_data))
|
||||||
len(source_data))
|
|
||||||
|
|
||||||
# maybe cache existing target data
|
# maybe cache existing target data
|
||||||
if self.caches_target:
|
if self.caches_target:
|
||||||
|
@ -426,8 +427,10 @@ class Importer:
|
||||||
if self.delete:
|
if self.delete:
|
||||||
changes = len(created) + len(updated)
|
changes = len(created) + len(updated)
|
||||||
if self.max_total and changes >= self.max_total:
|
if self.max_total and changes >= self.max_total:
|
||||||
log.debug("max of %s total changes already reached; skipping deletions",
|
log.debug(
|
||||||
self.max_total)
|
"max of %s total changes already reached; skipping deletions",
|
||||||
|
self.max_total,
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
deleted = self.do_delete(source_keys, changes, progress=progress)
|
deleted = self.do_delete(source_keys, changes, progress=progress)
|
||||||
|
|
||||||
|
@ -480,21 +483,32 @@ class Importer:
|
||||||
if diffs:
|
if diffs:
|
||||||
|
|
||||||
# data differs, so update target object
|
# data differs, so update target object
|
||||||
log.debug("fields (%s) differed for target data: %s and source data: %s",
|
log.debug(
|
||||||
','.join(diffs), target_data, source_data)
|
"fields (%s) differed for target data: %s and source data: %s",
|
||||||
target_object = self.update_target_object(target_object,
|
",".join(diffs),
|
||||||
source_data,
|
target_data,
|
||||||
target_data=target_data)
|
source_data,
|
||||||
|
)
|
||||||
|
target_object = self.update_target_object(
|
||||||
|
target_object, source_data, target_data=target_data
|
||||||
|
)
|
||||||
updated.append((target_object, target_data, source_data))
|
updated.append((target_object, target_data, source_data))
|
||||||
|
|
||||||
# stop if we reach max allowed
|
# stop if we reach max allowed
|
||||||
if self.max_update and len(updated) >= self.max_update:
|
if self.max_update and len(updated) >= self.max_update:
|
||||||
log.warning("max of %s *updated* records has been reached; stopping now",
|
log.warning(
|
||||||
self.max_update)
|
"max of %s *updated* records has been reached; stopping now",
|
||||||
|
self.max_update,
|
||||||
|
)
|
||||||
raise ImportLimitReached()
|
raise ImportLimitReached()
|
||||||
elif self.max_total and (len(created) + len(updated)) >= self.max_total:
|
elif (
|
||||||
log.warning("max of %s *total changes* has been reached; stopping now",
|
self.max_total
|
||||||
self.max_total)
|
and (len(created) + len(updated)) >= self.max_total
|
||||||
|
):
|
||||||
|
log.warning(
|
||||||
|
"max of %s *total changes* has been reached; stopping now",
|
||||||
|
self.max_total,
|
||||||
|
)
|
||||||
raise ImportLimitReached()
|
raise ImportLimitReached()
|
||||||
|
|
||||||
elif not target_object and self.create:
|
elif not target_object and self.create:
|
||||||
|
@ -513,12 +527,19 @@ class Importer:
|
||||||
|
|
||||||
# stop if we reach max allowed
|
# stop if we reach max allowed
|
||||||
if self.max_create and len(created) >= self.max_create:
|
if self.max_create and len(created) >= self.max_create:
|
||||||
log.warning("max of %s *created* records has been reached; stopping now",
|
log.warning(
|
||||||
self.max_create)
|
"max of %s *created* records has been reached; stopping now",
|
||||||
|
self.max_create,
|
||||||
|
)
|
||||||
raise ImportLimitReached()
|
raise ImportLimitReached()
|
||||||
elif self.max_total and (len(created) + len(updated)) >= self.max_total:
|
elif (
|
||||||
log.warning("max of %s *total changes* has been reached; stopping now",
|
self.max_total
|
||||||
self.max_total)
|
and (len(created) + len(updated)) >= self.max_total
|
||||||
|
):
|
||||||
|
log.warning(
|
||||||
|
"max of %s *total changes* has been reached; stopping now",
|
||||||
|
self.max_total,
|
||||||
|
)
|
||||||
raise ImportLimitReached()
|
raise ImportLimitReached()
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
@ -527,8 +548,12 @@ class Importer:
|
||||||
actioning = self.actioning.capitalize()
|
actioning = self.actioning.capitalize()
|
||||||
target_title = self.handler.get_target_title()
|
target_title = self.handler.get_target_title()
|
||||||
try:
|
try:
|
||||||
self.app.progress_loop(create_update, all_source_data, progress,
|
self.app.progress_loop(
|
||||||
message=f"{actioning} {model_title} data to {target_title}")
|
create_update,
|
||||||
|
all_source_data,
|
||||||
|
progress,
|
||||||
|
message=f"{actioning} {model_title} data to {target_title}",
|
||||||
|
)
|
||||||
except ImportLimitReached:
|
except ImportLimitReached:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -575,27 +600,35 @@ class Importer:
|
||||||
|
|
||||||
def delete(key, i):
|
def delete(key, i):
|
||||||
cached = self.cached_target.pop(key)
|
cached = self.cached_target.pop(key)
|
||||||
obj = cached['object']
|
obj = cached["object"]
|
||||||
|
|
||||||
# delete target object
|
# delete target object
|
||||||
log.debug("deleting %s %s: %s", model_title, key, obj)
|
log.debug("deleting %s %s: %s", model_title, key, obj)
|
||||||
if self.delete_target_object(obj):
|
if self.delete_target_object(obj):
|
||||||
deleted.append((obj, cached['data']))
|
deleted.append((obj, cached["data"]))
|
||||||
|
|
||||||
# stop if we reach max allowed
|
# stop if we reach max allowed
|
||||||
if self.max_delete and len(deleted) >= self.max_delete:
|
if self.max_delete and len(deleted) >= self.max_delete:
|
||||||
log.warning("max of %s *deleted* records has been reached; stopping now",
|
log.warning(
|
||||||
self.max_delete)
|
"max of %s *deleted* records has been reached; stopping now",
|
||||||
|
self.max_delete,
|
||||||
|
)
|
||||||
raise ImportLimitReached()
|
raise ImportLimitReached()
|
||||||
elif self.max_total and (changes + len(deleted)) >= self.max_total:
|
elif self.max_total and (changes + len(deleted)) >= self.max_total:
|
||||||
log.warning("max of %s *total changes* has been reached; stopping now",
|
log.warning(
|
||||||
self.max_total)
|
"max of %s *total changes* has been reached; stopping now",
|
||||||
|
self.max_total,
|
||||||
|
)
|
||||||
raise ImportLimitReached()
|
raise ImportLimitReached()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
model_title = self.get_model_title()
|
model_title = self.get_model_title()
|
||||||
self.app.progress_loop(delete, sorted(deletable), progress,
|
self.app.progress_loop(
|
||||||
message=f"Deleting {model_title} records")
|
delete,
|
||||||
|
sorted(deletable),
|
||||||
|
progress,
|
||||||
|
message=f"Deleting {model_title} records",
|
||||||
|
)
|
||||||
except ImportLimitReached:
|
except ImportLimitReached:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -685,8 +718,12 @@ class Importer:
|
||||||
|
|
||||||
model_title = self.get_model_title()
|
model_title = self.get_model_title()
|
||||||
source_title = self.handler.get_source_title()
|
source_title = self.handler.get_source_title()
|
||||||
self.app.progress_loop(normalize, source_objects, progress,
|
self.app.progress_loop(
|
||||||
message=f"Reading {model_title} data from {source_title}")
|
normalize,
|
||||||
|
source_objects,
|
||||||
|
progress,
|
||||||
|
message=f"Reading {model_title} data from {source_title}",
|
||||||
|
)
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
def get_unique_data(self, source_data):
|
def get_unique_data(self, source_data):
|
||||||
|
@ -724,10 +761,12 @@ class Importer:
|
||||||
for data in source_data:
|
for data in source_data:
|
||||||
key = self.get_record_key(data)
|
key = self.get_record_key(data)
|
||||||
if key in unique:
|
if key in unique:
|
||||||
log.warning("duplicate %s records detected from %s for key: %s",
|
log.warning(
|
||||||
self.get_model_title(),
|
"duplicate %s records detected from %s for key: %s",
|
||||||
self.handler.get_source_title(),
|
self.get_model_title(),
|
||||||
key)
|
self.handler.get_source_title(),
|
||||||
|
key,
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
unique[key] = data
|
unique[key] = data
|
||||||
return list(unique.values()), set(unique)
|
return list(unique.values()), set(unique)
|
||||||
|
@ -830,12 +869,16 @@ class Importer:
|
||||||
data = self.normalize_target_object(obj)
|
data = self.normalize_target_object(obj)
|
||||||
if data:
|
if data:
|
||||||
key = self.get_record_key(data)
|
key = self.get_record_key(data)
|
||||||
cached[key] = {'object': obj, 'data': data}
|
cached[key] = {"object": obj, "data": data}
|
||||||
|
|
||||||
model_title = self.get_model_title()
|
model_title = self.get_model_title()
|
||||||
target_title = self.handler.get_target_title()
|
target_title = self.handler.get_target_title()
|
||||||
self.app.progress_loop(cache, objects, progress,
|
self.app.progress_loop(
|
||||||
message=f"Reading {model_title} data from {target_title}")
|
cache,
|
||||||
|
objects,
|
||||||
|
progress,
|
||||||
|
message=f"Reading {model_title} data from {target_title}",
|
||||||
|
)
|
||||||
log.debug(f"cached %s {model_title} records from target", len(cached))
|
log.debug(f"cached %s {model_title} records from target", len(cached))
|
||||||
return cached
|
return cached
|
||||||
|
|
||||||
|
@ -877,7 +920,7 @@ class Importer:
|
||||||
"""
|
"""
|
||||||
if self.caches_target and self.cached_target is not None:
|
if self.caches_target and self.cached_target is not None:
|
||||||
cached = self.cached_target.get(key)
|
cached = self.cached_target.get(key)
|
||||||
return cached['object'] if cached else None
|
return cached["object"] if cached else None
|
||||||
|
|
||||||
def normalize_target_object(self, obj):
|
def normalize_target_object(self, obj):
|
||||||
"""
|
"""
|
||||||
|
@ -901,10 +944,8 @@ class Importer:
|
||||||
:returns: Dict of normalized data fields, or ``None``.
|
:returns: Dict of normalized data fields, or ``None``.
|
||||||
"""
|
"""
|
||||||
fields = self.get_fields()
|
fields = self.get_fields()
|
||||||
fields = [f for f in self.get_simple_fields()
|
fields = [f for f in self.get_simple_fields() if f in fields]
|
||||||
if f in fields]
|
data = dict([(field, getattr(obj, field)) for field in fields])
|
||||||
data = dict([(field, getattr(obj, field))
|
|
||||||
for field in fields])
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def get_deletable_keys(self, progress=None):
|
def get_deletable_keys(self, progress=None):
|
||||||
|
@ -930,13 +971,17 @@ class Importer:
|
||||||
keys = set()
|
keys = set()
|
||||||
|
|
||||||
def check(key, i):
|
def check(key, i):
|
||||||
data = self.cached_target[key]['data']
|
data = self.cached_target[key]["data"]
|
||||||
obj = self.cached_target[key]['object']
|
obj = self.cached_target[key]["object"]
|
||||||
if self.can_delete_object(obj, data):
|
if self.can_delete_object(obj, data):
|
||||||
keys.add(key)
|
keys.add(key)
|
||||||
|
|
||||||
self.app.progress_loop(check, set(self.cached_target), progress,
|
self.app.progress_loop(
|
||||||
message="Determining which objects can be deleted")
|
check,
|
||||||
|
set(self.cached_target),
|
||||||
|
progress,
|
||||||
|
message="Determining which objects can be deleted",
|
||||||
|
)
|
||||||
return keys
|
return keys
|
||||||
|
|
||||||
##############################
|
##############################
|
||||||
|
@ -954,7 +999,7 @@ class Importer:
|
||||||
|
|
||||||
:returns: New object for the target side, or ``None``.
|
:returns: New object for the target side, or ``None``.
|
||||||
"""
|
"""
|
||||||
if source_data.get('__ignoreme__'):
|
if source_data.get("__ignoreme__"):
|
||||||
return
|
return
|
||||||
|
|
||||||
obj = self.make_empty_object(key)
|
obj = self.make_empty_object(key)
|
||||||
|
@ -1035,9 +1080,11 @@ class Importer:
|
||||||
|
|
||||||
# field is eligible for update generally, so compare
|
# field is eligible for update generally, so compare
|
||||||
# values between records
|
# values between records
|
||||||
if (not target_data
|
if (
|
||||||
|
not target_data
|
||||||
or field not in target_data
|
or field not in target_data
|
||||||
or target_data[field] != source_data[field]):
|
or target_data[field] != source_data[field]
|
||||||
|
):
|
||||||
|
|
||||||
# data mismatch; update field for target object
|
# data mismatch; update field for target object
|
||||||
setattr(obj, field, source_data[field])
|
setattr(obj, field, source_data[field])
|
||||||
|
@ -1150,7 +1197,7 @@ class FromFile(Importer):
|
||||||
|
|
||||||
:returns: Path to input file.
|
:returns: Path to input file.
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'input_file_path'):
|
if hasattr(self, "input_file_path"):
|
||||||
return self.input_file_path
|
return self.input_file_path
|
||||||
|
|
||||||
folder = self.get_input_file_dir()
|
folder = self.get_input_file_dir()
|
||||||
|
@ -1166,7 +1213,7 @@ class FromFile(Importer):
|
||||||
|
|
||||||
:returns: Path to folder with input file(s).
|
:returns: Path to folder with input file(s).
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'input_file_dir'):
|
if hasattr(self, "input_file_dir"):
|
||||||
return self.input_file_dir
|
return self.input_file_dir
|
||||||
|
|
||||||
raise NotImplementedError("can't guess path to input file(s) folder")
|
raise NotImplementedError("can't guess path to input file(s) folder")
|
||||||
|
@ -1180,7 +1227,7 @@ class FromFile(Importer):
|
||||||
|
|
||||||
:returns: Input filename, sans folder path.
|
:returns: Input filename, sans folder path.
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'input_file_name'):
|
if hasattr(self, "input_file_name"):
|
||||||
return self.input_file_name
|
return self.input_file_name
|
||||||
|
|
||||||
raise NotImplementedError("can't guess input filename")
|
raise NotImplementedError("can't guess input filename")
|
||||||
|
@ -1218,7 +1265,7 @@ class ToSqlalchemy(Importer):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
caches_target = True
|
caches_target = True
|
||||||
"" # nb. suppress sphinx docs
|
"" # nb. suppress sphinx docs
|
||||||
|
|
||||||
def get_target_object(self, key):
|
def get_target_object(self, key):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -61,7 +61,7 @@ class FromCsv(FromFile):
|
||||||
:class:`python:csv.DictReader` instance.
|
:class:`python:csv.DictReader` instance.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
csv_encoding = 'utf_8'
|
csv_encoding = "utf_8"
|
||||||
"""
|
"""
|
||||||
Encoding used by the CSV input file.
|
Encoding used by the CSV input file.
|
||||||
|
|
||||||
|
@ -78,11 +78,11 @@ class FromCsv(FromFile):
|
||||||
:meth:`~wuttasync.importing.base.Importer.get_model_title()`
|
:meth:`~wuttasync.importing.base.Importer.get_model_title()`
|
||||||
to obtain the model name.
|
to obtain the model name.
|
||||||
"""
|
"""
|
||||||
if hasattr(self, 'input_file_name'):
|
if hasattr(self, "input_file_name"):
|
||||||
return self.input_file_name
|
return self.input_file_name
|
||||||
|
|
||||||
model_title = self.get_model_title()
|
model_title = self.get_model_title()
|
||||||
return f'{model_title}.csv'
|
return f"{model_title}.csv"
|
||||||
|
|
||||||
def open_input_file(self):
|
def open_input_file(self):
|
||||||
"""
|
"""
|
||||||
|
@ -104,7 +104,7 @@ class FromCsv(FromFile):
|
||||||
"""
|
"""
|
||||||
path = self.get_input_file_path()
|
path = self.get_input_file_path()
|
||||||
log.debug("opening input file: %s", path)
|
log.debug("opening input file: %s", path)
|
||||||
self.input_file = open(path, 'rt', encoding=self.csv_encoding)
|
self.input_file = open(path, "rt", encoding=self.csv_encoding)
|
||||||
self.input_reader = csv.DictReader(self.input_file)
|
self.input_reader = csv.DictReader(self.input_file)
|
||||||
|
|
||||||
# nb. importer may have all supported fields by default, so
|
# nb. importer may have all supported fields by default, so
|
||||||
|
@ -112,8 +112,7 @@ class FromCsv(FromFile):
|
||||||
fields = self.get_fields()
|
fields = self.get_fields()
|
||||||
orientation = self.orientation.value
|
orientation = self.orientation.value
|
||||||
log.debug(f"supported fields for {orientation}: %s", fields)
|
log.debug(f"supported fields for {orientation}: %s", fields)
|
||||||
self.fields = [f for f in self.input_reader.fieldnames or []
|
self.fields = [f for f in self.input_reader.fieldnames or [] if f in fields]
|
||||||
if f in fields]
|
|
||||||
log.debug("fields present in source data: %s", self.fields)
|
log.debug("fields present in source data: %s", self.fields)
|
||||||
if not self.fields:
|
if not self.fields:
|
||||||
self.input_file.close()
|
self.input_file.close()
|
||||||
|
@ -188,7 +187,8 @@ class FromCsvToSqlalchemyHandlerMixin:
|
||||||
|
|
||||||
This all happens within :meth:`define_importers()`.
|
This all happens within :meth:`define_importers()`.
|
||||||
"""
|
"""
|
||||||
source_key = 'csv'
|
|
||||||
|
source_key = "csv"
|
||||||
generic_source_title = "CSV"
|
generic_source_title = "CSV"
|
||||||
|
|
||||||
FromImporterBase = FromCsv
|
FromImporterBase = FromCsv
|
||||||
|
@ -237,15 +237,18 @@ class FromCsvToSqlalchemyHandlerMixin:
|
||||||
# mostly try to make an importer for every data model
|
# mostly try to make an importer for every data model
|
||||||
for name in dir(model):
|
for name in dir(model):
|
||||||
cls = getattr(model, name)
|
cls = getattr(model, name)
|
||||||
if isinstance(cls, type) and issubclass(cls, model.Base) and cls is not model.Base:
|
if (
|
||||||
|
isinstance(cls, type)
|
||||||
|
and issubclass(cls, model.Base)
|
||||||
|
and cls is not model.Base
|
||||||
|
):
|
||||||
importers[name] = self.make_importer_factory(cls, name)
|
importers[name] = self.make_importer_factory(cls, name)
|
||||||
|
|
||||||
# sort importers according to schema topography
|
# sort importers according to schema topography
|
||||||
topo_sortkey = make_topo_sortkey(model)
|
topo_sortkey = make_topo_sortkey(model)
|
||||||
importers = OrderedDict([
|
importers = OrderedDict(
|
||||||
(name, importers[name])
|
[(name, importers[name]) for name in sorted(importers, key=topo_sortkey)]
|
||||||
for name in sorted(importers, key=topo_sortkey)
|
)
|
||||||
])
|
|
||||||
|
|
||||||
return importers
|
return importers
|
||||||
|
|
||||||
|
@ -269,11 +272,14 @@ class FromCsvToSqlalchemyHandlerMixin:
|
||||||
:returns: The new class, meant to process import/export
|
:returns: The new class, meant to process import/export
|
||||||
targeting the given data model.
|
targeting the given data model.
|
||||||
"""
|
"""
|
||||||
return type(f'{name}Importer',
|
return type(
|
||||||
(FromCsvToSqlalchemyMixin, self.FromImporterBase, self.ToImporterBase), {
|
f"{name}Importer",
|
||||||
'model_class': model_class,
|
(FromCsvToSqlalchemyMixin, self.FromImporterBase, self.ToImporterBase),
|
||||||
'key': list(get_primary_keys(model_class)),
|
{
|
||||||
})
|
"model_class": model_class,
|
||||||
|
"key": list(get_primary_keys(model_class)),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class FromCsvToWutta(FromCsvToSqlalchemyHandlerMixin, FromFileHandler, ToWuttaHandler):
|
class FromCsvToWutta(FromCsvToSqlalchemyHandlerMixin, FromFileHandler, ToWuttaHandler):
|
||||||
|
@ -283,6 +289,7 @@ class FromCsvToWutta(FromCsvToSqlalchemyHandlerMixin, FromFileHandler, ToWuttaHa
|
||||||
This uses :class:`FromCsvToSqlalchemyHandlerMixin` for most of the
|
This uses :class:`FromCsvToSqlalchemyHandlerMixin` for most of the
|
||||||
heavy lifting.
|
heavy lifting.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
ToImporterBase = ToWutta
|
ToImporterBase = ToWutta
|
||||||
|
|
||||||
def get_target_model(self):
|
def get_target_model(self):
|
||||||
|
|
|
@@ -39,8 +39,9 @@ class Orientation(Enum):
     """
     Enum values for :attr:`ImportHandler.orientation`.
     """
-    IMPORT = 'import'
-    EXPORT = 'export'
+
+    IMPORT = "import"
+    EXPORT = "export"
 
 
 class ImportHandler(GenericHandler):
@@ -158,7 +159,7 @@ class ImportHandler(GenericHandler):
         * ``'importing'``
         * ``'exporting'``
         """
-        return f'{self.orientation.value}ing'
+        return f"{self.orientation.value}ing"
 
     @classmethod
     def get_key(cls):
@@ -174,7 +175,7 @@ class ImportHandler(GenericHandler):
         here; but only one will be configured as the "default" handler
         for that key. See also :meth:`get_spec()`.
         """
-        return f'to_{cls.target_key}.from_{cls.source_key}.{cls.orientation.value}'
+        return f"to_{cls.target_key}.from_{cls.source_key}.{cls.orientation.value}"
 
     @classmethod
     def get_spec(cls):
@@ -188,7 +189,7 @@ class ImportHandler(GenericHandler):
 
         See also :meth:`get_key()`.
         """
-        return f'{cls.__module__}:{cls.__name__}'
+        return f"{cls.__module__}:{cls.__name__}"
 
     def get_title(self):
         """
@@ -210,9 +211,9 @@ class ImportHandler(GenericHandler):
 
         See also :meth:`get_title()` and :meth:`get_target_title()`.
         """
-        if hasattr(self, 'source_title'):
+        if hasattr(self, "source_title"):
             return self.source_title
-        if hasattr(self, 'generic_source_title'):
+        if hasattr(self, "generic_source_title"):
             return self.generic_source_title
         return self.source_key
 
@@ -222,9 +223,9 @@ class ImportHandler(GenericHandler):
 
         See also :meth:`get_title()` and :meth:`get_source_title()`.
         """
-        if hasattr(self, 'target_title'):
+        if hasattr(self, "target_title"):
             return self.target_title
-        if hasattr(self, 'generic_target_title'):
+        if hasattr(self, "generic_target_title"):
             return self.generic_target_title
         return self.target_key
 
@ -269,7 +270,9 @@ class ImportHandler(GenericHandler):
|
||||||
msg = "%s: added %d; updated %d; deleted %d %s records"
|
msg = "%s: added %d; updated %d; deleted %d %s records"
|
||||||
if self.dry_run:
|
if self.dry_run:
|
||||||
msg += " (dry run)"
|
msg += " (dry run)"
|
||||||
log.info(msg, self.get_title(), len(created), len(updated), len(deleted), key)
|
log.info(
|
||||||
|
msg, self.get_title(), len(created), len(updated), len(deleted), key
|
||||||
|
)
|
||||||
|
|
||||||
except:
|
except:
|
||||||
# TODO: what should happen here?
|
# TODO: what should happen here?
|
||||||
|
@ -308,8 +311,8 @@ class ImportHandler(GenericHandler):
|
||||||
|
|
||||||
:returns: Dict of kwargs, "post-consumption."
|
:returns: Dict of kwargs, "post-consumption."
|
||||||
"""
|
"""
|
||||||
if 'dry_run' in kwargs:
|
if "dry_run" in kwargs:
|
||||||
self.dry_run = kwargs['dry_run']
|
self.dry_run = kwargs["dry_run"]
|
||||||
|
|
||||||
return kwargs
|
return kwargs
|
||||||
|
|
||||||
|
@ -485,11 +488,11 @@ class ImportHandler(GenericHandler):
|
||||||
raise KeyError(f"unknown {orientation} key: {key}")
|
raise KeyError(f"unknown {orientation} key: {key}")
|
||||||
|
|
||||||
kwargs = self.get_importer_kwargs(key, **kwargs)
|
kwargs = self.get_importer_kwargs(key, **kwargs)
|
||||||
kwargs['handler'] = self
|
kwargs["handler"] = self
|
||||||
|
|
||||||
# nb. default logic should (normally) determine keys
|
# nb. default logic should (normally) determine keys
|
||||||
if 'keys' in kwargs and not kwargs['keys']:
|
if "keys" in kwargs and not kwargs["keys"]:
|
||||||
del kwargs['keys']
|
del kwargs["keys"]
|
||||||
|
|
||||||
factory = self.importers[key]
|
factory = self.importers[key]
|
||||||
return factory(self.config, **kwargs)
|
return factory(self.config, **kwargs)
|
||||||
|
@ -524,12 +527,12 @@ class FromFileHandler(ImportHandler):
|
||||||
|
|
||||||
# interpret file vs. folder path
|
# interpret file vs. folder path
|
||||||
# nb. this assumes FromFile importer/exporter
|
# nb. this assumes FromFile importer/exporter
|
||||||
path = kwargs.pop('input_file_path', None)
|
path = kwargs.pop("input_file_path", None)
|
||||||
if path:
|
if path:
|
||||||
if not kwargs.get('input_file_dir') and os.path.isdir(path):
|
if not kwargs.get("input_file_dir") and os.path.isdir(path):
|
||||||
kwargs['input_file_dir'] = path
|
kwargs["input_file_dir"] = path
|
||||||
else:
|
else:
|
||||||
kwargs['input_file_path'] = path
|
kwargs["input_file_path"] = path
|
||||||
|
|
||||||
# and carry on
|
# and carry on
|
||||||
super().process_data(*keys, **kwargs)
|
super().process_data(*keys, **kwargs)
|
||||||
|
@ -586,5 +589,5 @@ class ToSqlalchemyHandler(ImportHandler):
|
||||||
def get_importer_kwargs(self, key, **kwargs):
|
def get_importer_kwargs(self, key, **kwargs):
|
||||||
""" """
|
""" """
|
||||||
kwargs = super().get_importer_kwargs(key, **kwargs)
|
kwargs = super().get_importer_kwargs(key, **kwargs)
|
||||||
kwargs.setdefault('target_session', self.target_session)
|
kwargs.setdefault("target_session", self.target_session)
|
||||||
return kwargs
|
return kwargs
|
||||||
|
|
|
@ -33,15 +33,15 @@ class ToWuttaHandler(ToSqlalchemyHandler):
|
||||||
database`).
|
database`).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
target_key = 'wutta'
|
target_key = "wutta"
|
||||||
"" # nb. suppress docs
|
"" # nb. suppress docs
|
||||||
|
|
||||||
def get_target_title(self):
|
def get_target_title(self):
|
||||||
""" """
|
""" """
|
||||||
# nb. we override parent to use app title as default
|
# nb. we override parent to use app title as default
|
||||||
if hasattr(self, 'target_title'):
|
if hasattr(self, "target_title"):
|
||||||
return self.target_title
|
return self.target_title
|
||||||
if hasattr(self, 'generic_target_title'):
|
if hasattr(self, "generic_target_title"):
|
||||||
return self.generic_target_title
|
return self.generic_target_title
|
||||||
return self.app.get_title()
|
return self.app.get_title()
|
||||||
|
|
||||||
|
|
tasks.py (10 changes)

@@ -15,10 +15,10 @@ def release(c, skip_tests=False):
     Release a new version of WuttaSync
     """
     if not skip_tests:
-        c.run('pytest')
+        c.run("pytest")
 
-    if os.path.exists('dist'):
-        shutil.rmtree('dist')
+    if os.path.exists("dist"):
+        shutil.rmtree("dist")
 
-    c.run('python -m build --sdist')
-    c.run('twine upload dist/*')
+    c.run("python -m build --sdist")
+    c.run("twine upload dist/*")
@@ -1,4 +1,4 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-
 
 import inspect
 from unittest import TestCase
@@ -19,7 +19,7 @@ class TestImportCommandHandler(DataTestCase):
         handler = self.make_handler()
         self.assertIsNone(handler.import_handler)
 
-        FromCsvToWutta = self.app.load_object('wuttasync.importing.csv:FromCsvToWutta')
+        FromCsvToWutta = self.app.load_object("wuttasync.importing.csv:FromCsvToWutta")
 
         # as spec
         handler = self.make_handler(import_handler=FromCsvToWutta.get_spec())
@ -35,26 +35,30 @@ class TestImportCommandHandler(DataTestCase):
|
||||||
self.assertIs(handler.import_handler, myhandler)
|
self.assertIs(handler.import_handler, myhandler)
|
||||||
|
|
||||||
def test_run(self):
|
def test_run(self):
|
||||||
handler = self.make_handler(import_handler='wuttasync.importing.csv:FromCsvToWutta')
|
handler = self.make_handler(
|
||||||
|
import_handler="wuttasync.importing.csv:FromCsvToWutta"
|
||||||
|
)
|
||||||
|
|
||||||
with patch.object(handler, 'list_models') as list_models:
|
with patch.object(handler, "list_models") as list_models:
|
||||||
handler.run({'list_models': True})
|
handler.run({"list_models": True})
|
||||||
list_models.assert_called_once_with({'list_models': True})
|
list_models.assert_called_once_with({"list_models": True})
|
||||||
|
|
||||||
with patch.object(handler, 'import_handler') as import_handler:
|
with patch.object(handler, "import_handler") as import_handler:
|
||||||
handler.run({'models': []})
|
handler.run({"models": []})
|
||||||
import_handler.process_data.assert_called_once_with()
|
import_handler.process_data.assert_called_once_with()
|
||||||
|
|
||||||
def test_list_models(self):
|
def test_list_models(self):
|
||||||
handler = self.make_handler(import_handler='wuttasync.importing.csv:FromCsvToWutta')
|
handler = self.make_handler(
|
||||||
|
import_handler="wuttasync.importing.csv:FromCsvToWutta"
|
||||||
|
)
|
||||||
|
|
||||||
with patch.object(mod, 'sys') as sys:
|
with patch.object(mod, "sys") as sys:
|
||||||
handler.list_models({})
|
handler.list_models({})
|
||||||
# just test a few random things we expect to see
|
# just test a few random things we expect to see
|
||||||
self.assertTrue(sys.stdout.write.has_call('ALL MODELS:\n'))
|
self.assertTrue(sys.stdout.write.has_call("ALL MODELS:\n"))
|
||||||
self.assertTrue(sys.stdout.write.has_call('Person'))
|
self.assertTrue(sys.stdout.write.has_call("Person"))
|
||||||
self.assertTrue(sys.stdout.write.has_call('User'))
|
self.assertTrue(sys.stdout.write.has_call("User"))
|
||||||
self.assertTrue(sys.stdout.write.has_call('Upgrade'))
|
self.assertTrue(sys.stdout.write.has_call("Upgrade"))
|
||||||
|
|
||||||
|
|
||||||
class TestImporterCommand(TestCase):
|
class TestImporterCommand(TestCase):
|
||||||
|
@ -64,12 +68,12 @@ class TestImporterCommand(TestCase):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
sig1 = inspect.signature(myfunc)
|
sig1 = inspect.signature(myfunc)
|
||||||
self.assertIn('kwargs', sig1.parameters)
|
self.assertIn("kwargs", sig1.parameters)
|
||||||
self.assertNotIn('dry_run', sig1.parameters)
|
self.assertNotIn("dry_run", sig1.parameters)
|
||||||
wrapt = mod.import_command(myfunc)
|
wrapt = mod.import_command(myfunc)
|
||||||
sig2 = inspect.signature(wrapt)
|
sig2 = inspect.signature(wrapt)
|
||||||
self.assertNotIn('kwargs', sig2.parameters)
|
self.assertNotIn("kwargs", sig2.parameters)
|
||||||
self.assertIn('dry_run', sig2.parameters)
|
self.assertIn("dry_run", sig2.parameters)
|
||||||
|
|
||||||
|
|
||||||
class TestFileImporterCommand(TestCase):
|
class TestFileImporterCommand(TestCase):
|
||||||
|
@ -79,11 +83,11 @@ class TestFileImporterCommand(TestCase):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
sig1 = inspect.signature(myfunc)
|
sig1 = inspect.signature(myfunc)
|
||||||
self.assertIn('kwargs', sig1.parameters)
|
self.assertIn("kwargs", sig1.parameters)
|
||||||
self.assertNotIn('dry_run', sig1.parameters)
|
self.assertNotIn("dry_run", sig1.parameters)
|
||||||
self.assertNotIn('input_file_path', sig1.parameters)
|
self.assertNotIn("input_file_path", sig1.parameters)
|
||||||
wrapt = mod.file_import_command(myfunc)
|
wrapt = mod.file_import_command(myfunc)
|
||||||
sig2 = inspect.signature(wrapt)
|
sig2 = inspect.signature(wrapt)
|
||||||
self.assertNotIn('kwargs', sig2.parameters)
|
self.assertNotIn("kwargs", sig2.parameters)
|
||||||
self.assertIn('dry_run', sig2.parameters)
|
self.assertIn("dry_run", sig2.parameters)
|
||||||
self.assertIn('input_file_path', sig2.parameters)
|
self.assertIn("input_file_path", sig2.parameters)
|
||||||
|
|
|
@@ -1,4 +1,4 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-
 
 from unittest import TestCase
 from unittest.mock import MagicMock, patch
@ -6,14 +6,17 @@ from unittest.mock import MagicMock, patch
|
||||||
from wuttasync.cli import import_csv as mod, ImportCommandHandler
|
from wuttasync.cli import import_csv as mod, ImportCommandHandler
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class TestImportCsv(TestCase):
|
class TestImportCsv(TestCase):
|
||||||
|
|
||||||
def test_basic(self):
|
def test_basic(self):
|
||||||
params = {'models': [],
|
params = {
|
||||||
'create': True, 'update': True, 'delete': False,
|
"models": [],
|
||||||
'dry_run': True}
|
"create": True,
|
||||||
|
"update": True,
|
||||||
|
"delete": False,
|
||||||
|
"dry_run": True,
|
||||||
|
}
|
||||||
ctx = MagicMock(params=params)
|
ctx = MagicMock(params=params)
|
||||||
with patch.object(ImportCommandHandler, 'run') as run:
|
with patch.object(ImportCommandHandler, "run") as run:
|
||||||
mod.import_csv(ctx)
|
mod.import_csv(ctx)
|
||||||
run.assert_called_once_with(params)
|
run.assert_called_once_with(params)
|
||||||
|
|
|
@@ -1,4 +1,4 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-
 
 from unittest.mock import patch
 
@@ -14,7 +14,7 @@ class TestImporter(DataTestCase):
         self.handler = ImportHandler(self.config)
 
     def make_importer(self, **kwargs):
-        kwargs.setdefault('handler', self.handler)
+        kwargs.setdefault("handler", self.handler)
         return mod.Importer(self.config, **kwargs)
 
     def test_constructor(self):
@ -24,11 +24,11 @@ class TestImporter(DataTestCase):
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
# fields
|
# fields
|
||||||
self.assertEqual(imp.fields, ['name', 'value'])
|
self.assertEqual(imp.fields, ["name", "value"])
|
||||||
|
|
||||||
# orientation etc.
|
# orientation etc.
|
||||||
self.assertEqual(imp.orientation, Orientation.IMPORT)
|
self.assertEqual(imp.orientation, Orientation.IMPORT)
|
||||||
self.assertEqual(imp.actioning, 'importing')
|
self.assertEqual(imp.actioning, "importing")
|
||||||
self.assertTrue(imp.create)
|
self.assertTrue(imp.create)
|
||||||
self.assertTrue(imp.update)
|
self.assertTrue(imp.update)
|
||||||
self.assertTrue(imp.delete)
|
self.assertTrue(imp.delete)
|
||||||
|
@ -38,106 +38,111 @@ class TestImporter(DataTestCase):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
|
||||||
# basic importer
|
# basic importer
|
||||||
imp = self.make_importer(model_class=model.Setting, fields='name')
|
imp = self.make_importer(model_class=model.Setting, fields="name")
|
||||||
self.assertEqual(imp.fields, ['name'])
|
self.assertEqual(imp.fields, ["name"])
|
||||||
|
|
||||||
def test_constructor_excluded_fields(self):
|
def test_constructor_excluded_fields(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
|
||||||
# basic importer
|
# basic importer
|
||||||
imp = self.make_importer(model_class=model.Setting, excluded_fields='value')
|
imp = self.make_importer(model_class=model.Setting, excluded_fields="value")
|
||||||
self.assertEqual(imp.fields, ['name'])
|
self.assertEqual(imp.fields, ["name"])
|
||||||
|
|
||||||
def test_get_model_title(self):
|
def test_get_model_title(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
self.assertEqual(imp.get_model_title(), 'Setting')
|
self.assertEqual(imp.get_model_title(), "Setting")
|
||||||
imp.model_title = "SeTtInG"
|
imp.model_title = "SeTtInG"
|
||||||
self.assertEqual(imp.get_model_title(), 'SeTtInG')
|
self.assertEqual(imp.get_model_title(), "SeTtInG")
|
||||||
|
|
||||||
def test_get_simple_fields(self):
|
def test_get_simple_fields(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
self.assertEqual(imp.get_simple_fields(), ['name', 'value'])
|
self.assertEqual(imp.get_simple_fields(), ["name", "value"])
|
||||||
imp.simple_fields = ['name']
|
imp.simple_fields = ["name"]
|
||||||
self.assertEqual(imp.get_simple_fields(), ['name'])
|
self.assertEqual(imp.get_simple_fields(), ["name"])
|
||||||
|
|
||||||
def test_get_supported_fields(self):
|
def test_get_supported_fields(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
self.assertEqual(imp.get_supported_fields(), ['name', 'value'])
|
self.assertEqual(imp.get_supported_fields(), ["name", "value"])
|
||||||
imp.supported_fields = ['name']
|
imp.supported_fields = ["name"]
|
||||||
self.assertEqual(imp.get_supported_fields(), ['name'])
|
self.assertEqual(imp.get_supported_fields(), ["name"])
|
||||||
|
|
||||||
def test_get_fields(self):
|
def test_get_fields(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
self.assertEqual(imp.get_fields(), ['name', 'value'])
|
self.assertEqual(imp.get_fields(), ["name", "value"])
|
||||||
imp.fields = ['name']
|
imp.fields = ["name"]
|
||||||
self.assertEqual(imp.get_fields(), ['name'])
|
self.assertEqual(imp.get_fields(), ["name"])
|
||||||
|
|
||||||
def test_get_keys(self):
|
def test_get_keys(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
self.assertEqual(imp.get_keys(), ['name'])
|
self.assertEqual(imp.get_keys(), ["name"])
|
||||||
with patch.multiple(imp, create=True, key='value'):
|
with patch.multiple(imp, create=True, key="value"):
|
||||||
self.assertEqual(imp.get_keys(), ['value'])
|
self.assertEqual(imp.get_keys(), ["value"])
|
||||||
with patch.multiple(imp, create=True, keys=['foo', 'bar']):
|
with patch.multiple(imp, create=True, keys=["foo", "bar"]):
|
||||||
self.assertEqual(imp.get_keys(), ['foo', 'bar'])
|
self.assertEqual(imp.get_keys(), ["foo", "bar"])
|
||||||
|
|
||||||
def test_process_data(self):
|
def test_process_data(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting, caches_target=True,
|
imp = self.make_importer(
|
||||||
delete=True)
|
model_class=model.Setting, caches_target=True, delete=True
|
||||||
|
)
|
||||||
|
|
||||||
def make_cache():
|
def make_cache():
|
||||||
setting1 = model.Setting(name='foo1', value='bar1')
|
setting1 = model.Setting(name="foo1", value="bar1")
|
||||||
setting2 = model.Setting(name='foo2', value='bar2')
|
setting2 = model.Setting(name="foo2", value="bar2")
|
||||||
setting3 = model.Setting(name='foo3', value='bar3')
|
setting3 = model.Setting(name="foo3", value="bar3")
|
||||||
cache = {
|
cache = {
|
||||||
('foo1',): {
|
("foo1",): {
|
||||||
'object': setting1,
|
"object": setting1,
|
||||||
'data': {'name': 'foo1', 'value': 'bar1'},
|
"data": {"name": "foo1", "value": "bar1"},
|
||||||
},
|
},
|
||||||
('foo2',): {
|
("foo2",): {
|
||||||
'object': setting2,
|
"object": setting2,
|
||||||
'data': {'name': 'foo2', 'value': 'bar2'},
|
"data": {"name": "foo2", "value": "bar2"},
|
||||||
},
|
},
|
||||||
('foo3',): {
|
("foo3",): {
|
||||||
'object': setting3,
|
"object": setting3,
|
||||||
'data': {'name': 'foo3', 'value': 'bar3'},
|
"data": {"name": "foo3", "value": "bar3"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
return cache
|
return cache
|
||||||
|
|
||||||
# nb. delete always succeeds
|
# nb. delete always succeeds
|
||||||
with patch.object(imp, 'delete_target_object', return_value=True):
|
with patch.object(imp, "delete_target_object", return_value=True):
|
||||||
|
|
||||||
# create + update + delete all as needed
|
# create + update + delete all as needed
|
||||||
with patch.object(imp, 'get_target_cache', return_value=make_cache()):
|
with patch.object(imp, "get_target_cache", return_value=make_cache()):
|
||||||
created, updated, deleted = imp.process_data([
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
    {'name': 'foo5', 'value': 'BAR5'},
])
created, updated, deleted = imp.process_data(
    [
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
        {"name": "foo5", "value": "BAR5"},
    ]
)
|
||||||
self.assertEqual(len(created), 2)
|
self.assertEqual(len(created), 2)
|
||||||
self.assertEqual(len(updated), 1)
|
self.assertEqual(len(updated), 1)
|
||||||
self.assertEqual(len(deleted), 2)
|
self.assertEqual(len(deleted), 2)
|
||||||
|
|
||||||
# same but with --max-total so delete gets skipped
|
# same but with --max-total so delete gets skipped
|
||||||
with patch.object(imp, 'get_target_cache', return_value=make_cache()):
|
with patch.object(imp, "get_target_cache", return_value=make_cache()):
|
||||||
with patch.object(imp, 'max_total', new=3):
|
with patch.object(imp, "max_total", new=3):
|
||||||
created, updated, deleted = imp.process_data([
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
    {'name': 'foo5', 'value': 'BAR5'},
])
created, updated, deleted = imp.process_data(
    [
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
        {"name": "foo5", "value": "BAR5"},
    ]
)
|
||||||
self.assertEqual(len(created), 2)
|
self.assertEqual(len(created), 2)
|
||||||
self.assertEqual(len(updated), 1)
|
self.assertEqual(len(updated), 1)
|
||||||
self.assertEqual(len(deleted), 0)
|
self.assertEqual(len(deleted), 0)
|
||||||
|
|
||||||
# delete all if source data empty
|
# delete all if source data empty
|
||||||
with patch.object(imp, 'get_target_cache', return_value=make_cache()):
|
with patch.object(imp, "get_target_cache", return_value=make_cache()):
|
||||||
created, updated, deleted = imp.process_data()
|
created, updated, deleted = imp.process_data()
|
||||||
self.assertEqual(len(created), 0)
|
self.assertEqual(len(created), 0)
|
||||||
self.assertEqual(len(updated), 0)
|
self.assertEqual(len(updated), 0)
|
||||||
|
@@ -148,120 +153,140 @@ class TestImporter(DataTestCase):
|
||||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||||
|
|
||||||
def make_cache():
|
def make_cache():
|
||||||
setting1 = model.Setting(name='foo1', value='bar1')
|
setting1 = model.Setting(name="foo1", value="bar1")
|
||||||
setting2 = model.Setting(name='foo2', value='bar2')
|
setting2 = model.Setting(name="foo2", value="bar2")
|
||||||
cache = {
|
cache = {
|
||||||
('foo1',): {
|
("foo1",): {
|
||||||
'object': setting1,
|
"object": setting1,
|
||||||
'data': {'name': 'foo1', 'value': 'bar1'},
|
"data": {"name": "foo1", "value": "bar1"},
|
||||||
},
|
},
|
||||||
('foo2',): {
|
("foo2",): {
|
||||||
'object': setting2,
|
"object": setting2,
|
||||||
'data': {'name': 'foo2', 'value': 'bar2'},
|
"data": {"name": "foo2", "value": "bar2"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
return cache
|
return cache
|
||||||
|
|
||||||
# change nothing if data matches
|
# change nothing if data matches
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'bar1'},
    {'name': 'foo2', 'value': 'bar2'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "bar1"},
        {"name": "foo2", "value": "bar2"},
    ]
)
|
||||||
self.assertEqual(len(created), 0)
|
self.assertEqual(len(created), 0)
|
||||||
self.assertEqual(len(updated), 0)
|
self.assertEqual(len(updated), 0)
|
||||||
|
|
||||||
# update all as needed
|
# update all as needed
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'BAR1'},
    {'name': 'foo2', 'value': 'BAR2'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "BAR1"},
        {"name": "foo2", "value": "BAR2"},
    ]
)
|
||||||
self.assertEqual(len(created), 0)
|
self.assertEqual(len(created), 0)
|
||||||
self.assertEqual(len(updated), 2)
|
self.assertEqual(len(updated), 2)
|
||||||
|
|
||||||
# update all, with --max-update
|
# update all, with --max-update
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_update=1):
|
with patch.multiple(imp, create=True, cached_target=make_cache(), max_update=1):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'BAR1'},
    {'name': 'foo2', 'value': 'BAR2'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "BAR1"},
        {"name": "foo2", "value": "BAR2"},
    ]
)
|
||||||
self.assertEqual(len(created), 0)
|
self.assertEqual(len(created), 0)
|
||||||
self.assertEqual(len(updated), 1)
|
self.assertEqual(len(updated), 1)
|
||||||
|
|
||||||
# update all, with --max-total
|
# update all, with --max-total
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'BAR1'},
    {'name': 'foo2', 'value': 'BAR2'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "BAR1"},
        {"name": "foo2", "value": "BAR2"},
    ]
)
|
||||||
self.assertEqual(len(created), 0)
|
self.assertEqual(len(created), 0)
|
||||||
self.assertEqual(len(updated), 1)
|
self.assertEqual(len(updated), 1)
|
||||||
|
|
||||||
# create all as needed
|
# create all as needed
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'bar1'},
    {'name': 'foo2', 'value': 'bar2'},
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "bar1"},
        {"name": "foo2", "value": "bar2"},
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
    ]
)
|
||||||
self.assertEqual(len(created), 2)
|
self.assertEqual(len(created), 2)
|
||||||
self.assertEqual(len(updated), 0)
|
self.assertEqual(len(updated), 0)
|
||||||
|
|
||||||
# what happens when create gets skipped
|
# what happens when create gets skipped
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||||
with patch.object(imp, 'create_target_object', return_value=None):
|
with patch.object(imp, "create_target_object", return_value=None):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'bar1'},
    {'name': 'foo2', 'value': 'bar2'},
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "bar1"},
        {"name": "foo2", "value": "bar2"},
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
    ]
)
|
||||||
self.assertEqual(len(created), 0)
|
self.assertEqual(len(created), 0)
|
||||||
self.assertEqual(len(updated), 0)
|
self.assertEqual(len(updated), 0)
|
||||||
|
|
||||||
# create all, with --max-create
|
# create all, with --max-create
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_create=1):
|
with patch.multiple(imp, create=True, cached_target=make_cache(), max_create=1):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'bar1'},
    {'name': 'foo2', 'value': 'bar2'},
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "bar1"},
        {"name": "foo2", "value": "bar2"},
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
    ]
)
|
||||||
self.assertEqual(len(created), 1)
|
self.assertEqual(len(created), 1)
|
||||||
self.assertEqual(len(updated), 0)
|
self.assertEqual(len(updated), 0)
|
||||||
|
|
||||||
# create all, with --max-total
|
# create all, with --max-total
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'bar1'},
    {'name': 'foo2', 'value': 'bar2'},
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "bar1"},
        {"name": "foo2", "value": "bar2"},
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
    ]
)
|
||||||
self.assertEqual(len(created), 1)
|
self.assertEqual(len(created), 1)
|
||||||
self.assertEqual(len(updated), 0)
|
self.assertEqual(len(updated), 0)
|
||||||
|
|
||||||
# create + update all as needed
|
# create + update all as needed
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
with patch.multiple(imp, create=True, cached_target=make_cache()):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'BAR1'},
    {'name': 'foo2', 'value': 'BAR2'},
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "BAR1"},
        {"name": "foo2", "value": "BAR2"},
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
    ]
)
|
||||||
self.assertEqual(len(created), 2)
|
self.assertEqual(len(created), 2)
|
||||||
self.assertEqual(len(updated), 2)
|
self.assertEqual(len(updated), 2)
|
||||||
|
|
||||||
# create + update all, with --max-total
|
# create + update all, with --max-total
|
||||||
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1):
|
||||||
created, updated = imp.do_create_update([
    {'name': 'foo1', 'value': 'BAR1'},
    {'name': 'foo2', 'value': 'BAR2'},
    {'name': 'foo3', 'value': 'BAR3'},
    {'name': 'foo4', 'value': 'BAR4'},
])
created, updated = imp.do_create_update(
    [
        {"name": "foo1", "value": "BAR1"},
        {"name": "foo2", "value": "BAR2"},
        {"name": "foo3", "value": "BAR3"},
        {"name": "foo4", "value": "BAR4"},
    ]
)
|
||||||
# nb. foo1 is updated first
|
# nb. foo1 is updated first
|
||||||
self.assertEqual(len(created), 0)
|
self.assertEqual(len(created), 0)
|
||||||
self.assertEqual(len(updated), 1)
|
self.assertEqual(len(updated), 1)
|
||||||
|
@@ -270,21 +295,21 @@ class TestImporter(DataTestCase):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
|
||||||
# this requires a mock target cache
|
# this requires a mock target cache
|
||||||
setting1 = model.Setting(name='foo1', value='bar1')
|
setting1 = model.Setting(name="foo1", value="bar1")
|
||||||
setting2 = model.Setting(name='foo2', value='bar2')
|
setting2 = model.Setting(name="foo2", value="bar2")
|
||||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||||
cache = {
|
cache = {
|
||||||
('foo1',): {
|
("foo1",): {
|
||||||
'object': setting1,
|
"object": setting1,
|
||||||
'data': {'name': 'foo1', 'value': 'bar1'},
|
"data": {"name": "foo1", "value": "bar1"},
|
||||||
},
|
},
|
||||||
('foo2',): {
|
("foo2",): {
|
||||||
'object': setting2,
|
"object": setting2,
|
||||||
'data': {'name': 'foo2', 'value': 'bar2'},
|
"data": {"name": "foo2", "value": "bar2"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
with patch.object(imp, 'delete_target_object') as delete_target_object:
|
with patch.object(imp, "delete_target_object") as delete_target_object:
|
||||||
|
|
||||||
# delete nothing if source has same keys
|
# delete nothing if source has same keys
|
||||||
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
||||||
|
@@ -305,7 +330,7 @@ class TestImporter(DataTestCase):
|
||||||
delete_target_object.reset_mock()
|
delete_target_object.reset_mock()
|
||||||
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
||||||
source_keys = set()
|
source_keys = set()
|
||||||
with patch.object(imp, 'max_delete', new=1):
|
with patch.object(imp, "max_delete", new=1):
|
||||||
result = imp.do_delete(source_keys)
|
result = imp.do_delete(source_keys)
|
||||||
self.assertEqual(delete_target_object.call_count, 1)
|
self.assertEqual(delete_target_object.call_count, 1)
|
||||||
self.assertEqual(len(result), 1)
|
self.assertEqual(len(result), 1)
|
||||||
|
@@ -314,7 +339,7 @@ class TestImporter(DataTestCase):
|
||||||
delete_target_object.reset_mock()
|
delete_target_object.reset_mock()
|
||||||
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
with patch.multiple(imp, create=True, cached_target=dict(cache)):
|
||||||
source_keys = set()
|
source_keys = set()
|
||||||
with patch.object(imp, 'max_total', new=1):
|
with patch.object(imp, "max_total", new=1):
|
||||||
result = imp.do_delete(source_keys)
|
result = imp.do_delete(source_keys)
|
||||||
self.assertEqual(delete_target_object.call_count, 1)
|
self.assertEqual(delete_target_object.call_count, 1)
|
||||||
self.assertEqual(len(result), 1)
|
self.assertEqual(len(result), 1)
|
||||||
|
@@ -322,25 +347,25 @@ class TestImporter(DataTestCase):
|
||||||
def test_get_record_key(self):
|
def test_get_record_key(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
record = {'name': 'foo', 'value': 'bar'}
|
record = {"name": "foo", "value": "bar"}
|
||||||
self.assertEqual(imp.get_record_key(record), ('foo',))
|
self.assertEqual(imp.get_record_key(record), ("foo",))
|
||||||
imp.key = ('name', 'value')
|
imp.key = ("name", "value")
|
||||||
self.assertEqual(imp.get_record_key(record), ('foo', 'bar'))
|
self.assertEqual(imp.get_record_key(record), ("foo", "bar"))
|
||||||
|
|
||||||
def test_data_diffs(self):
|
def test_data_diffs(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
# 2 identical records
|
# 2 identical records
|
||||||
rec1 = {'name': 'foo', 'value': 'bar'}
|
rec1 = {"name": "foo", "value": "bar"}
|
||||||
rec2 = {'name': 'foo', 'value': 'bar'}
|
rec2 = {"name": "foo", "value": "bar"}
|
||||||
result = imp.data_diffs(rec1, rec2)
|
result = imp.data_diffs(rec1, rec2)
|
||||||
self.assertEqual(result, [])
|
self.assertEqual(result, [])
|
||||||
|
|
||||||
# now they're different
|
# now they're different
|
||||||
rec2['value'] = 'baz'
|
rec2["value"] = "baz"
|
||||||
result = imp.data_diffs(rec1, rec2)
|
result = imp.data_diffs(rec1, rec2)
|
||||||
self.assertEqual(result, ['value'])
|
self.assertEqual(result, ["value"])
|
||||||
|
|
||||||
def test_normalize_source_data(self):
|
def test_normalize_source_data(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
@@ -351,7 +376,7 @@ class TestImporter(DataTestCase):
|
||||||
self.assertEqual(data, [])
|
self.assertEqual(data, [])
|
||||||
|
|
||||||
# now with 1 record
|
# now with 1 record
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
data = imp.normalize_source_data(source_objects=[setting])
|
data = imp.normalize_source_data(source_objects=[setting])
|
||||||
self.assertEqual(len(data), 1)
|
self.assertEqual(len(data), 1)
|
||||||
# nb. default normalizer returns object as-is
|
# nb. default normalizer returns object as-is
|
||||||
|
@@ -361,17 +386,17 @@ class TestImporter(DataTestCase):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
setting1 = model.Setting(name='foo', value='bar1')
|
setting1 = model.Setting(name="foo", value="bar1")
|
||||||
setting2 = model.Setting(name='foo', value='bar2')
|
setting2 = model.Setting(name="foo", value="bar2")
|
||||||
|
|
||||||
result = imp.get_unique_data([setting2, setting1])
|
result = imp.get_unique_data([setting2, setting1])
|
||||||
self.assertIsInstance(result, tuple)
|
self.assertIsInstance(result, tuple)
|
||||||
self.assertEqual(len(result), 2)
|
self.assertEqual(len(result), 2)
|
||||||
self.assertIsInstance(result[0], list)
|
self.assertIsInstance(result[0], list)
|
||||||
self.assertEqual(len(result[0]), 1)
|
self.assertEqual(len(result[0]), 1)
|
||||||
self.assertIs(result[0][0], setting2) # nb. not setting1
|
self.assertIs(result[0][0], setting2) # nb. not setting1
|
||||||
self.assertIsInstance(result[1], set)
|
self.assertIsInstance(result[1], set)
|
||||||
self.assertEqual(result[1], {('foo',)})
|
self.assertEqual(result[1], {("foo",)})
|
||||||
|
|
||||||
def test_get_source_objects(self):
|
def test_get_source_objects(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
@@ -397,7 +422,7 @@ class TestImporter(DataTestCase):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
with patch.object(imp, 'get_target_objects') as get_target_objects:
|
with patch.object(imp, "get_target_objects") as get_target_objects:
|
||||||
get_target_objects.return_value = []
|
get_target_objects.return_value = []
|
||||||
|
|
||||||
# empty cache
|
# empty cache
|
||||||
|
@@ -405,16 +430,16 @@ class TestImporter(DataTestCase):
|
||||||
self.assertEqual(cache, {})
|
self.assertEqual(cache, {})
|
||||||
|
|
||||||
# cache w/ one record
|
# cache w/ one record
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
get_target_objects.return_value = [setting]
|
get_target_objects.return_value = [setting]
|
||||||
cache = imp.get_target_cache()
|
cache = imp.get_target_cache()
|
||||||
self.assertEqual(len(cache), 1)
|
self.assertEqual(len(cache), 1)
|
||||||
self.assertIn(('foo',), cache)
|
self.assertIn(("foo",), cache)
|
||||||
foo = cache[('foo',)]
|
foo = cache[("foo",)]
|
||||||
self.assertEqual(len(foo), 2)
|
self.assertEqual(len(foo), 2)
|
||||||
self.assertEqual(set(foo), {'object', 'data'})
|
self.assertEqual(set(foo), {"object", "data"})
|
||||||
self.assertIs(foo['object'], setting)
|
self.assertIs(foo["object"], setting)
|
||||||
self.assertEqual(foo['data'], {'name': 'foo', 'value': 'bar'})
|
self.assertEqual(foo["data"], {"name": "foo", "value": "bar"})
|
||||||
|
|
||||||
def test_get_target_objects(self):
|
def test_get_target_objects(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
@@ -423,36 +448,36 @@ class TestImporter(DataTestCase):
|
||||||
|
|
||||||
def test_get_target_object(self):
|
def test_get_target_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
|
|
||||||
# nb. must mock up a target cache for this one
|
# nb. must mock up a target cache for this one
|
||||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||||
imp.cached_target = {
|
imp.cached_target = {
|
||||||
('foo',): {
|
("foo",): {
|
||||||
'object': setting,
|
"object": setting,
|
||||||
'data': {'name': 'foo', 'value': 'bar'},
|
"data": {"name": "foo", "value": "bar"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
# returns same object
|
# returns same object
|
||||||
result = imp.get_target_object(('foo',))
|
result = imp.get_target_object(("foo",))
|
||||||
self.assertIs(result, setting)
|
self.assertIs(result, setting)
|
||||||
|
|
||||||
# and one more time just for kicks
|
# and one more time just for kicks
|
||||||
result = imp.get_target_object(('foo',))
|
result = imp.get_target_object(("foo",))
|
||||||
self.assertIs(result, setting)
|
self.assertIs(result, setting)
|
||||||
|
|
||||||
# but then not if cache flag is off
|
# but then not if cache flag is off
|
||||||
imp.caches_target = False
|
imp.caches_target = False
|
||||||
result = imp.get_target_object(('foo',))
|
result = imp.get_target_object(("foo",))
|
||||||
self.assertIsNone(result)
|
self.assertIsNone(result)
|
||||||
|
|
||||||
def test_normalize_target_object(self):
|
def test_normalize_target_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
data = imp.normalize_target_object(setting)
|
data = imp.normalize_target_object(setting)
|
||||||
self.assertEqual(data, {'name': 'foo', 'value': 'bar'})
|
self.assertEqual(data, {"name": "foo", "value": "bar"})
|
||||||
|
|
||||||
def test_get_deletable_keys(self):
|
def test_get_deletable_keys(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
@@ -463,11 +488,11 @@ class TestImporter(DataTestCase):
|
||||||
self.assertIsInstance(result, set)
|
self.assertIsInstance(result, set)
|
||||||
self.assertEqual(result, set())
|
self.assertEqual(result, set())
|
||||||
|
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
cache = {
|
cache = {
|
||||||
('foo',): {
|
("foo",): {
|
||||||
'object': setting,
|
"object": setting,
|
||||||
'data': {'name': 'foo', 'value': 'bar'},
|
"data": {"name": "foo", "value": "bar"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -475,10 +500,10 @@ class TestImporter(DataTestCase):
|
||||||
|
|
||||||
# all are deletable by default
|
# all are deletable by default
|
||||||
result = imp.get_deletable_keys()
|
result = imp.get_deletable_keys()
|
||||||
self.assertEqual(result, {('foo',)})
|
self.assertEqual(result, {("foo",)})
|
||||||
|
|
||||||
# but some maybe can't be deleted
|
# but some maybe can't be deleted
|
||||||
with patch.object(imp, 'can_delete_object', return_value=False):
|
with patch.object(imp, "can_delete_object", return_value=False):
|
||||||
result = imp.get_deletable_keys()
|
result = imp.get_deletable_keys()
|
||||||
self.assertEqual(result, set())
|
self.assertEqual(result, set())
|
||||||
|
|
||||||
|
@@ -487,22 +512,23 @@ class TestImporter(DataTestCase):
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
# basic
|
# basic
|
||||||
setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar'})
|
setting = imp.create_target_object(("foo",), {"name": "foo", "value": "bar"})
|
||||||
self.assertIsInstance(setting, model.Setting)
|
self.assertIsInstance(setting, model.Setting)
|
||||||
self.assertEqual(setting.name, 'foo')
|
self.assertEqual(setting.name, "foo")
|
||||||
self.assertEqual(setting.value, 'bar')
|
self.assertEqual(setting.value, "bar")
|
||||||
|
|
||||||
# will skip if magic delete flag is set
|
# will skip if magic delete flag is set
|
||||||
setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar',
                                              '__ignoreme__': True})
setting = imp.create_target_object(
    ("foo",), {"name": "foo", "value": "bar", "__ignoreme__": True}
)
|
||||||
self.assertIsNone(setting)
|
self.assertIsNone(setting)
|
||||||
|
|
||||||
def test_make_empty_object(self):
|
def test_make_empty_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
obj = imp.make_empty_object(('foo',))
|
obj = imp.make_empty_object(("foo",))
|
||||||
self.assertIsInstance(obj, model.Setting)
|
self.assertIsInstance(obj, model.Setting)
|
||||||
self.assertEqual(obj.name, 'foo')
|
self.assertEqual(obj.name, "foo")
|
||||||
|
|
||||||
def test_make_object(self):
|
def test_make_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
@@ -513,23 +539,23 @@ class TestImporter(DataTestCase):
|
||||||
def test_update_target_object(self):
|
def test_update_target_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
setting = model.Setting(name='foo')
|
setting = model.Setting(name="foo")
|
||||||
|
|
||||||
# basic logic for updating *new* object
|
# basic logic for updating *new* object
|
||||||
obj = imp.update_target_object(setting, {'name': 'foo', 'value': 'bar'})
|
obj = imp.update_target_object(setting, {"name": "foo", "value": "bar"})
|
||||||
self.assertIs(obj, setting)
|
self.assertIs(obj, setting)
|
||||||
self.assertEqual(setting.value, 'bar')
|
self.assertEqual(setting.value, "bar")
|
||||||
|
|
||||||
def test_can_delete_object(self):
|
def test_can_delete_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
setting = model.Setting(name='foo')
|
setting = model.Setting(name="foo")
|
||||||
self.assertTrue(imp.can_delete_object(setting))
|
self.assertTrue(imp.can_delete_object(setting))
|
||||||
|
|
||||||
def test_delete_target_object(self):
|
def test_delete_target_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
setting = model.Setting(name='foo')
|
setting = model.Setting(name="foo")
|
||||||
# nb. default implementation always returns false
|
# nb. default implementation always returns false
|
||||||
self.assertFalse(imp.delete_target_object(setting))
|
self.assertFalse(imp.delete_target_object(setting))
|
||||||
|
|
||||||
|
@@ -541,20 +567,20 @@ class TestFromFile(DataTestCase):
|
||||||
self.handler = ImportHandler(self.config)
|
self.handler = ImportHandler(self.config)
|
||||||
|
|
||||||
def make_importer(self, **kwargs):
|
def make_importer(self, **kwargs):
|
||||||
kwargs.setdefault('handler', self.handler)
|
kwargs.setdefault("handler", self.handler)
|
||||||
return mod.FromFile(self.config, **kwargs)
|
return mod.FromFile(self.config, **kwargs)
|
||||||
|
|
||||||
def test_setup(self):
|
def test_setup(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
with patch.object(imp, 'open_input_file') as open_input_file:
|
with patch.object(imp, "open_input_file") as open_input_file:
|
||||||
imp.setup()
|
imp.setup()
|
||||||
open_input_file.assert_called_once_with()
|
open_input_file.assert_called_once_with()
|
||||||
|
|
||||||
def test_teardown(self):
|
def test_teardown(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
with patch.object(imp, 'close_input_file') as close_input_file:
|
with patch.object(imp, "close_input_file") as close_input_file:
|
||||||
imp.teardown()
|
imp.teardown()
|
||||||
close_input_file.assert_called_once_with()
|
close_input_file.assert_called_once_with()
|
||||||
|
|
||||||
|
@@ -563,13 +589,13 @@ class TestFromFile(DataTestCase):
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
# path is guessed from dir+filename
|
# path is guessed from dir+filename
|
||||||
path = self.write_file('data.txt', '')
|
path = self.write_file("data.txt", "")
|
||||||
imp.input_file_dir = self.tempdir
|
imp.input_file_dir = self.tempdir
|
||||||
imp.input_file_name = 'data.txt'
|
imp.input_file_name = "data.txt"
|
||||||
self.assertEqual(imp.get_input_file_path(), path)
|
self.assertEqual(imp.get_input_file_path(), path)
|
||||||
|
|
||||||
# path can be explicitly set
|
# path can be explicitly set
|
||||||
path2 = self.write_file('data2.txt', '')
|
path2 = self.write_file("data2.txt", "")
|
||||||
imp.input_file_path = path2
|
imp.input_file_path = path2
|
||||||
self.assertEqual(imp.get_input_file_path(), path2)
|
self.assertEqual(imp.get_input_file_path(), path2)
|
||||||
|
|
||||||
|
@@ -592,8 +618,8 @@ class TestFromFile(DataTestCase):
|
||||||
self.assertRaises(NotImplementedError, imp.get_input_file_name)
|
self.assertRaises(NotImplementedError, imp.get_input_file_name)
|
||||||
|
|
||||||
# name can be explicitly set
|
# name can be explicitly set
|
||||||
imp.input_file_name = 'data.txt'
|
imp.input_file_name = "data.txt"
|
||||||
self.assertEqual(imp.get_input_file_name(), 'data.txt')
|
self.assertEqual(imp.get_input_file_name(), "data.txt")
|
||||||
|
|
||||||
def test_open_input_file(self):
|
def test_open_input_file(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
@@ -604,10 +630,10 @@ class TestFromFile(DataTestCase):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
path = self.write_file('data.txt', '')
|
path = self.write_file("data.txt", "")
|
||||||
with open(path, 'rt') as f:
|
with open(path, "rt") as f:
|
||||||
imp.input_file = f
|
imp.input_file = f
|
||||||
with patch.object(f, 'close') as close:
|
with patch.object(f, "close") as close:
|
||||||
imp.close_input_file()
|
imp.close_input_file()
|
||||||
close.assert_called_once_with()
|
close.assert_called_once_with()
|
||||||
|
|
||||||
|
@@ -619,16 +645,16 @@ class TestToSqlalchemy(DataTestCase):
|
||||||
self.handler = ImportHandler(self.config)
|
self.handler = ImportHandler(self.config)
|
||||||
|
|
||||||
def make_importer(self, **kwargs):
|
def make_importer(self, **kwargs):
|
||||||
kwargs.setdefault('handler', self.handler)
|
kwargs.setdefault("handler", self.handler)
|
||||||
return mod.ToSqlalchemy(self.config, **kwargs)
|
return mod.ToSqlalchemy(self.config, **kwargs)
|
||||||
|
|
||||||
def test_get_target_objects(self):
|
def test_get_target_objects(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
||||||
|
|
||||||
setting1 = model.Setting(name='foo', value='bar')
|
setting1 = model.Setting(name="foo", value="bar")
|
||||||
self.session.add(setting1)
|
self.session.add(setting1)
|
||||||
setting2 = model.Setting(name='foo2', value='bar2')
|
setting2 = model.Setting(name="foo2", value="bar2")
|
||||||
self.session.add(setting2)
|
self.session.add(setting2)
|
||||||
self.session.commit()
|
self.session.commit()
|
||||||
|
|
||||||
|
@@ -638,60 +664,60 @@ class TestToSqlalchemy(DataTestCase):
|
||||||
|
|
||||||
def test_get_target_object(self):
|
def test_get_target_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
|
|
||||||
# nb. must mock up a target cache for this one
|
# nb. must mock up a target cache for this one
|
||||||
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
imp = self.make_importer(model_class=model.Setting, caches_target=True)
|
||||||
imp.cached_target = {
|
imp.cached_target = {
|
||||||
('foo',): {
|
("foo",): {
|
||||||
'object': setting,
|
"object": setting,
|
||||||
'data': {'name': 'foo', 'value': 'bar'},
|
"data": {"name": "foo", "value": "bar"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
# returns same object
|
# returns same object
|
||||||
result = imp.get_target_object(('foo',))
|
result = imp.get_target_object(("foo",))
|
||||||
self.assertIs(result, setting)
|
self.assertIs(result, setting)
|
||||||
|
|
||||||
# and one more time just for kicks
|
# and one more time just for kicks
|
||||||
result = imp.get_target_object(('foo',))
|
result = imp.get_target_object(("foo",))
|
||||||
self.assertIs(result, setting)
|
self.assertIs(result, setting)
|
||||||
|
|
||||||
# now let's put a 2nd setting in the db
|
# now let's put a 2nd setting in the db
|
||||||
setting2 = model.Setting(name='foo2', value='bar2')
|
setting2 = model.Setting(name="foo2", value="bar2")
|
||||||
self.session.add(setting2)
|
self.session.add(setting2)
|
||||||
self.session.commit()
|
self.session.commit()
|
||||||
|
|
||||||
# nb. disable target cache
|
# nb. disable target cache
|
||||||
with patch.multiple(imp, create=True,
                    target_session=self.session,
                    caches_target=False):
with patch.multiple(
    imp, create=True, target_session=self.session, caches_target=False
):
|
||||||
|
|
||||||
# now we should be able to fetch that via query
|
# now we should be able to fetch that via query
|
||||||
result = imp.get_target_object(('foo2',))
|
result = imp.get_target_object(("foo2",))
|
||||||
self.assertIsInstance(result, model.Setting)
|
self.assertIsInstance(result, model.Setting)
|
||||||
self.assertIs(result, setting2)
|
self.assertIs(result, setting2)
|
||||||
|
|
||||||
# but sometimes it will not be found
|
# but sometimes it will not be found
|
||||||
result = imp.get_target_object(('foo3',))
|
result = imp.get_target_object(("foo3",))
|
||||||
self.assertIsNone(result)
|
self.assertIsNone(result)
|
||||||
|
|
||||||
def test_create_target_object(self):
|
def test_create_target_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
imp = self.make_importer(model_class=model.Setting, target_session=self.session)
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
|
|
||||||
# new object is added to session
|
# new object is added to session
|
||||||
setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar'})
|
setting = imp.create_target_object(("foo",), {"name": "foo", "value": "bar"})
|
||||||
self.assertIsInstance(setting, model.Setting)
|
self.assertIsInstance(setting, model.Setting)
|
||||||
self.assertEqual(setting.name, 'foo')
|
self.assertEqual(setting.name, "foo")
|
||||||
self.assertEqual(setting.value, 'bar')
|
self.assertEqual(setting.value, "bar")
|
||||||
self.assertIn(setting, self.session)
|
self.assertIn(setting, self.session)
|
||||||
|
|
||||||
def test_delete_target_object(self):
|
def test_delete_target_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
|
||||||
setting = model.Setting(name='foo', value='bar')
|
setting = model.Setting(name="foo", value="bar")
|
||||||
self.session.add(setting)
|
self.session.add(setting)
|
||||||
|
|
||||||
self.assertEqual(self.session.query(model.Setting).count(), 1)
|
self.assertEqual(self.session.query(model.Setting).count(), 1)
|
||||||
|
|
|
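For reference, the rewrites in these hunks can be reproduced with black's documented library API — a minimal sketch, assuming the black package is installed (format_str and Mode are its public entry points; the sample source line is borrowed from this diff):

    import black

    # black normalizes string quotes and re-wraps long lines the same way it
    # does for whole files; format_str() applies those rules to a snippet.
    src = "self.assertEqual(imp.get_simple_fields(), ['name', 'value'])\n"
    print(black.format_str(src, mode=black.Mode()))
    # -> self.assertEqual(imp.get_simple_fields(), ["name", "value"])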
@@ -1,4 +1,4 @@
|
||||||
#-*- coding: utf-8; -*-
|
# -*- coding: utf-8; -*-
|
||||||
|
|
||||||
import csv
|
import csv
|
||||||
import uuid as _uuid
|
import uuid as _uuid
|
||||||
|
@@ -6,7 +6,12 @@ from unittest.mock import patch
|
||||||
|
|
||||||
from wuttjamaican.testing import DataTestCase
|
from wuttjamaican.testing import DataTestCase
|
||||||
|
|
||||||
from wuttasync.importing import csv as mod, ImportHandler, ToSqlalchemyHandler, ToSqlalchemy
from wuttasync.importing import (
    csv as mod,
    ImportHandler,
    ToSqlalchemyHandler,
    ToSqlalchemy,
)
|
||||||
|
|
||||||
|
|
||||||
class TestFromCsv(DataTestCase):
|
class TestFromCsv(DataTestCase):
|
||||||
|
@@ -15,14 +20,17 @@ class TestFromCsv(DataTestCase):
|
||||||
self.setup_db()
|
self.setup_db()
|
||||||
self.handler = ImportHandler(self.config)
|
self.handler = ImportHandler(self.config)
|
||||||
|
|
||||||
self.data_path = self.write_file('data.txt', """\
name,value
foo,bar
foo2,bar2
""")
self.data_path = self.write_file(
    "data.txt",
    """\
name,value
foo,bar
foo2,bar2
""",
)
|
||||||
|
|
||||||
def make_importer(self, **kwargs):
|
def make_importer(self, **kwargs):
|
||||||
kwargs.setdefault('handler', self.handler)
|
kwargs.setdefault("handler", self.handler)
|
||||||
return mod.FromCsv(self.config, **kwargs)
|
return mod.FromCsv(self.config, **kwargs)
|
||||||
|
|
||||||
def test_get_input_file_name(self):
|
def test_get_input_file_name(self):
|
||||||
|
@@ -30,39 +38,41 @@ foo2,bar2
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
# name can be guessed
|
# name can be guessed
|
||||||
self.assertEqual(imp.get_input_file_name(), 'Setting.csv')
|
self.assertEqual(imp.get_input_file_name(), "Setting.csv")
|
||||||
|
|
||||||
# name can be explicitly set
|
# name can be explicitly set
|
||||||
imp.input_file_name = 'data.txt'
|
imp.input_file_name = "data.txt"
|
||||||
self.assertEqual(imp.get_input_file_name(), 'data.txt')
|
self.assertEqual(imp.get_input_file_name(), "data.txt")
|
||||||
|
|
||||||
def test_open_input_file(self):
|
def test_open_input_file(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
|
|
||||||
# normal operation, input file includes all fields
|
# normal operation, input file includes all fields
|
||||||
imp = self.make_importer(model_class=model.Setting, input_file_path=self.data_path)
self.assertEqual(imp.fields, ['name', 'value'])
imp = self.make_importer(
    model_class=model.Setting, input_file_path=self.data_path
)
self.assertEqual(imp.fields, ["name", "value"])
|
||||||
imp.open_input_file()
|
imp.open_input_file()
|
||||||
self.assertEqual(imp.input_file.name, self.data_path)
|
self.assertEqual(imp.input_file.name, self.data_path)
|
||||||
self.assertIsInstance(imp.input_reader, csv.DictReader)
|
self.assertIsInstance(imp.input_reader, csv.DictReader)
|
||||||
self.assertEqual(imp.fields, ['name', 'value'])
|
self.assertEqual(imp.fields, ["name", "value"])
|
||||||
imp.input_file.close()
|
imp.input_file.close()
|
||||||
|
|
||||||
# this file is missing a field, plus we'll pretend more are
|
# this file is missing a field, plus we'll pretend more are
|
||||||
# supported - but should wind up with just the one field
|
# supported - but should wind up with just the one field
|
||||||
missing = self.write_file('missing.txt', 'name')
|
missing = self.write_file("missing.txt", "name")
|
||||||
imp = self.make_importer(model_class=model.Setting, input_file_path=missing)
|
imp = self.make_importer(model_class=model.Setting, input_file_path=missing)
|
||||||
imp.fields.extend(['lots', 'more'])
|
imp.fields.extend(["lots", "more"])
|
||||||
self.assertEqual(imp.fields, ['name', 'value', 'lots', 'more'])
|
self.assertEqual(imp.fields, ["name", "value", "lots", "more"])
|
||||||
imp.open_input_file()
|
imp.open_input_file()
|
||||||
self.assertEqual(imp.fields, ['name'])
|
self.assertEqual(imp.fields, ["name"])
|
||||||
imp.input_file.close()
|
imp.input_file.close()
|
||||||
|
|
||||||
# and what happens when no known fields are found
|
# and what happens when no known fields are found
|
||||||
bogus = self.write_file('bogus.txt', 'blarg')
|
bogus = self.write_file("bogus.txt", "blarg")
|
||||||
imp = self.make_importer(model_class=model.Setting, input_file_path=bogus)
|
imp = self.make_importer(model_class=model.Setting, input_file_path=bogus)
|
||||||
self.assertEqual(imp.fields, ['name', 'value'])
|
self.assertEqual(imp.fields, ["name", "value"])
|
||||||
self.assertRaises(ValueError, imp.open_input_file)
|
self.assertRaises(ValueError, imp.open_input_file)
|
||||||
|
|
||||||
def test_close_input_file(self):
|
def test_close_input_file(self):
|
||||||
|
@@ -72,8 +82,8 @@ foo2,bar2
|
||||||
imp.input_file_path = self.data_path
|
imp.input_file_path = self.data_path
|
||||||
imp.open_input_file()
|
imp.open_input_file()
|
||||||
imp.close_input_file()
|
imp.close_input_file()
|
||||||
self.assertFalse(hasattr(imp, 'input_reader'))
|
self.assertFalse(hasattr(imp, "input_reader"))
|
||||||
self.assertFalse(hasattr(imp, 'input_file'))
|
self.assertFalse(hasattr(imp, "input_file"))
|
||||||
|
|
||||||
def test_get_source_objects(self):
|
def test_get_source_objects(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
@@ -84,8 +94,8 @@ foo2,bar2
|
||||||
objects = imp.get_source_objects()
|
objects = imp.get_source_objects()
|
||||||
imp.close_input_file()
|
imp.close_input_file()
|
||||||
self.assertEqual(len(objects), 2)
|
self.assertEqual(len(objects), 2)
|
||||||
self.assertEqual(objects[0], {'name': 'foo', 'value': 'bar'})
|
self.assertEqual(objects[0], {"name": "foo", "value": "bar"})
|
||||||
self.assertEqual(objects[1], {'name': 'foo2', 'value': 'bar2'})
|
self.assertEqual(objects[1], {"name": "foo2", "value": "bar2"})
|
||||||
|
|
||||||
|
|
||||||
class MockMixinImporter(mod.FromCsvToSqlalchemyMixin, mod.FromCsv, ToSqlalchemy):
|
class MockMixinImporter(mod.FromCsvToSqlalchemyMixin, mod.FromCsv, ToSqlalchemy):
|
||||||
|
@@ -99,7 +109,7 @@ class TestFromCsvToSqlalchemyMixin(DataTestCase):
|
||||||
self.handler = ImportHandler(self.config)
|
self.handler = ImportHandler(self.config)
|
||||||
|
|
||||||
def make_importer(self, **kwargs):
|
def make_importer(self, **kwargs):
|
||||||
kwargs.setdefault('handler', self.handler)
|
kwargs.setdefault("handler", self.handler)
|
||||||
return MockMixinImporter(self.config, **kwargs)
|
return MockMixinImporter(self.config, **kwargs)
|
||||||
|
|
||||||
def test_constructor(self):
|
def test_constructor(self):
|
||||||
|
@@ -112,31 +122,50 @@ class TestFromCsvToSqlalchemyMixin(DataTestCase):
|
||||||
# typical
|
# typical
|
||||||
# nb. as of now Upgrade is the only table using proper UUID
|
# nb. as of now Upgrade is the only table using proper UUID
|
||||||
imp = self.make_importer(model_class=model.Upgrade)
|
imp = self.make_importer(model_class=model.Upgrade)
|
||||||
self.assertEqual(imp.uuid_keys, ['uuid'])
|
self.assertEqual(imp.uuid_keys, ["uuid"])
|
||||||
|
|
||||||
def test_normalize_source_object(self):
|
def test_normalize_source_object(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
|
|
||||||
# no uuid keys
|
# no uuid keys
|
||||||
imp = self.make_importer(model_class=model.Setting)
|
imp = self.make_importer(model_class=model.Setting)
|
||||||
result = imp.normalize_source_object({'name': 'foo', 'value': 'bar'})
|
result = imp.normalize_source_object({"name": "foo", "value": "bar"})
|
||||||
self.assertEqual(result, {'name': 'foo', 'value': 'bar'})
|
self.assertEqual(result, {"name": "foo", "value": "bar"})
|
||||||
|
|
||||||
# source has proper UUID
|
# source has proper UUID
|
||||||
# nb. as of now Upgrade is the only table using proper UUID
|
# nb. as of now Upgrade is the only table using proper UUID
|
||||||
imp = self.make_importer(model_class=model.Upgrade, fields=['uuid', 'description'])
result = imp.normalize_source_object({'uuid': _uuid.UUID('06753693-d892-77f0-8000-ce71bf7ebbba'),
                                      'description': 'testing'})
self.assertEqual(result, {'uuid': _uuid.UUID('06753693-d892-77f0-8000-ce71bf7ebbba'),
                          'description': 'testing'})
imp = self.make_importer(
    model_class=model.Upgrade, fields=["uuid", "description"]
)
result = imp.normalize_source_object(
    {
        "uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
        "description": "testing",
    }
)
self.assertEqual(
    result,
    {
        "uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
        "description": "testing",
    },
)
|
||||||
|
|
||||||
# source has string uuid
|
# source has string uuid
|
||||||
# nb. as of now Upgrade is the only table using proper UUID
|
# nb. as of now Upgrade is the only table using proper UUID
|
||||||
imp = self.make_importer(model_class=model.Upgrade, fields=['uuid', 'description'])
result = imp.normalize_source_object({'uuid': '06753693d89277f08000ce71bf7ebbba',
                                      'description': 'testing'})
self.assertEqual(result, {'uuid': _uuid.UUID('06753693-d892-77f0-8000-ce71bf7ebbba'),
                          'description': 'testing'})
imp = self.make_importer(
    model_class=model.Upgrade, fields=["uuid", "description"]
)
result = imp.normalize_source_object(
    {"uuid": "06753693d89277f08000ce71bf7ebbba", "description": "testing"}
)
self.assertEqual(
    result,
    {
        "uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
        "description": "testing",
    },
)
|
||||||
|
|
||||||
|
|
||||||
class MockMixinHandler(mod.FromCsvToSqlalchemyHandlerMixin, ToSqlalchemyHandler):
|
class MockMixinHandler(mod.FromCsvToSqlalchemyHandlerMixin, ToSqlalchemyHandler):
|
||||||
|
@@ -149,27 +178,33 @@ class TestFromCsvToSqlalchemyHandlerMixin(DataTestCase):
|
||||||
return MockMixinHandler(self.config, **kwargs)
|
return MockMixinHandler(self.config, **kwargs)
|
||||||
|
|
||||||
def test_get_target_model(self):
|
def test_get_target_model(self):
|
||||||
with patch.object(mod.FromCsvToSqlalchemyHandlerMixin, 'define_importers', return_value={}):
with patch.object(
    mod.FromCsvToSqlalchemyHandlerMixin, "define_importers", return_value={}
):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
self.assertRaises(NotImplementedError, handler.get_target_model)
|
self.assertRaises(NotImplementedError, handler.get_target_model)
|
||||||
|
|
||||||
def test_define_importers(self):
|
def test_define_importers(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
with patch.object(mod.FromCsvToSqlalchemyHandlerMixin, 'get_target_model', return_value=model):
with patch.object(
    mod.FromCsvToSqlalchemyHandlerMixin, "get_target_model", return_value=model
):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
importers = handler.define_importers()
|
importers = handler.define_importers()
|
||||||
self.assertIn('Setting', importers)
|
self.assertIn("Setting", importers)
|
||||||
self.assertTrue(issubclass(importers['Setting'], mod.FromCsv))
|
self.assertTrue(issubclass(importers["Setting"], mod.FromCsv))
|
||||||
self.assertTrue(issubclass(importers['Setting'], ToSqlalchemy))
|
self.assertTrue(issubclass(importers["Setting"], ToSqlalchemy))
|
||||||
self.assertIn('User', importers)
|
self.assertIn("User", importers)
|
||||||
self.assertIn('Person', importers)
|
self.assertIn("Person", importers)
|
||||||
self.assertIn('Role', importers)
|
self.assertIn("Role", importers)
|
||||||
|
|
||||||
def test_make_importer_factory(self):
|
def test_make_importer_factory(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
with patch.object(mod.FromCsvToSqlalchemyHandlerMixin, 'define_importers', return_value={}):
with patch.object(
    mod.FromCsvToSqlalchemyHandlerMixin, "define_importers", return_value={}
):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
factory = handler.make_importer_factory(model.Setting, 'Setting')
|
factory = handler.make_importer_factory(model.Setting, "Setting")
|
||||||
self.assertTrue(issubclass(factory, mod.FromCsv))
|
self.assertTrue(issubclass(factory, mod.FromCsv))
|
||||||
self.assertTrue(issubclass(factory, ToSqlalchemy))
|
self.assertTrue(issubclass(factory, ToSqlalchemy))
|
||||||
|
|
||||||
|
|
|
@@ -1,4 +1,4 @@
|
||||||
#-*- coding: utf-8; -*-
|
# -*- coding: utf-8; -*-
|
||||||
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
|
@@ -17,34 +17,36 @@ class TestImportHandler(DataTestCase):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
self.assertEqual(str(handler), "None → None")
|
self.assertEqual(str(handler), "None → None")
|
||||||
|
|
||||||
handler.source_title = 'CSV'
|
handler.source_title = "CSV"
|
||||||
handler.target_title = 'Wutta'
|
handler.target_title = "Wutta"
|
||||||
self.assertEqual(str(handler), "CSV → Wutta")
|
self.assertEqual(str(handler), "CSV → Wutta")
|
||||||
|
|
||||||
def test_actioning(self):
|
def test_actioning(self):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
self.assertEqual(handler.actioning, 'importing')
|
self.assertEqual(handler.actioning, "importing")
|
||||||
|
|
||||||
handler.orientation = mod.Orientation.EXPORT
|
handler.orientation = mod.Orientation.EXPORT
|
||||||
self.assertEqual(handler.actioning, 'exporting')
|
self.assertEqual(handler.actioning, "exporting")
|
||||||
|
|
||||||
def test_get_key(self):
|
def test_get_key(self):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
self.assertEqual(handler.get_key(), 'to_None.from_None.import')
|
self.assertEqual(handler.get_key(), "to_None.from_None.import")
|
||||||
|
|
||||||
with patch.multiple(mod.ImportHandler, source_key='csv', target_key='wutta'):
|
with patch.multiple(mod.ImportHandler, source_key="csv", target_key="wutta"):
|
||||||
self.assertEqual(handler.get_key(), 'to_wutta.from_csv.import')
|
self.assertEqual(handler.get_key(), "to_wutta.from_csv.import")
|
||||||
|
|
||||||
def test_get_spec(self):
|
def test_get_spec(self):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
self.assertEqual(handler.get_spec(), 'wuttasync.importing.handlers:ImportHandler')
self.assertEqual(
    handler.get_spec(), "wuttasync.importing.handlers:ImportHandler"
)
|
||||||
|
|
||||||
def test_get_title(self):
|
def test_get_title(self):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
self.assertEqual(handler.get_title(), "None → None")
|
self.assertEqual(handler.get_title(), "None → None")
|
||||||
|
|
||||||
handler.source_title = 'CSV'
|
handler.source_title = "CSV"
|
||||||
handler.target_title = 'Wutta'
|
handler.target_title = "Wutta"
|
||||||
self.assertEqual(handler.get_title(), "CSV → Wutta")
|
self.assertEqual(handler.get_title(), "CSV → Wutta")
|
||||||
|
|
||||||
def test_get_source_title(self):
|
def test_get_source_title(self):
|
||||||
|
@@ -54,16 +56,16 @@ class TestImportHandler(DataTestCase):
|
||||||
self.assertIsNone(handler.get_source_title())
|
self.assertIsNone(handler.get_source_title())
|
||||||
|
|
||||||
# which is really using source_key as fallback
|
# which is really using source_key as fallback
|
||||||
handler.source_key = 'csv'
|
handler.source_key = "csv"
|
||||||
self.assertEqual(handler.get_source_title(), 'csv')
|
self.assertEqual(handler.get_source_title(), "csv")
|
||||||
|
|
||||||
# can also use (defined) generic fallback
|
# can also use (defined) generic fallback
|
||||||
handler.generic_source_title = 'CSV'
|
handler.generic_source_title = "CSV"
|
||||||
self.assertEqual(handler.get_source_title(), 'CSV')
|
self.assertEqual(handler.get_source_title(), "CSV")
|
||||||
|
|
||||||
# or can set explicitly
|
# or can set explicitly
|
||||||
handler.source_title = 'XXX'
|
handler.source_title = "XXX"
|
||||||
self.assertEqual(handler.get_source_title(), 'XXX')
|
self.assertEqual(handler.get_source_title(), "XXX")
|
||||||
|
|
||||||
def test_get_target_title(self):
|
def test_get_target_title(self):
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
|
@@ -72,23 +74,23 @@ class TestImportHandler(DataTestCase):
|
||||||
self.assertIsNone(handler.get_target_title())
|
self.assertIsNone(handler.get_target_title())
|
||||||
|
|
||||||
# which is really using target_key as fallback
|
# which is really using target_key as fallback
|
||||||
handler.target_key = 'wutta'
|
handler.target_key = "wutta"
|
||||||
self.assertEqual(handler.get_target_title(), 'wutta')
|
self.assertEqual(handler.get_target_title(), "wutta")
|
||||||
|
|
||||||
# can also use (defined) generic fallback
|
# can also use (defined) generic fallback
|
||||||
handler.generic_target_title = 'Wutta'
|
handler.generic_target_title = "Wutta"
|
||||||
self.assertEqual(handler.get_target_title(), 'Wutta')
|
self.assertEqual(handler.get_target_title(), "Wutta")
|
||||||
|
|
||||||
# or can set explicitly
|
# or can set explicitly
|
||||||
handler.target_title = 'XXX'
|
handler.target_title = "XXX"
|
||||||
self.assertEqual(handler.get_target_title(), 'XXX')
|
self.assertEqual(handler.get_target_title(), "XXX")
|
||||||
|
|
||||||
def test_process_data(self):
|
def test_process_data(self):
|
||||||
model = self.app.model
|
model = self.app.model
|
||||||
handler = self.make_handler()
|
handler = self.make_handler()
|
||||||
|
|
||||||
# empty/no-op should commit (not fail)
# empty/no-op should commit (not fail)
|
||||||
with patch.object(handler, 'commit_transaction') as commit_transaction:
|
with patch.object(handler, "commit_transaction") as commit_transaction:
|
||||||
handler.process_data()
|
handler.process_data()
|
||||||
commit_transaction.assert_called_once_with()
|
commit_transaction.assert_called_once_with()
|
||||||
|
|
||||||
|
@@ -96,8 +98,8 @@ class TestImportHandler(DataTestCase):
|
||||||
handler.process_data()
|
handler.process_data()
|
||||||
|
|
||||||
# dry-run should rollback
|
# dry-run should rollback
|
||||||
with patch.object(handler, 'commit_transaction') as commit_transaction:
|
with patch.object(handler, "commit_transaction") as commit_transaction:
|
||||||
with patch.object(handler, 'rollback_transaction') as rollback_transaction:
|
with patch.object(handler, "rollback_transaction") as rollback_transaction:
|
||||||
handler.process_data(dry_run=True)
|
handler.process_data(dry_run=True)
|
||||||
self.assertFalse(commit_transaction.called)
|
self.assertFalse(commit_transaction.called)
|
||||||
rollback_transaction.assert_called_once_with()
|
rollback_transaction.assert_called_once_with()
|
||||||
|
@@ -106,36 +108,38 @@ class TestImportHandler(DataTestCase):
|
||||||
handler.process_data(dry_run=True)
|
handler.process_data(dry_run=True)
|
||||||
|
|
||||||
# outright error should cause rollback
|
# outright error should cause rollback
|
||||||
with patch.object(handler, 'commit_transaction') as commit_transaction:
|
with patch.object(handler, "commit_transaction") as commit_transaction:
|
||||||
with patch.object(handler, 'rollback_transaction') as rollback_transaction:
|
with patch.object(handler, "rollback_transaction") as rollback_transaction:
|
||||||
with patch.object(handler, 'get_importer', side_effect=RuntimeError):
|
with patch.object(handler, "get_importer", side_effect=RuntimeError):
|
||||||
self.assertRaises(RuntimeError, handler.process_data, 'BlahBlah')
|
self.assertRaises(RuntimeError, handler.process_data, "BlahBlah")
|
||||||
self.assertFalse(commit_transaction.called)
|
self.assertFalse(commit_transaction.called)
|
||||||
rollback_transaction.assert_called_once_with()
|
rollback_transaction.assert_called_once_with()
|
||||||
|
|
||||||
# fake importer class/data
|
# fake importer class/data
|
||||||
mock_source_objects = [{'name': 'foo', 'value': 'bar'}]
|
mock_source_objects = [{"name": "foo", "value": "bar"}]
|
||||||
|
|
||||||
class SettingImporter(ToSqlalchemy):
|
class SettingImporter(ToSqlalchemy):
|
||||||
model_class = model.Setting
|
model_class = model.Setting
|
||||||
target_session = self.session
|
target_session = self.session
|
||||||
|
|
||||||
def get_source_objects(self):
|
def get_source_objects(self):
|
||||||
return mock_source_objects
|
return mock_source_objects
|
||||||
|
|
||||||
# now for a "normal" one
|
# now for a "normal" one
|
||||||
handler.importers['Setting'] = SettingImporter
|
handler.importers["Setting"] = SettingImporter
|
||||||
self.assertEqual(self.session.query(model.Setting).count(), 0)
|
self.assertEqual(self.session.query(model.Setting).count(), 0)
|
||||||
handler.process_data('Setting')
|
handler.process_data("Setting")
|
||||||
self.assertEqual(self.session.query(model.Setting).count(), 1)
|
self.assertEqual(self.session.query(model.Setting).count(), 1)
|
||||||
|
|
||||||
# then add another mock record
|
# then add another mock record
|
||||||
mock_source_objects.append({'name': 'foo2', 'value': 'bar2'})
|
mock_source_objects.append({"name": "foo2", "value": "bar2"})
|
||||||
handler.process_data('Setting')
|
handler.process_data("Setting")
|
||||||
self.assertEqual(self.session.query(model.Setting).count(), 2)
|
self.assertEqual(self.session.query(model.Setting).count(), 2)
|
||||||
|
|
||||||
# nb. even if dry-run, record is added
|
# nb. even if dry-run, record is added
|
||||||
# (rollback would happen later in that case)
|
# (rollback would happen later in that case)
|
||||||
mock_source_objects.append({'name': 'foo3', 'value': 'bar3'})
|
mock_source_objects.append({"name": "foo3", "value": "bar3"})
|
||||||
handler.process_data('Setting', dry_run=True)
|
handler.process_data("Setting", dry_run=True)
|
||||||
self.assertEqual(self.session.query(model.Setting).count(), 3)
|
self.assertEqual(self.session.query(model.Setting).count(), 3)
|
||||||
|
|
||||||
def test_consume_kwargs(self):
|
def test_consume_kwargs(self):
|
||||||
|
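The tests above pin down how process_data handles transactions: commit on success, rollback on error, and rollback after the fact on a dry run. A rough sketch of the flow they imply (only commit_transaction, rollback_transaction, get_importer, consume_kwargs and dry_run come from the tests; begin_transaction and the overall structure are assumptions, not the real method):

def process_data(self, *keys, **kwargs):
    self.consume_kwargs(kwargs)      # captures dry_run, per test_consume_kwargs
    self.begin_transaction()         # hypothetical name for "open the target session"
    try:
        for key in keys:
            importer = self.get_importer(key, **kwargs)
            importer.process_data()
    except Exception:
        # "outright error should cause rollback"
        self.rollback_transaction()
        raise
    if self.dry_run:
        # records are written to the session first, then thrown away here
        self.rollback_transaction()
    else:
        self.commit_transaction()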
@@ -148,10 +152,10 @@ class TestImportHandler(DataTestCase):

# captures dry-run flag
self.assertFalse(handler.dry_run)
-kw['dry_run'] = True
+kw["dry_run"] = True
result = handler.consume_kwargs(kw)
self.assertIs(result, kw)
-self.assertTrue(kw['dry_run'])
+self.assertTrue(kw["dry_run"])
self.assertTrue(handler.dry_run)

def test_define_importers(self):
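Per the assertions above, consume_kwargs reads the dry-run flag, records it on the handler, and hands the same dict back unchanged. A minimal sketch consistent with those assertions (the real implementation may do more):

def consume_kwargs(self, kwargs):
    # the flag is read but deliberately left in the dict, since the test
    # asserts kw["dry_run"] is still True afterwards
    if "dry_run" in kwargs:
        self.dry_run = kwargs["dry_run"]
    return kwargs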
@@ -165,24 +169,23 @@ class TestImportHandler(DataTestCase):
handler = self.make_handler()

# normal
-handler.importers['Setting'] = Importer
-importer = handler.get_importer('Setting', model_class=model.Setting)
+handler.importers["Setting"] = Importer
+importer = handler.get_importer("Setting", model_class=model.Setting)
self.assertIsInstance(importer, Importer)

# specifying empty keys
-handler.importers['Setting'] = Importer
-importer = handler.get_importer('Setting', model_class=model.Setting,
-                                keys=None)
+handler.importers["Setting"] = Importer
+importer = handler.get_importer("Setting", model_class=model.Setting, keys=None)
self.assertIsInstance(importer, Importer)
-importer = handler.get_importer('Setting', model_class=model.Setting,
-                                keys='')
+importer = handler.get_importer("Setting", model_class=model.Setting, keys="")
self.assertIsInstance(importer, Importer)
-importer = handler.get_importer('Setting', model_class=model.Setting,
-                                keys=[])
+importer = handler.get_importer("Setting", model_class=model.Setting, keys=[])
self.assertIsInstance(importer, Importer)

# key not found
-self.assertRaises(KeyError, handler.get_importer, 'BunchOfNonsense', model_class=model.Setting)
+self.assertRaises(
+    KeyError, handler.get_importer, "BunchOfNonsense", model_class=model.Setting
+)


class TestFromFileHandler(DataTestCase):
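The empty-keys assertions above imply that get_importer simply ignores a keys value of None, "" or [], and that an unregistered key surfaces as a KeyError. A minimal sketch under those assumptions (factory signature and config handling are guesses, not the library's actual code):

def get_importer(self, key, **kwargs):
    # KeyError propagates when the key was never registered, which is
    # what the "key not found" assertion above relies on
    factory = self.importers[key]
    keys = kwargs.pop("keys", None)
    if keys:
        # only a truthy value is passed through; None, "" and [] all
        # fall back to whatever keys the importer defines itself
        kwargs["keys"] = keys
    return factory(self.config, **kwargs)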
@@ -192,8 +195,8 @@ class TestFromFileHandler(DataTestCase):

def test_process_data(self):
handler = self.make_handler()
-path = self.write_file('data.txt', '')
-with patch.object(mod.ImportHandler, 'process_data') as process_data:
+path = self.write_file("data.txt", "")
+with patch.object(mod.ImportHandler, "process_data") as process_data:

# bare
handler.process_data()
@@ -217,7 +220,7 @@ class TestToSqlalchemyHandler(DataTestCase):

def test_begin_target_transaction(self):
handler = self.make_handler()
-with patch.object(handler, 'make_target_session') as make_target_session:
+with patch.object(handler, "make_target_session") as make_target_session:
make_target_session.return_value = self.session
self.assertIsNone(handler.target_session)
handler.begin_target_transaction()

@@ -225,7 +228,7 @@ class TestToSqlalchemyHandler(DataTestCase):

def test_rollback_target_transaction(self):
handler = self.make_handler()
-with patch.object(handler, 'make_target_session') as make_target_session:
+with patch.object(handler, "make_target_session") as make_target_session:
make_target_session.return_value = self.session
self.assertIsNone(handler.target_session)
handler.begin_target_transaction()

@@ -235,7 +238,7 @@ class TestToSqlalchemyHandler(DataTestCase):

def test_commit_target_transaction(self):
handler = self.make_handler()
-with patch.object(handler, 'make_target_session') as make_target_session:
+with patch.object(handler, "make_target_session") as make_target_session:
make_target_session.return_value = self.session
self.assertIsNone(handler.target_session)
handler.begin_target_transaction()

@@ -250,6 +253,6 @@ class TestToSqlalchemyHandler(DataTestCase):
def test_get_importer_kwargs(self):
handler = self.make_handler()
handler.target_session = self.session
-kw = handler.get_importer_kwargs('Setting')
-self.assertIn('target_session', kw)
-self.assertIs(kw['target_session'], self.session)
+kw = handler.get_importer_kwargs("Setting")
+self.assertIn("target_session", kw)
+self.assertIs(kw["target_session"], self.session)
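Taken together, these tests suggest the SQLAlchemy-target handler opens its own session at the start of a run and hands that same session to every importer. A standalone sketch of that presumed lifecycle (the class name here is hypothetical; only the method names come from the tests, and closing semantics are assumed):

class TargetSessionLifecycle:
    # minimal sketch of the begin/commit/rollback pattern exercised above
    def __init__(self, app):
        self.app = app
        self.target_session = None

    def make_target_session(self):
        # assumption: a new app session backs the target side
        return self.app.make_session()

    def begin_target_transaction(self):
        self.target_session = self.make_target_session()

    def rollback_target_transaction(self):
        self.target_session.rollback()
        self.target_session = None

    def commit_target_transaction(self):
        self.target_session.commit()
        self.target_session = None

    def get_importer_kwargs(self, key, **kwargs):
        # each importer receives the shared target session, per the last test
        kwargs["target_session"] = self.target_session
        return kwargs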
@@ -1,3 +1,3 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-

from wuttasync.importing import model as mod

@@ -1,4 +1,4 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-

from unittest.mock import patch

@@ -16,22 +16,22 @@ class TestToWuttaHandler(DataTestCase):
handler = self.make_handler()

# uses app title by default
-self.config.setdefault('wutta.app_title', "What About This")
-self.assertEqual(handler.get_target_title(), 'What About This')
+self.config.setdefault("wutta.app_title", "What About This")
+self.assertEqual(handler.get_target_title(), "What About This")

# or generic default if present
handler.generic_target_title = "WHATABOUTTHIS"
-self.assertEqual(handler.get_target_title(), 'WHATABOUTTHIS')
+self.assertEqual(handler.get_target_title(), "WHATABOUTTHIS")

# but prefer specific title if present
handler.target_title = "what_about_this"
-self.assertEqual(handler.get_target_title(), 'what_about_this')
+self.assertEqual(handler.get_target_title(), "what_about_this")

def test_make_target_session(self):
handler = self.make_handler()

# makes "new" (mocked in our case) app session
-with patch.object(self.app, 'make_session') as make_session:
+with patch.object(self.app, "make_session") as make_session:
make_session.return_value = self.session
session = handler.make_target_session()
make_session.assert_called_once_with()
@@ -8,22 +8,24 @@ from wuttasync import util as mod
class TestDataDiffs(TestCase):

def test_source_missing_field(self):
-source = {'foo': 'bar'}
-target = {'baz': 'xyz', 'foo': 'bar'}
+source = {"foo": "bar"}
+target = {"baz": "xyz", "foo": "bar"}
self.assertRaises(KeyError, mod.data_diffs, source, target)

def test_target_missing_field(self):
-source = {'foo': 'bar', 'baz': 'xyz'}
-target = {'baz': 'xyz'}
-self.assertRaises(KeyError, mod.data_diffs, source, target, fields=['foo', 'baz'])
+source = {"foo": "bar", "baz": "xyz"}
+target = {"baz": "xyz"}
+self.assertRaises(
+    KeyError, mod.data_diffs, source, target, fields=["foo", "baz"]
+)

def test_no_diffs(self):
-source = {'foo': 'bar', 'baz': 'xyz'}
-target = {'baz': 'xyz', 'foo': 'bar'}
+source = {"foo": "bar", "baz": "xyz"}
+target = {"baz": "xyz", "foo": "bar"}
self.assertFalse(mod.data_diffs(source, target))

def test_with_diffs(self):
-source = {'foo': 'bar', 'baz': 'xyz'}
-target = {'baz': 'xyz', 'foo': 'BAR'}
+source = {"foo": "bar", "baz": "xyz"}
+target = {"baz": "xyz", "foo": "BAR"}
result = mod.data_diffs(source, target)
-self.assertEqual(result, ['foo'])
+self.assertEqual(result, ["foo"])
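The assertions above constrain data_diffs fairly tightly: a field missing on either side raises KeyError, equal dicts yield a falsy result, and differing fields come back as a list. A minimal sketch consistent with them (the default for fields is an assumption; the real implementation in wuttasync.util may differ):

def data_diffs(source, target, fields=None):
    # compare the two dicts field by field; indexing raises KeyError when a
    # field is missing on either side, per the first two tests
    fields = fields or list(target)
    return [field for field in fields if source[field] != target[field]]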