diff --git a/docs/conf.py b/docs/conf.py index de9af3d..2b47550 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -8,35 +8,35 @@ from importlib.metadata import version as get_version -project = 'WuttaSync' -copyright = '2024, Lance Edgar' -author = 'Lance Edgar' -release = get_version('WuttaSync') +project = "WuttaSync" +copyright = "2024, Lance Edgar" +author = "Lance Edgar" +release = get_version("WuttaSync") # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.viewcode', - 'sphinx.ext.todo', - 'enum_tools.autoenum', - 'sphinxcontrib.programoutput', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", + "sphinx.ext.todo", + "enum_tools.autoenum", + "sphinxcontrib.programoutput", ] -templates_path = ['_templates'] -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] intersphinx_mapping = { - 'python': ('https://docs.python.org/3/', None), - 'rattail-manual': ('https://docs.wuttaproject.org/rattail-manual/', None), - 'wuttjamaican': ('https://docs.wuttaproject.org/wuttjamaican/', None), + "python": ("https://docs.python.org/3/", None), + "rattail-manual": ("https://docs.wuttaproject.org/rattail-manual/", None), + "wuttjamaican": ("https://docs.wuttaproject.org/wuttjamaican/", None), } # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -html_theme = 'furo' -html_static_path = ['_static'] +html_theme = "furo" +html_static_path = ["_static"] diff --git a/src/wuttasync/_version.py b/src/wuttasync/_version.py index 6432bbf..690bd4f 100644 --- a/src/wuttasync/_version.py +++ b/src/wuttasync/_version.py @@ -3,4 +3,4 @@ from importlib.metadata import version -__version__ = version('WuttaSync') +__version__ = version("WuttaSync") diff --git a/src/wuttasync/cli/base.py b/src/wuttasync/cli/base.py index 25771ad..f9198d7 100644 --- a/src/wuttasync/cli/base.py +++ b/src/wuttasync/cli/base.py @@ -79,7 +79,7 @@ class ImportCommandHandler(GenericHandler): self.import_handler = import_handler elif callable(import_handler): self.import_handler = import_handler(self.config) - else: # spec + else: # spec factory = self.app.load_object(import_handler) self.import_handler = factory(self.config) @@ -101,20 +101,22 @@ class ImportCommandHandler(GenericHandler): """ # maybe just list models and bail - if params.get('list_models'): + if params.get("list_models"): self.list_models(params) return # otherwise process some data kw = dict(params) - models = kw.pop('models') + models = kw.pop("models") log.debug("using handler: %s", self.import_handler.get_spec()) # TODO: need to use all/default models if none specified # (and should know models by now for logging purposes) - log.debug("running %s %s for: %s", - self.import_handler, - self.import_handler.orientation.value, - ', '.join(models)) + log.debug( + "running %s %s for: %s", + self.import_handler, + self.import_handler.orientation.value, + ", ".join(models), + ) log.debug("params are: %s", kw) self.import_handler.process_data(*models, **kw) @@ -134,72 +136,93 @@ class ImportCommandHandler(GenericHandler): def import_command_template( - - models: Annotated[ - Optional[List[str]], - typer.Argument(help="Model(s) to process. 
Can specify one or more, " - "or omit to process default models.")] = None, - - list_models: Annotated[ - bool, - typer.Option('--list-models', '-l', - help="List available target models and exit.")] = False, - - create: Annotated[ - bool, - typer.Option(help="Allow new target records to be created. " - "See aso --max-create.")] = True, - - update: Annotated[ - bool, - typer.Option(help="Allow existing target records to be updated. " - "See also --max-update.")] = True, - - delete: Annotated[ - bool, - typer.Option(help="Allow existing target records to be deleted. " - "See also --max-delete.")] = False, - - fields: Annotated[ - str, - typer.Option('--fields', - help="List of fields to process. See also --exclude and --key.")] = None, - - excluded_fields: Annotated[ - str, - typer.Option('--exclude', - help="List of fields *not* to process. See also --fields.")] = None, - - keys: Annotated[ - str, - typer.Option('--key', '--keys', - help="List of fields to use as record key/identifier. " - "See also --fields.")] = None, - - max_create: Annotated[ - int, - typer.Option(help="Max number of target records to create (per model). " - "See also --create.")] = None, - - max_update: Annotated[ - int, - typer.Option(help="Max number of target records to update (per model). " - "See also --update.")] = None, - - max_delete: Annotated[ - int, - typer.Option(help="Max number of target records to delete (per model). " - "See also --delete.")] = None, - - max_total: Annotated[ - int, - typer.Option(help="Max number of *any* target record changes which may occur (per model).")] = None, - - dry_run: Annotated[ - bool, - typer.Option('--dry-run', - help="Go through the motions, but rollback the transaction.")] = False, - + models: Annotated[ + Optional[List[str]], + typer.Argument( + help="Model(s) to process. Can specify one or more, " + "or omit to process default models." + ), + ] = None, + list_models: Annotated[ + bool, + typer.Option( + "--list-models", "-l", help="List available target models and exit." + ), + ] = False, + create: Annotated[ + bool, + typer.Option( + help="Allow new target records to be created. " "See aso --max-create." + ), + ] = True, + update: Annotated[ + bool, + typer.Option( + help="Allow existing target records to be updated. " + "See also --max-update." + ), + ] = True, + delete: Annotated[ + bool, + typer.Option( + help="Allow existing target records to be deleted. " + "See also --max-delete." + ), + ] = False, + fields: Annotated[ + str, + typer.Option( + "--fields", help="List of fields to process. See also --exclude and --key." + ), + ] = None, + excluded_fields: Annotated[ + str, + typer.Option( + "--exclude", help="List of fields *not* to process. See also --fields." + ), + ] = None, + keys: Annotated[ + str, + typer.Option( + "--key", + "--keys", + help="List of fields to use as record key/identifier. " + "See also --fields.", + ), + ] = None, + max_create: Annotated[ + int, + typer.Option( + help="Max number of target records to create (per model). " + "See also --create." + ), + ] = None, + max_update: Annotated[ + int, + typer.Option( + help="Max number of target records to update (per model). " + "See also --update." + ), + ] = None, + max_delete: Annotated[ + int, + typer.Option( + help="Max number of target records to delete (per model). " + "See also --delete." + ), + ] = None, + max_total: Annotated[ + int, + typer.Option( + help="Max number of *any* target record changes which may occur (per model)." 
+ ), + ] = None, + dry_run: Annotated[ + bool, + typer.Option( + "--dry-run", help="Go through the motions, but rollback the transaction." + ), + ] = False, ): """ Stub function which provides a common param signature; used with @@ -248,15 +271,18 @@ def import_command(fn): def file_import_command_template( - - input_file_path: Annotated[ - Path, - typer.Option('--input-path', - exists=True, file_okay=True, dir_okay=True, - help="Path to input file(s). Can be a folder " - "if app logic can guess the filename(s); " - "otherwise must be complete file path.")] = None, - + input_file_path: Annotated[ + Path, + typer.Option( + "--input-path", + exists=True, + file_okay=True, + dir_okay=True, + help="Path to input file(s). Can be a folder " + "if app logic can guess the filename(s); " + "otherwise must be complete file path.", + ), + ] = None, ): """ Stub function to provide signature for import/export commands @@ -278,9 +304,9 @@ def file_import_command(fn): original_sig = inspect.signature(fn) plain_import_sig = inspect.signature(import_command_template) file_import_sig = inspect.signature(file_import_command_template) - desired_params = ( - list(plain_import_sig.parameters.values()) - + list(file_import_sig.parameters.values())) + desired_params = list(plain_import_sig.parameters.values()) + list( + file_import_sig.parameters.values() + ) params = list(original_sig.parameters.values()) for i, param in enumerate(desired_params): diff --git a/src/wuttasync/cli/import_csv.py b/src/wuttasync/cli/import_csv.py index 50c2a83..0b8716c 100644 --- a/src/wuttasync/cli/import_csv.py +++ b/src/wuttasync/cli/import_csv.py @@ -35,14 +35,12 @@ from .base import file_import_command, ImportCommandHandler @wutta_typer.command() @file_import_command -def import_csv( - ctx: typer.Context, - **kwargs -): +def import_csv(ctx: typer.Context, **kwargs): """ Import data from CSV file(s) to Wutta DB """ config = ctx.parent.wutta_config handler = ImportCommandHandler( - config, import_handler='wuttasync.importing.csv:FromCsvToWutta') + config, import_handler="wuttasync.importing.csv:FromCsvToWutta" + ) handler.run(ctx.params) diff --git a/src/wuttasync/importing/base.py b/src/wuttasync/importing/base.py index a0bc070..c9fcc9a 100644 --- a/src/wuttasync/importing/base.py +++ b/src/wuttasync/importing/base.py @@ -191,12 +191,15 @@ class Importer: self.config = config self.app = self.config.get_app() - self.create = kwargs.pop('create', - kwargs.pop('allow_create', self.allow_create)) - self.update = kwargs.pop('update', - kwargs.pop('allow_update', self.allow_update)) - self.delete = kwargs.pop('delete', - kwargs.pop('allow_delete', self.allow_delete)) + self.create = kwargs.pop( + "create", kwargs.pop("allow_create", self.allow_create) + ) + self.update = kwargs.pop( + "update", kwargs.pop("allow_update", self.allow_update) + ) + self.delete = kwargs.pop( + "delete", kwargs.pop("allow_delete", self.allow_delete) + ) self.__dict__.update(kwargs) @@ -207,12 +210,11 @@ class Importer: self.fields = self.config.parse_list(self.fields) # discard any fields caller asked to exclude - excluded = getattr(self, 'excluded_fields', None) + excluded = getattr(self, "excluded_fields", None) if excluded: if isinstance(excluded, str): excluded = self.config.parse_list(excluded) - self.fields = [f for f in self.fields - if f not in excluded] + self.fields = [f for f in self.fields if f not in excluded] @property def orientation(self): @@ -245,7 +247,7 @@ class Importer: """ Returns the display title for the target data model. 
""" - if hasattr(self, 'model_title'): + if hasattr(self, "model_title"): return self.model_title # TODO: this will fail if not using a model class, obviously.. @@ -264,7 +266,7 @@ class Importer: :returns: Possibly empty list of "simple" field names. """ - if hasattr(self, 'simple_fields'): + if hasattr(self, "simple_fields"): return self.simple_fields fields = get_columns(self.model_class) @@ -287,7 +289,7 @@ class Importer: :returns: List of all "supported" field names. """ - if hasattr(self, 'supported_fields'): + if hasattr(self, "supported_fields"): return self.supported_fields return self.get_simple_fields() @@ -306,7 +308,7 @@ class Importer: :returns: List of "effective" field names. """ - if hasattr(self, 'fields') and self.fields is not None: + if hasattr(self, "fields") and self.fields is not None: return self.fields return self.get_supported_fields() @@ -322,9 +324,9 @@ class Importer: """ keys = None # nb. prefer 'keys' but use 'key' as fallback - if hasattr(self, 'keys'): + if hasattr(self, "keys"): keys = self.keys - elif hasattr(self, 'key'): + elif hasattr(self, "key"): keys = self.key if keys: if isinstance(keys, str): @@ -401,7 +403,7 @@ class Importer: updated = [] deleted = [] - log.debug("using key fields: %s", ', '.join(self.get_keys())) + log.debug("using key fields: %s", ", ".join(self.get_keys())) # get complete set of normalized source data if source_data is None: @@ -411,8 +413,7 @@ class Importer: source_data, source_keys = self.get_unique_data(source_data) model_title = self.get_model_title() - log.debug(f"got %s {model_title} records from source", - len(source_data)) + log.debug(f"got %s {model_title} records from source", len(source_data)) # maybe cache existing target data if self.caches_target: @@ -426,8 +427,10 @@ class Importer: if self.delete: changes = len(created) + len(updated) if self.max_total and changes >= self.max_total: - log.debug("max of %s total changes already reached; skipping deletions", - self.max_total) + log.debug( + "max of %s total changes already reached; skipping deletions", + self.max_total, + ) else: deleted = self.do_delete(source_keys, changes, progress=progress) @@ -480,21 +483,32 @@ class Importer: if diffs: # data differs, so update target object - log.debug("fields (%s) differed for target data: %s and source data: %s", - ','.join(diffs), target_data, source_data) - target_object = self.update_target_object(target_object, - source_data, - target_data=target_data) + log.debug( + "fields (%s) differed for target data: %s and source data: %s", + ",".join(diffs), + target_data, + source_data, + ) + target_object = self.update_target_object( + target_object, source_data, target_data=target_data + ) updated.append((target_object, target_data, source_data)) # stop if we reach max allowed if self.max_update and len(updated) >= self.max_update: - log.warning("max of %s *updated* records has been reached; stopping now", - self.max_update) + log.warning( + "max of %s *updated* records has been reached; stopping now", + self.max_update, + ) raise ImportLimitReached() - elif self.max_total and (len(created) + len(updated)) >= self.max_total: - log.warning("max of %s *total changes* has been reached; stopping now", - self.max_total) + elif ( + self.max_total + and (len(created) + len(updated)) >= self.max_total + ): + log.warning( + "max of %s *total changes* has been reached; stopping now", + self.max_total, + ) raise ImportLimitReached() elif not target_object and self.create: @@ -513,12 +527,19 @@ class Importer: # stop if we reach 
max allowed if self.max_create and len(created) >= self.max_create: - log.warning("max of %s *created* records has been reached; stopping now", - self.max_create) + log.warning( + "max of %s *created* records has been reached; stopping now", + self.max_create, + ) raise ImportLimitReached() - elif self.max_total and (len(created) + len(updated)) >= self.max_total: - log.warning("max of %s *total changes* has been reached; stopping now", - self.max_total) + elif ( + self.max_total + and (len(created) + len(updated)) >= self.max_total + ): + log.warning( + "max of %s *total changes* has been reached; stopping now", + self.max_total, + ) raise ImportLimitReached() else: @@ -527,8 +548,12 @@ class Importer: actioning = self.actioning.capitalize() target_title = self.handler.get_target_title() try: - self.app.progress_loop(create_update, all_source_data, progress, - message=f"{actioning} {model_title} data to {target_title}") + self.app.progress_loop( + create_update, + all_source_data, + progress, + message=f"{actioning} {model_title} data to {target_title}", + ) except ImportLimitReached: pass @@ -575,27 +600,35 @@ class Importer: def delete(key, i): cached = self.cached_target.pop(key) - obj = cached['object'] + obj = cached["object"] # delete target object log.debug("deleting %s %s: %s", model_title, key, obj) if self.delete_target_object(obj): - deleted.append((obj, cached['data'])) + deleted.append((obj, cached["data"])) # stop if we reach max allowed if self.max_delete and len(deleted) >= self.max_delete: - log.warning("max of %s *deleted* records has been reached; stopping now", - self.max_delete) + log.warning( + "max of %s *deleted* records has been reached; stopping now", + self.max_delete, + ) raise ImportLimitReached() elif self.max_total and (changes + len(deleted)) >= self.max_total: - log.warning("max of %s *total changes* has been reached; stopping now", - self.max_total) + log.warning( + "max of %s *total changes* has been reached; stopping now", + self.max_total, + ) raise ImportLimitReached() try: model_title = self.get_model_title() - self.app.progress_loop(delete, sorted(deletable), progress, - message=f"Deleting {model_title} records") + self.app.progress_loop( + delete, + sorted(deletable), + progress, + message=f"Deleting {model_title} records", + ) except ImportLimitReached: pass @@ -685,8 +718,12 @@ class Importer: model_title = self.get_model_title() source_title = self.handler.get_source_title() - self.app.progress_loop(normalize, source_objects, progress, - message=f"Reading {model_title} data from {source_title}") + self.app.progress_loop( + normalize, + source_objects, + progress, + message=f"Reading {model_title} data from {source_title}", + ) return normalized def get_unique_data(self, source_data): @@ -724,10 +761,12 @@ class Importer: for data in source_data: key = self.get_record_key(data) if key in unique: - log.warning("duplicate %s records detected from %s for key: %s", - self.get_model_title(), - self.handler.get_source_title(), - key) + log.warning( + "duplicate %s records detected from %s for key: %s", + self.get_model_title(), + self.handler.get_source_title(), + key, + ) else: unique[key] = data return list(unique.values()), set(unique) @@ -830,12 +869,16 @@ class Importer: data = self.normalize_target_object(obj) if data: key = self.get_record_key(data) - cached[key] = {'object': obj, 'data': data} + cached[key] = {"object": obj, "data": data} model_title = self.get_model_title() target_title = self.handler.get_target_title() - 
self.app.progress_loop(cache, objects, progress, - message=f"Reading {model_title} data from {target_title}") + self.app.progress_loop( + cache, + objects, + progress, + message=f"Reading {model_title} data from {target_title}", + ) log.debug(f"cached %s {model_title} records from target", len(cached)) return cached @@ -877,7 +920,7 @@ class Importer: """ if self.caches_target and self.cached_target is not None: cached = self.cached_target.get(key) - return cached['object'] if cached else None + return cached["object"] if cached else None def normalize_target_object(self, obj): """ @@ -901,10 +944,8 @@ class Importer: :returns: Dict of normalized data fields, or ``None``. """ fields = self.get_fields() - fields = [f for f in self.get_simple_fields() - if f in fields] - data = dict([(field, getattr(obj, field)) - for field in fields]) + fields = [f for f in self.get_simple_fields() if f in fields] + data = dict([(field, getattr(obj, field)) for field in fields]) return data def get_deletable_keys(self, progress=None): @@ -930,13 +971,17 @@ class Importer: keys = set() def check(key, i): - data = self.cached_target[key]['data'] - obj = self.cached_target[key]['object'] + data = self.cached_target[key]["data"] + obj = self.cached_target[key]["object"] if self.can_delete_object(obj, data): keys.add(key) - self.app.progress_loop(check, set(self.cached_target), progress, - message="Determining which objects can be deleted") + self.app.progress_loop( + check, + set(self.cached_target), + progress, + message="Determining which objects can be deleted", + ) return keys ############################## @@ -954,7 +999,7 @@ class Importer: :returns: New object for the target side, or ``None``. """ - if source_data.get('__ignoreme__'): + if source_data.get("__ignoreme__"): return obj = self.make_empty_object(key) @@ -1035,9 +1080,11 @@ class Importer: # field is eligible for update generally, so compare # values between records - if (not target_data + if ( + not target_data or field not in target_data - or target_data[field] != source_data[field]): + or target_data[field] != source_data[field] + ): # data mismatch; update field for target object setattr(obj, field, source_data[field]) @@ -1150,7 +1197,7 @@ class FromFile(Importer): :returns: Path to input file. """ - if hasattr(self, 'input_file_path'): + if hasattr(self, "input_file_path"): return self.input_file_path folder = self.get_input_file_dir() @@ -1166,7 +1213,7 @@ class FromFile(Importer): :returns: Path to folder with input file(s). """ - if hasattr(self, 'input_file_dir'): + if hasattr(self, "input_file_dir"): return self.input_file_dir raise NotImplementedError("can't guess path to input file(s) folder") @@ -1180,7 +1227,7 @@ class FromFile(Importer): :returns: Input filename, sans folder path. """ - if hasattr(self, 'input_file_name'): + if hasattr(self, "input_file_name"): return self.input_file_name raise NotImplementedError("can't guess input filename") @@ -1218,7 +1265,7 @@ class ToSqlalchemy(Importer): """ caches_target = True - "" # nb. suppress sphinx docs + "" # nb. suppress sphinx docs def get_target_object(self, key): """ diff --git a/src/wuttasync/importing/csv.py b/src/wuttasync/importing/csv.py index e1937b5..a5db421 100644 --- a/src/wuttasync/importing/csv.py +++ b/src/wuttasync/importing/csv.py @@ -61,7 +61,7 @@ class FromCsv(FromFile): :class:`python:csv.DictReader` instance. """ - csv_encoding = 'utf_8' + csv_encoding = "utf_8" """ Encoding used by the CSV input file. 
@@ -78,11 +78,11 @@ class FromCsv(FromFile): :meth:`~wuttasync.importing.base.Importer.get_model_title()` to obtain the model name. """ - if hasattr(self, 'input_file_name'): + if hasattr(self, "input_file_name"): return self.input_file_name model_title = self.get_model_title() - return f'{model_title}.csv' + return f"{model_title}.csv" def open_input_file(self): """ @@ -104,7 +104,7 @@ class FromCsv(FromFile): """ path = self.get_input_file_path() log.debug("opening input file: %s", path) - self.input_file = open(path, 'rt', encoding=self.csv_encoding) + self.input_file = open(path, "rt", encoding=self.csv_encoding) self.input_reader = csv.DictReader(self.input_file) # nb. importer may have all supported fields by default, so @@ -112,8 +112,7 @@ class FromCsv(FromFile): fields = self.get_fields() orientation = self.orientation.value log.debug(f"supported fields for {orientation}: %s", fields) - self.fields = [f for f in self.input_reader.fieldnames or [] - if f in fields] + self.fields = [f for f in self.input_reader.fieldnames or [] if f in fields] log.debug("fields present in source data: %s", self.fields) if not self.fields: self.input_file.close() @@ -188,7 +187,8 @@ class FromCsvToSqlalchemyHandlerMixin: This all happens within :meth:`define_importers()`. """ - source_key = 'csv' + + source_key = "csv" generic_source_title = "CSV" FromImporterBase = FromCsv @@ -237,15 +237,18 @@ class FromCsvToSqlalchemyHandlerMixin: # mostly try to make an importer for every data model for name in dir(model): cls = getattr(model, name) - if isinstance(cls, type) and issubclass(cls, model.Base) and cls is not model.Base: + if ( + isinstance(cls, type) + and issubclass(cls, model.Base) + and cls is not model.Base + ): importers[name] = self.make_importer_factory(cls, name) # sort importers according to schema topography topo_sortkey = make_topo_sortkey(model) - importers = OrderedDict([ - (name, importers[name]) - for name in sorted(importers, key=topo_sortkey) - ]) + importers = OrderedDict( + [(name, importers[name]) for name in sorted(importers, key=topo_sortkey)] + ) return importers @@ -269,11 +272,14 @@ class FromCsvToSqlalchemyHandlerMixin: :returns: The new class, meant to process import/export targeting the given data model. """ - return type(f'{name}Importer', - (FromCsvToSqlalchemyMixin, self.FromImporterBase, self.ToImporterBase), { - 'model_class': model_class, - 'key': list(get_primary_keys(model_class)), - }) + return type( + f"{name}Importer", + (FromCsvToSqlalchemyMixin, self.FromImporterBase, self.ToImporterBase), + { + "model_class": model_class, + "key": list(get_primary_keys(model_class)), + }, + ) class FromCsvToWutta(FromCsvToSqlalchemyHandlerMixin, FromFileHandler, ToWuttaHandler): @@ -283,6 +289,7 @@ class FromCsvToWutta(FromCsvToSqlalchemyHandlerMixin, FromFileHandler, ToWuttaHa This uses :class:`FromCsvToSqlalchemyHandlerMixin` for most of the heavy lifting. """ + ToImporterBase = ToWutta def get_target_model(self): diff --git a/src/wuttasync/importing/handlers.py b/src/wuttasync/importing/handlers.py index 03a6179..f9ba772 100644 --- a/src/wuttasync/importing/handlers.py +++ b/src/wuttasync/importing/handlers.py @@ -39,8 +39,9 @@ class Orientation(Enum): """ Enum values for :attr:`ImportHandler.orientation`. 
""" - IMPORT = 'import' - EXPORT = 'export' + + IMPORT = "import" + EXPORT = "export" class ImportHandler(GenericHandler): @@ -158,7 +159,7 @@ class ImportHandler(GenericHandler): * ``'importing'`` * ``'exporting'`` """ - return f'{self.orientation.value}ing' + return f"{self.orientation.value}ing" @classmethod def get_key(cls): @@ -174,7 +175,7 @@ class ImportHandler(GenericHandler): here; but only one will be configured as the "default" handler for that key. See also :meth:`get_spec()`. """ - return f'to_{cls.target_key}.from_{cls.source_key}.{cls.orientation.value}' + return f"to_{cls.target_key}.from_{cls.source_key}.{cls.orientation.value}" @classmethod def get_spec(cls): @@ -188,7 +189,7 @@ class ImportHandler(GenericHandler): See also :meth:`get_key()`. """ - return f'{cls.__module__}:{cls.__name__}' + return f"{cls.__module__}:{cls.__name__}" def get_title(self): """ @@ -210,9 +211,9 @@ class ImportHandler(GenericHandler): See also :meth:`get_title()` and :meth:`get_target_title()`. """ - if hasattr(self, 'source_title'): + if hasattr(self, "source_title"): return self.source_title - if hasattr(self, 'generic_source_title'): + if hasattr(self, "generic_source_title"): return self.generic_source_title return self.source_key @@ -222,9 +223,9 @@ class ImportHandler(GenericHandler): See also :meth:`get_title()` and :meth:`get_source_title()`. """ - if hasattr(self, 'target_title'): + if hasattr(self, "target_title"): return self.target_title - if hasattr(self, 'generic_target_title'): + if hasattr(self, "generic_target_title"): return self.generic_target_title return self.target_key @@ -269,7 +270,9 @@ class ImportHandler(GenericHandler): msg = "%s: added %d; updated %d; deleted %d %s records" if self.dry_run: msg += " (dry run)" - log.info(msg, self.get_title(), len(created), len(updated), len(deleted), key) + log.info( + msg, self.get_title(), len(created), len(updated), len(deleted), key + ) except: # TODO: what should happen here? @@ -308,8 +311,8 @@ class ImportHandler(GenericHandler): :returns: Dict of kwargs, "post-consumption." """ - if 'dry_run' in kwargs: - self.dry_run = kwargs['dry_run'] + if "dry_run" in kwargs: + self.dry_run = kwargs["dry_run"] return kwargs @@ -485,11 +488,11 @@ class ImportHandler(GenericHandler): raise KeyError(f"unknown {orientation} key: {key}") kwargs = self.get_importer_kwargs(key, **kwargs) - kwargs['handler'] = self + kwargs["handler"] = self # nb. default logic should (normally) determine keys - if 'keys' in kwargs and not kwargs['keys']: - del kwargs['keys'] + if "keys" in kwargs and not kwargs["keys"]: + del kwargs["keys"] factory = self.importers[key] return factory(self.config, **kwargs) @@ -524,12 +527,12 @@ class FromFileHandler(ImportHandler): # interpret file vs. folder path # nb. 
this assumes FromFile importer/exporter - path = kwargs.pop('input_file_path', None) + path = kwargs.pop("input_file_path", None) if path: - if not kwargs.get('input_file_dir') and os.path.isdir(path): - kwargs['input_file_dir'] = path + if not kwargs.get("input_file_dir") and os.path.isdir(path): + kwargs["input_file_dir"] = path else: - kwargs['input_file_path'] = path + kwargs["input_file_path"] = path # and carry on super().process_data(*keys, **kwargs) @@ -586,5 +589,5 @@ class ToSqlalchemyHandler(ImportHandler): def get_importer_kwargs(self, key, **kwargs): """ """ kwargs = super().get_importer_kwargs(key, **kwargs) - kwargs.setdefault('target_session', self.target_session) + kwargs.setdefault("target_session", self.target_session) return kwargs diff --git a/src/wuttasync/importing/wutta.py b/src/wuttasync/importing/wutta.py index f88f76b..18d4145 100644 --- a/src/wuttasync/importing/wutta.py +++ b/src/wuttasync/importing/wutta.py @@ -33,15 +33,15 @@ class ToWuttaHandler(ToSqlalchemyHandler): database`). """ - target_key = 'wutta' - "" # nb. suppress docs + target_key = "wutta" + "" # nb. suppress docs def get_target_title(self): """ """ # nb. we override parent to use app title as default - if hasattr(self, 'target_title'): + if hasattr(self, "target_title"): return self.target_title - if hasattr(self, 'generic_target_title'): + if hasattr(self, "generic_target_title"): return self.generic_target_title return self.app.get_title() diff --git a/tasks.py b/tasks.py index 78a4ece..56a7e1d 100644 --- a/tasks.py +++ b/tasks.py @@ -15,10 +15,10 @@ def release(c, skip_tests=False): Release a new version of WuttaSync """ if not skip_tests: - c.run('pytest') + c.run("pytest") - if os.path.exists('dist'): - shutil.rmtree('dist') + if os.path.exists("dist"): + shutil.rmtree("dist") - c.run('python -m build --sdist') - c.run('twine upload dist/*') + c.run("python -m build --sdist") + c.run("twine upload dist/*") diff --git a/tests/cli/test_base.py b/tests/cli/test_base.py index 69af1b8..991358e 100644 --- a/tests/cli/test_base.py +++ b/tests/cli/test_base.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8; -*- +# -*- coding: utf-8; -*- import inspect from unittest import TestCase @@ -19,7 +19,7 @@ class TestImportCommandHandler(DataTestCase): handler = self.make_handler() self.assertIsNone(handler.import_handler) - FromCsvToWutta = self.app.load_object('wuttasync.importing.csv:FromCsvToWutta') + FromCsvToWutta = self.app.load_object("wuttasync.importing.csv:FromCsvToWutta") # as spec handler = self.make_handler(import_handler=FromCsvToWutta.get_spec()) @@ -35,26 +35,30 @@ class TestImportCommandHandler(DataTestCase): self.assertIs(handler.import_handler, myhandler) def test_run(self): - handler = self.make_handler(import_handler='wuttasync.importing.csv:FromCsvToWutta') + handler = self.make_handler( + import_handler="wuttasync.importing.csv:FromCsvToWutta" + ) - with patch.object(handler, 'list_models') as list_models: - handler.run({'list_models': True}) - list_models.assert_called_once_with({'list_models': True}) + with patch.object(handler, "list_models") as list_models: + handler.run({"list_models": True}) + list_models.assert_called_once_with({"list_models": True}) - with patch.object(handler, 'import_handler') as import_handler: - handler.run({'models': []}) + with patch.object(handler, "import_handler") as import_handler: + handler.run({"models": []}) import_handler.process_data.assert_called_once_with() def test_list_models(self): - handler = 
self.make_handler(import_handler='wuttasync.importing.csv:FromCsvToWutta') + handler = self.make_handler( + import_handler="wuttasync.importing.csv:FromCsvToWutta" + ) - with patch.object(mod, 'sys') as sys: + with patch.object(mod, "sys") as sys: handler.list_models({}) # just test a few random things we expect to see - self.assertTrue(sys.stdout.write.has_call('ALL MODELS:\n')) - self.assertTrue(sys.stdout.write.has_call('Person')) - self.assertTrue(sys.stdout.write.has_call('User')) - self.assertTrue(sys.stdout.write.has_call('Upgrade')) + self.assertTrue(sys.stdout.write.has_call("ALL MODELS:\n")) + self.assertTrue(sys.stdout.write.has_call("Person")) + self.assertTrue(sys.stdout.write.has_call("User")) + self.assertTrue(sys.stdout.write.has_call("Upgrade")) class TestImporterCommand(TestCase): @@ -64,12 +68,12 @@ class TestImporterCommand(TestCase): pass sig1 = inspect.signature(myfunc) - self.assertIn('kwargs', sig1.parameters) - self.assertNotIn('dry_run', sig1.parameters) + self.assertIn("kwargs", sig1.parameters) + self.assertNotIn("dry_run", sig1.parameters) wrapt = mod.import_command(myfunc) sig2 = inspect.signature(wrapt) - self.assertNotIn('kwargs', sig2.parameters) - self.assertIn('dry_run', sig2.parameters) + self.assertNotIn("kwargs", sig2.parameters) + self.assertIn("dry_run", sig2.parameters) class TestFileImporterCommand(TestCase): @@ -79,11 +83,11 @@ class TestFileImporterCommand(TestCase): pass sig1 = inspect.signature(myfunc) - self.assertIn('kwargs', sig1.parameters) - self.assertNotIn('dry_run', sig1.parameters) - self.assertNotIn('input_file_path', sig1.parameters) + self.assertIn("kwargs", sig1.parameters) + self.assertNotIn("dry_run", sig1.parameters) + self.assertNotIn("input_file_path", sig1.parameters) wrapt = mod.file_import_command(myfunc) sig2 = inspect.signature(wrapt) - self.assertNotIn('kwargs', sig2.parameters) - self.assertIn('dry_run', sig2.parameters) - self.assertIn('input_file_path', sig2.parameters) + self.assertNotIn("kwargs", sig2.parameters) + self.assertIn("dry_run", sig2.parameters) + self.assertIn("input_file_path", sig2.parameters) diff --git a/tests/cli/test_import_csv.py b/tests/cli/test_import_csv.py index f856947..5623176 100644 --- a/tests/cli/test_import_csv.py +++ b/tests/cli/test_import_csv.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8; -*- +# -*- coding: utf-8; -*- from unittest import TestCase from unittest.mock import MagicMock, patch @@ -6,14 +6,17 @@ from unittest.mock import MagicMock, patch from wuttasync.cli import import_csv as mod, ImportCommandHandler - class TestImportCsv(TestCase): def test_basic(self): - params = {'models': [], - 'create': True, 'update': True, 'delete': False, - 'dry_run': True} + params = { + "models": [], + "create": True, + "update": True, + "delete": False, + "dry_run": True, + } ctx = MagicMock(params=params) - with patch.object(ImportCommandHandler, 'run') as run: + with patch.object(ImportCommandHandler, "run") as run: mod.import_csv(ctx) run.assert_called_once_with(params) diff --git a/tests/importing/test_base.py b/tests/importing/test_base.py index feab115..2ec9164 100644 --- a/tests/importing/test_base.py +++ b/tests/importing/test_base.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8; -*- +# -*- coding: utf-8; -*- from unittest.mock import patch @@ -14,7 +14,7 @@ class TestImporter(DataTestCase): self.handler = ImportHandler(self.config) def make_importer(self, **kwargs): - kwargs.setdefault('handler', self.handler) + kwargs.setdefault("handler", self.handler) return mod.Importer(self.config, **kwargs) def 
test_constructor(self): @@ -24,11 +24,11 @@ class TestImporter(DataTestCase): imp = self.make_importer(model_class=model.Setting) # fields - self.assertEqual(imp.fields, ['name', 'value']) + self.assertEqual(imp.fields, ["name", "value"]) # orientation etc. self.assertEqual(imp.orientation, Orientation.IMPORT) - self.assertEqual(imp.actioning, 'importing') + self.assertEqual(imp.actioning, "importing") self.assertTrue(imp.create) self.assertTrue(imp.update) self.assertTrue(imp.delete) @@ -38,106 +38,111 @@ class TestImporter(DataTestCase): model = self.app.model # basic importer - imp = self.make_importer(model_class=model.Setting, fields='name') - self.assertEqual(imp.fields, ['name']) + imp = self.make_importer(model_class=model.Setting, fields="name") + self.assertEqual(imp.fields, ["name"]) def test_constructor_excluded_fields(self): model = self.app.model # basic importer - imp = self.make_importer(model_class=model.Setting, excluded_fields='value') - self.assertEqual(imp.fields, ['name']) + imp = self.make_importer(model_class=model.Setting, excluded_fields="value") + self.assertEqual(imp.fields, ["name"]) def test_get_model_title(self): model = self.app.model imp = self.make_importer(model_class=model.Setting) - self.assertEqual(imp.get_model_title(), 'Setting') + self.assertEqual(imp.get_model_title(), "Setting") imp.model_title = "SeTtInG" - self.assertEqual(imp.get_model_title(), 'SeTtInG') + self.assertEqual(imp.get_model_title(), "SeTtInG") def test_get_simple_fields(self): model = self.app.model imp = self.make_importer(model_class=model.Setting) - self.assertEqual(imp.get_simple_fields(), ['name', 'value']) - imp.simple_fields = ['name'] - self.assertEqual(imp.get_simple_fields(), ['name']) + self.assertEqual(imp.get_simple_fields(), ["name", "value"]) + imp.simple_fields = ["name"] + self.assertEqual(imp.get_simple_fields(), ["name"]) def test_get_supported_fields(self): model = self.app.model imp = self.make_importer(model_class=model.Setting) - self.assertEqual(imp.get_supported_fields(), ['name', 'value']) - imp.supported_fields = ['name'] - self.assertEqual(imp.get_supported_fields(), ['name']) + self.assertEqual(imp.get_supported_fields(), ["name", "value"]) + imp.supported_fields = ["name"] + self.assertEqual(imp.get_supported_fields(), ["name"]) def test_get_fields(self): model = self.app.model imp = self.make_importer(model_class=model.Setting) - self.assertEqual(imp.get_fields(), ['name', 'value']) - imp.fields = ['name'] - self.assertEqual(imp.get_fields(), ['name']) + self.assertEqual(imp.get_fields(), ["name", "value"]) + imp.fields = ["name"] + self.assertEqual(imp.get_fields(), ["name"]) def test_get_keys(self): model = self.app.model imp = self.make_importer(model_class=model.Setting) - self.assertEqual(imp.get_keys(), ['name']) - with patch.multiple(imp, create=True, key='value'): - self.assertEqual(imp.get_keys(), ['value']) - with patch.multiple(imp, create=True, keys=['foo', 'bar']): - self.assertEqual(imp.get_keys(), ['foo', 'bar']) + self.assertEqual(imp.get_keys(), ["name"]) + with patch.multiple(imp, create=True, key="value"): + self.assertEqual(imp.get_keys(), ["value"]) + with patch.multiple(imp, create=True, keys=["foo", "bar"]): + self.assertEqual(imp.get_keys(), ["foo", "bar"]) def test_process_data(self): model = self.app.model - imp = self.make_importer(model_class=model.Setting, caches_target=True, - delete=True) + imp = self.make_importer( + model_class=model.Setting, caches_target=True, delete=True + ) def make_cache(): - setting1 = 
model.Setting(name='foo1', value='bar1') - setting2 = model.Setting(name='foo2', value='bar2') - setting3 = model.Setting(name='foo3', value='bar3') + setting1 = model.Setting(name="foo1", value="bar1") + setting2 = model.Setting(name="foo2", value="bar2") + setting3 = model.Setting(name="foo3", value="bar3") cache = { - ('foo1',): { - 'object': setting1, - 'data': {'name': 'foo1', 'value': 'bar1'}, + ("foo1",): { + "object": setting1, + "data": {"name": "foo1", "value": "bar1"}, }, - ('foo2',): { - 'object': setting2, - 'data': {'name': 'foo2', 'value': 'bar2'}, + ("foo2",): { + "object": setting2, + "data": {"name": "foo2", "value": "bar2"}, }, - ('foo3',): { - 'object': setting3, - 'data': {'name': 'foo3', 'value': 'bar3'}, + ("foo3",): { + "object": setting3, + "data": {"name": "foo3", "value": "bar3"}, }, } return cache # nb. delete always succeeds - with patch.object(imp, 'delete_target_object', return_value=True): + with patch.object(imp, "delete_target_object", return_value=True): # create + update + delete all as needed - with patch.object(imp, 'get_target_cache', return_value=make_cache()): - created, updated, deleted = imp.process_data([ - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - {'name': 'foo5', 'value': 'BAR5'}, - ]) + with patch.object(imp, "get_target_cache", return_value=make_cache()): + created, updated, deleted = imp.process_data( + [ + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + {"name": "foo5", "value": "BAR5"}, + ] + ) self.assertEqual(len(created), 2) self.assertEqual(len(updated), 1) self.assertEqual(len(deleted), 2) # same but with --max-total so delete gets skipped - with patch.object(imp, 'get_target_cache', return_value=make_cache()): - with patch.object(imp, 'max_total', new=3): - created, updated, deleted = imp.process_data([ - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - {'name': 'foo5', 'value': 'BAR5'}, - ]) + with patch.object(imp, "get_target_cache", return_value=make_cache()): + with patch.object(imp, "max_total", new=3): + created, updated, deleted = imp.process_data( + [ + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + {"name": "foo5", "value": "BAR5"}, + ] + ) self.assertEqual(len(created), 2) self.assertEqual(len(updated), 1) self.assertEqual(len(deleted), 0) # delete all if source data empty - with patch.object(imp, 'get_target_cache', return_value=make_cache()): + with patch.object(imp, "get_target_cache", return_value=make_cache()): created, updated, deleted = imp.process_data() self.assertEqual(len(created), 0) self.assertEqual(len(updated), 0) @@ -148,120 +153,140 @@ class TestImporter(DataTestCase): imp = self.make_importer(model_class=model.Setting, caches_target=True) def make_cache(): - setting1 = model.Setting(name='foo1', value='bar1') - setting2 = model.Setting(name='foo2', value='bar2') + setting1 = model.Setting(name="foo1", value="bar1") + setting2 = model.Setting(name="foo2", value="bar2") cache = { - ('foo1',): { - 'object': setting1, - 'data': {'name': 'foo1', 'value': 'bar1'}, + ("foo1",): { + "object": setting1, + "data": {"name": "foo1", "value": "bar1"}, }, - ('foo2',): { - 'object': setting2, - 'data': {'name': 'foo2', 'value': 'bar2'}, + ("foo2",): { + "object": setting2, + "data": {"name": "foo2", "value": "bar2"}, }, } return cache # change nothing if data matches with patch.multiple(imp, create=True, cached_target=make_cache()): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'bar1'}, 
- {'name': 'foo2', 'value': 'bar2'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "bar1"}, + {"name": "foo2", "value": "bar2"}, + ] + ) self.assertEqual(len(created), 0) self.assertEqual(len(updated), 0) # update all as needed with patch.multiple(imp, create=True, cached_target=make_cache()): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'BAR1'}, - {'name': 'foo2', 'value': 'BAR2'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "BAR1"}, + {"name": "foo2", "value": "BAR2"}, + ] + ) self.assertEqual(len(created), 0) self.assertEqual(len(updated), 2) # update all, with --max-update with patch.multiple(imp, create=True, cached_target=make_cache(), max_update=1): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'BAR1'}, - {'name': 'foo2', 'value': 'BAR2'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "BAR1"}, + {"name": "foo2", "value": "BAR2"}, + ] + ) self.assertEqual(len(created), 0) self.assertEqual(len(updated), 1) # update all, with --max-total with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'BAR1'}, - {'name': 'foo2', 'value': 'BAR2'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "BAR1"}, + {"name": "foo2", "value": "BAR2"}, + ] + ) self.assertEqual(len(created), 0) self.assertEqual(len(updated), 1) # create all as needed with patch.multiple(imp, create=True, cached_target=make_cache()): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'bar1'}, - {'name': 'foo2', 'value': 'bar2'}, - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "bar1"}, + {"name": "foo2", "value": "bar2"}, + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + ] + ) self.assertEqual(len(created), 2) self.assertEqual(len(updated), 0) # what happens when create gets skipped with patch.multiple(imp, create=True, cached_target=make_cache()): - with patch.object(imp, 'create_target_object', return_value=None): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'bar1'}, - {'name': 'foo2', 'value': 'bar2'}, - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - ]) + with patch.object(imp, "create_target_object", return_value=None): + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "bar1"}, + {"name": "foo2", "value": "bar2"}, + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + ] + ) self.assertEqual(len(created), 0) self.assertEqual(len(updated), 0) # create all, with --max-create with patch.multiple(imp, create=True, cached_target=make_cache(), max_create=1): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'bar1'}, - {'name': 'foo2', 'value': 'bar2'}, - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "bar1"}, + {"name": "foo2", "value": "bar2"}, + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + ] + ) self.assertEqual(len(created), 1) self.assertEqual(len(updated), 0) # create all, with --max-total with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 
'bar1'}, - {'name': 'foo2', 'value': 'bar2'}, - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "bar1"}, + {"name": "foo2", "value": "bar2"}, + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + ] + ) self.assertEqual(len(created), 1) self.assertEqual(len(updated), 0) # create + update all as needed with patch.multiple(imp, create=True, cached_target=make_cache()): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'BAR1'}, - {'name': 'foo2', 'value': 'BAR2'}, - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "BAR1"}, + {"name": "foo2", "value": "BAR2"}, + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + ] + ) self.assertEqual(len(created), 2) self.assertEqual(len(updated), 2) # create + update all, with --max-total with patch.multiple(imp, create=True, cached_target=make_cache(), max_total=1): - created, updated = imp.do_create_update([ - {'name': 'foo1', 'value': 'BAR1'}, - {'name': 'foo2', 'value': 'BAR2'}, - {'name': 'foo3', 'value': 'BAR3'}, - {'name': 'foo4', 'value': 'BAR4'}, - ]) + created, updated = imp.do_create_update( + [ + {"name": "foo1", "value": "BAR1"}, + {"name": "foo2", "value": "BAR2"}, + {"name": "foo3", "value": "BAR3"}, + {"name": "foo4", "value": "BAR4"}, + ] + ) # nb. foo1 is updated first self.assertEqual(len(created), 0) self.assertEqual(len(updated), 1) @@ -270,21 +295,21 @@ class TestImporter(DataTestCase): model = self.app.model # this requires a mock target cache - setting1 = model.Setting(name='foo1', value='bar1') - setting2 = model.Setting(name='foo2', value='bar2') + setting1 = model.Setting(name="foo1", value="bar1") + setting2 = model.Setting(name="foo2", value="bar2") imp = self.make_importer(model_class=model.Setting, caches_target=True) cache = { - ('foo1',): { - 'object': setting1, - 'data': {'name': 'foo1', 'value': 'bar1'}, + ("foo1",): { + "object": setting1, + "data": {"name": "foo1", "value": "bar1"}, }, - ('foo2',): { - 'object': setting2, - 'data': {'name': 'foo2', 'value': 'bar2'}, + ("foo2",): { + "object": setting2, + "data": {"name": "foo2", "value": "bar2"}, }, } - with patch.object(imp, 'delete_target_object') as delete_target_object: + with patch.object(imp, "delete_target_object") as delete_target_object: # delete nothing if source has same keys with patch.multiple(imp, create=True, cached_target=dict(cache)): @@ -305,7 +330,7 @@ class TestImporter(DataTestCase): delete_target_object.reset_mock() with patch.multiple(imp, create=True, cached_target=dict(cache)): source_keys = set() - with patch.object(imp, 'max_delete', new=1): + with patch.object(imp, "max_delete", new=1): result = imp.do_delete(source_keys) self.assertEqual(delete_target_object.call_count, 1) self.assertEqual(len(result), 1) @@ -314,7 +339,7 @@ class TestImporter(DataTestCase): delete_target_object.reset_mock() with patch.multiple(imp, create=True, cached_target=dict(cache)): source_keys = set() - with patch.object(imp, 'max_total', new=1): + with patch.object(imp, "max_total", new=1): result = imp.do_delete(source_keys) self.assertEqual(delete_target_object.call_count, 1) self.assertEqual(len(result), 1) @@ -322,25 +347,25 @@ class TestImporter(DataTestCase): def test_get_record_key(self): model = self.app.model imp = self.make_importer(model_class=model.Setting) - record = {'name': 'foo', 
'value': 'bar'} - self.assertEqual(imp.get_record_key(record), ('foo',)) - imp.key = ('name', 'value') - self.assertEqual(imp.get_record_key(record), ('foo', 'bar')) + record = {"name": "foo", "value": "bar"} + self.assertEqual(imp.get_record_key(record), ("foo",)) + imp.key = ("name", "value") + self.assertEqual(imp.get_record_key(record), ("foo", "bar")) def test_data_diffs(self): model = self.app.model imp = self.make_importer(model_class=model.Setting) # 2 identical records - rec1 = {'name': 'foo', 'value': 'bar'} - rec2 = {'name': 'foo', 'value': 'bar'} + rec1 = {"name": "foo", "value": "bar"} + rec2 = {"name": "foo", "value": "bar"} result = imp.data_diffs(rec1, rec2) self.assertEqual(result, []) # now they're different - rec2['value'] = 'baz' + rec2["value"] = "baz" result = imp.data_diffs(rec1, rec2) - self.assertEqual(result, ['value']) + self.assertEqual(result, ["value"]) def test_normalize_source_data(self): model = self.app.model @@ -351,7 +376,7 @@ class TestImporter(DataTestCase): self.assertEqual(data, []) # now with 1 record - setting = model.Setting(name='foo', value='bar') + setting = model.Setting(name="foo", value="bar") data = imp.normalize_source_data(source_objects=[setting]) self.assertEqual(len(data), 1) # nb. default normalizer returns object as-is @@ -361,17 +386,17 @@ class TestImporter(DataTestCase): model = self.app.model imp = self.make_importer(model_class=model.Setting) - setting1 = model.Setting(name='foo', value='bar1') - setting2 = model.Setting(name='foo', value='bar2') + setting1 = model.Setting(name="foo", value="bar1") + setting2 = model.Setting(name="foo", value="bar2") result = imp.get_unique_data([setting2, setting1]) self.assertIsInstance(result, tuple) self.assertEqual(len(result), 2) self.assertIsInstance(result[0], list) self.assertEqual(len(result[0]), 1) - self.assertIs(result[0][0], setting2) # nb. not setting1 + self.assertIs(result[0][0], setting2) # nb. not setting1 self.assertIsInstance(result[1], set) - self.assertEqual(result[1], {('foo',)}) + self.assertEqual(result[1], {("foo",)}) def test_get_source_objects(self): model = self.app.model @@ -397,7 +422,7 @@ class TestImporter(DataTestCase): model = self.app.model imp = self.make_importer(model_class=model.Setting) - with patch.object(imp, 'get_target_objects') as get_target_objects: + with patch.object(imp, "get_target_objects") as get_target_objects: get_target_objects.return_value = [] # empty cache @@ -405,16 +430,16 @@ class TestImporter(DataTestCase): self.assertEqual(cache, {}) # cache w/ one record - setting = model.Setting(name='foo', value='bar') + setting = model.Setting(name="foo", value="bar") get_target_objects.return_value = [setting] cache = imp.get_target_cache() self.assertEqual(len(cache), 1) - self.assertIn(('foo',), cache) - foo = cache[('foo',)] + self.assertIn(("foo",), cache) + foo = cache[("foo",)] self.assertEqual(len(foo), 2) - self.assertEqual(set(foo), {'object', 'data'}) - self.assertIs(foo['object'], setting) - self.assertEqual(foo['data'], {'name': 'foo', 'value': 'bar'}) + self.assertEqual(set(foo), {"object", "data"}) + self.assertIs(foo["object"], setting) + self.assertEqual(foo["data"], {"name": "foo", "value": "bar"}) def test_get_target_objects(self): model = self.app.model @@ -423,36 +448,36 @@ class TestImporter(DataTestCase): def test_get_target_object(self): model = self.app.model - setting = model.Setting(name='foo', value='bar') + setting = model.Setting(name="foo", value="bar") # nb. 
must mock up a target cache for this one
         imp = self.make_importer(model_class=model.Setting, caches_target=True)
         imp.cached_target = {
-            ('foo',): {
-                'object': setting,
-                'data': {'name': 'foo', 'value': 'bar'},
+            ("foo",): {
+                "object": setting,
+                "data": {"name": "foo", "value": "bar"},
             },
         }

         # returns same object
-        result = imp.get_target_object(('foo',))
+        result = imp.get_target_object(("foo",))
         self.assertIs(result, setting)

         # and one more time just for kicks
-        result = imp.get_target_object(('foo',))
+        result = imp.get_target_object(("foo",))
         self.assertIs(result, setting)

         # but then not if cache flag is off
         imp.caches_target = False
-        result = imp.get_target_object(('foo',))
+        result = imp.get_target_object(("foo",))
         self.assertIsNone(result)

     def test_normalize_target_object(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)
-        setting = model.Setting(name='foo', value='bar')
+        setting = model.Setting(name="foo", value="bar")
         data = imp.normalize_target_object(setting)
-        self.assertEqual(data, {'name': 'foo', 'value': 'bar'})
+        self.assertEqual(data, {"name": "foo", "value": "bar"})

     def test_get_deletable_keys(self):
         model = self.app.model
@@ -463,11 +488,11 @@ class TestImporter(DataTestCase):
         self.assertIsInstance(result, set)
         self.assertEqual(result, set())

-        setting = model.Setting(name='foo', value='bar')
+        setting = model.Setting(name="foo", value="bar")
         cache = {
-            ('foo',): {
-                'object': setting,
-                'data': {'name': 'foo', 'value': 'bar'},
+            ("foo",): {
+                "object": setting,
+                "data": {"name": "foo", "value": "bar"},
             },
         }

@@ -475,10 +500,10 @@ class TestImporter(DataTestCase):
         # all are deletable by default
         result = imp.get_deletable_keys()
-        self.assertEqual(result, {('foo',)})
+        self.assertEqual(result, {("foo",)})

         # but some maybe can't be deleted
-        with patch.object(imp, 'can_delete_object', return_value=False):
+        with patch.object(imp, "can_delete_object", return_value=False):
             result = imp.get_deletable_keys()
             self.assertEqual(result, set())

@@ -487,22 +512,23 @@ class TestImporter(DataTestCase):
         imp = self.make_importer(model_class=model.Setting)

         # basic
-        setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar'})
+        setting = imp.create_target_object(("foo",), {"name": "foo", "value": "bar"})
         self.assertIsInstance(setting, model.Setting)
-        self.assertEqual(setting.name, 'foo')
-        self.assertEqual(setting.value, 'bar')
+        self.assertEqual(setting.name, "foo")
+        self.assertEqual(setting.value, "bar")

         # will skip if magic delete flag is set
-        setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar',
-                                                      '__ignoreme__': True})
+        setting = imp.create_target_object(
+            ("foo",), {"name": "foo", "value": "bar", "__ignoreme__": True}
+        )
         self.assertIsNone(setting)

     def test_make_empty_object(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)
-        obj = imp.make_empty_object(('foo',))
+        obj = imp.make_empty_object(("foo",))
         self.assertIsInstance(obj, model.Setting)
-        self.assertEqual(obj.name, 'foo')
+        self.assertEqual(obj.name, "foo")

     def test_make_object(self):
         model = self.app.model
@@ -513,23 +539,23 @@ class TestImporter(DataTestCase):
     def test_update_target_object(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)
-        setting = model.Setting(name='foo')
+        setting = model.Setting(name="foo")

         # basic logic for updating *new* object
-        obj = imp.update_target_object(setting, {'name': 'foo', 'value': 'bar'})
+        obj = imp.update_target_object(setting, {"name": "foo", "value": "bar"})
         self.assertIs(obj, setting)
-        self.assertEqual(setting.value, 'bar')
+        self.assertEqual(setting.value, "bar")

     def test_can_delete_object(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)
-        setting = model.Setting(name='foo')
+        setting = model.Setting(name="foo")
         self.assertTrue(imp.can_delete_object(setting))

     def test_delete_target_object(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)
-        setting = model.Setting(name='foo')
+        setting = model.Setting(name="foo")

         # nb. default implementation always returns false
         self.assertFalse(imp.delete_target_object(setting))
@@ -541,20 +567,20 @@ class TestFromFile(DataTestCase):
         self.handler = ImportHandler(self.config)

     def make_importer(self, **kwargs):
-        kwargs.setdefault('handler', self.handler)
+        kwargs.setdefault("handler", self.handler)
         return mod.FromFile(self.config, **kwargs)

     def test_setup(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)
-        with patch.object(imp, 'open_input_file') as open_input_file:
+        with patch.object(imp, "open_input_file") as open_input_file:
             imp.setup()
             open_input_file.assert_called_once_with()

     def test_teardown(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)
-        with patch.object(imp, 'close_input_file') as close_input_file:
+        with patch.object(imp, "close_input_file") as close_input_file:
             imp.teardown()
             close_input_file.assert_called_once_with()

@@ -563,13 +589,13 @@ class TestFromFile(DataTestCase):
         imp = self.make_importer(model_class=model.Setting)

         # path is guessed from dir+filename
-        path = self.write_file('data.txt', '')
+        path = self.write_file("data.txt", "")
         imp.input_file_dir = self.tempdir
-        imp.input_file_name = 'data.txt'
+        imp.input_file_name = "data.txt"
         self.assertEqual(imp.get_input_file_path(), path)

         # path can be explicitly set
-        path2 = self.write_file('data2.txt', '')
+        path2 = self.write_file("data2.txt", "")
         imp.input_file_path = path2
         self.assertEqual(imp.get_input_file_path(), path2)

@@ -592,8 +618,8 @@ class TestFromFile(DataTestCase):
         self.assertRaises(NotImplementedError, imp.get_input_file_name)

         # name can be explicitly set
-        imp.input_file_name = 'data.txt'
-        self.assertEqual(imp.get_input_file_name(), 'data.txt')
+        imp.input_file_name = "data.txt"
+        self.assertEqual(imp.get_input_file_name(), "data.txt")

     def test_open_input_file(self):
         model = self.app.model
@@ -604,10 +630,10 @@ class TestFromFile(DataTestCase):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)

-        path = self.write_file('data.txt', '')
-        with open(path, 'rt') as f:
+        path = self.write_file("data.txt", "")
+        with open(path, "rt") as f:
             imp.input_file = f
-            with patch.object(f, 'close') as close:
+            with patch.object(f, "close") as close:
                 imp.close_input_file()
                 close.assert_called_once_with()

@@ -619,16 +645,16 @@ class TestToSqlalchemy(DataTestCase):
         self.handler = ImportHandler(self.config)

     def make_importer(self, **kwargs):
-        kwargs.setdefault('handler', self.handler)
+        kwargs.setdefault("handler", self.handler)
         return mod.ToSqlalchemy(self.config, **kwargs)

     def test_get_target_objects(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting, target_session=self.session)
-        setting1 = model.Setting(name='foo', value='bar')
+        setting1 = model.Setting(name="foo", value="bar")
         self.session.add(setting1)
-        setting2 = model.Setting(name='foo2', value='bar2')
+        setting2 = model.Setting(name="foo2", value="bar2")
         self.session.add(setting2)
         self.session.commit()

@@ -638,60 +664,60 @@ class TestToSqlalchemy(DataTestCase):

     def test_get_target_object(self):
         model = self.app.model
-        setting = model.Setting(name='foo', value='bar')
+        setting = model.Setting(name="foo", value="bar")

         # nb. must mock up a target cache for this one
         imp = self.make_importer(model_class=model.Setting, caches_target=True)
         imp.cached_target = {
-            ('foo',): {
-                'object': setting,
-                'data': {'name': 'foo', 'value': 'bar'},
+            ("foo",): {
+                "object": setting,
+                "data": {"name": "foo", "value": "bar"},
             },
         }

         # returns same object
-        result = imp.get_target_object(('foo',))
+        result = imp.get_target_object(("foo",))
         self.assertIs(result, setting)

         # and one more time just for kicks
-        result = imp.get_target_object(('foo',))
+        result = imp.get_target_object(("foo",))
         self.assertIs(result, setting)

         # now let's put a 2nd setting in the db
-        setting2 = model.Setting(name='foo2', value='bar2')
+        setting2 = model.Setting(name="foo2", value="bar2")
         self.session.add(setting2)
         self.session.commit()

         # nb. disable target cache
-        with patch.multiple(imp, create=True,
-                            target_session=self.session,
-                            caches_target=False):
+        with patch.multiple(
+            imp, create=True, target_session=self.session, caches_target=False
+        ):

             # now we should be able to fetch that via query
-            result = imp.get_target_object(('foo2',))
+            result = imp.get_target_object(("foo2",))
             self.assertIsInstance(result, model.Setting)
             self.assertIs(result, setting2)

             # but sometimes it will not be found
-            result = imp.get_target_object(('foo3',))
+            result = imp.get_target_object(("foo3",))
             self.assertIsNone(result)

     def test_create_target_object(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting, target_session=self.session)
-        setting = model.Setting(name='foo', value='bar')
+        setting = model.Setting(name="foo", value="bar")

         # new object is added to session
-        setting = imp.create_target_object(('foo',), {'name': 'foo', 'value': 'bar'})
+        setting = imp.create_target_object(("foo",), {"name": "foo", "value": "bar"})
         self.assertIsInstance(setting, model.Setting)
-        self.assertEqual(setting.name, 'foo')
-        self.assertEqual(setting.value, 'bar')
+        self.assertEqual(setting.name, "foo")
+        self.assertEqual(setting.value, "bar")
         self.assertIn(setting, self.session)

     def test_delete_target_object(self):
         model = self.app.model
-        setting = model.Setting(name='foo', value='bar')
+        setting = model.Setting(name="foo", value="bar")
         self.session.add(setting)
         self.assertEqual(self.session.query(model.Setting).count(), 1)
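For orientation, here is a minimal sketch of the target-cache lookup that the hunks above exercise. It is illustrative only and not part of the patch; it reuses only the scaffolding already shown in these tests (DataTestCase, ImportHandler, ToSqlalchemy, and the cached_target layout), and the class/method names are placeholders:

    from wuttjamaican.testing import DataTestCase
    from wuttasync.importing import ImportHandler, ToSqlalchemy

    class ExampleTargetCache(DataTestCase):

        def setUp(self):
            self.setup_db()
            self.handler = ImportHandler(self.config)

        def test_cache_hit(self):
            model = self.app.model
            setting = model.Setting(name="foo", value="bar")
            imp = ToSqlalchemy(self.config, handler=self.handler,
                               model_class=model.Setting, caches_target=True)
            # prime the cache the same way the tests above do
            imp.cached_target = {
                ("foo",): {"object": setting, "data": {"name": "foo", "value": "bar"}},
            }
            # lookups by key are then served straight from the cache
            self.assertIs(imp.get_target_object(("foo",)), setting)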
diff --git a/tests/importing/test_csv.py b/tests/importing/test_csv.py
index dc65e54..acd5f8e 100644
--- a/tests/importing/test_csv.py
+++ b/tests/importing/test_csv.py
@@ -1,4 +1,4 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-

 import csv
 import uuid as _uuid
@@ -6,7 +6,12 @@ from unittest.mock import patch

 from wuttjamaican.testing import DataTestCase

-from wuttasync.importing import csv as mod, ImportHandler, ToSqlalchemyHandler, ToSqlalchemy
+from wuttasync.importing import (
+    csv as mod,
+    ImportHandler,
+    ToSqlalchemyHandler,
+    ToSqlalchemy,
+)

 class TestFromCsv(DataTestCase):
@@ -15,14 +20,17 @@ class TestFromCsv(DataTestCase):
         self.setup_db()
         self.handler = ImportHandler(self.config)

-        self.data_path = self.write_file('data.txt', """\
+        self.data_path = self.write_file(
+            "data.txt",
+            """\
 name,value
 foo,bar
 foo2,bar2
-""")
+""",
+        )

     def make_importer(self, **kwargs):
-        kwargs.setdefault('handler', self.handler)
+        kwargs.setdefault("handler", self.handler)
         return mod.FromCsv(self.config, **kwargs)

     def test_get_input_file_name(self):
@@ -30,39 +38,41 @@ foo2,bar2
         imp = self.make_importer(model_class=model.Setting)

         # name can be guessed
-        self.assertEqual(imp.get_input_file_name(), 'Setting.csv')
+        self.assertEqual(imp.get_input_file_name(), "Setting.csv")

         # name can be explicitly set
-        imp.input_file_name = 'data.txt'
-        self.assertEqual(imp.get_input_file_name(), 'data.txt')
+        imp.input_file_name = "data.txt"
+        self.assertEqual(imp.get_input_file_name(), "data.txt")

     def test_open_input_file(self):
         model = self.app.model
         imp = self.make_importer(model_class=model.Setting)

         # normal operation, input file includes all fields
-        imp = self.make_importer(model_class=model.Setting, input_file_path=self.data_path)
-        self.assertEqual(imp.fields, ['name', 'value'])
+        imp = self.make_importer(
+            model_class=model.Setting, input_file_path=self.data_path
+        )
+        self.assertEqual(imp.fields, ["name", "value"])
         imp.open_input_file()
         self.assertEqual(imp.input_file.name, self.data_path)
         self.assertIsInstance(imp.input_reader, csv.DictReader)
-        self.assertEqual(imp.fields, ['name', 'value'])
+        self.assertEqual(imp.fields, ["name", "value"])
         imp.input_file.close()

         # this file is missing a field, plus we'll pretend more are
         # supported - but should wind up with just the one field
-        missing = self.write_file('missing.txt', 'name')
+        missing = self.write_file("missing.txt", "name")
         imp = self.make_importer(model_class=model.Setting, input_file_path=missing)
-        imp.fields.extend(['lots', 'more'])
-        self.assertEqual(imp.fields, ['name', 'value', 'lots', 'more'])
+        imp.fields.extend(["lots", "more"])
+        self.assertEqual(imp.fields, ["name", "value", "lots", "more"])
         imp.open_input_file()
-        self.assertEqual(imp.fields, ['name'])
+        self.assertEqual(imp.fields, ["name"])
         imp.input_file.close()

         # and what happens when no known fields are found
-        bogus = self.write_file('bogus.txt', 'blarg')
+        bogus = self.write_file("bogus.txt", "blarg")
         imp = self.make_importer(model_class=model.Setting, input_file_path=bogus)
-        self.assertEqual(imp.fields, ['name', 'value'])
+        self.assertEqual(imp.fields, ["name", "value"])
         self.assertRaises(ValueError, imp.open_input_file)

     def test_close_input_file(self):
@@ -72,8 +82,8 @@ foo2,bar2
         imp.input_file_path = self.data_path
         imp.open_input_file()
         imp.close_input_file()
-        self.assertFalse(hasattr(imp, 'input_reader'))
-        self.assertFalse(hasattr(imp, 'input_file'))
+        self.assertFalse(hasattr(imp, "input_reader"))
+        self.assertFalse(hasattr(imp, "input_file"))

     def test_get_source_objects(self):
         model = self.app.model
@@ -84,8 +94,8 @@ foo2,bar2
         objects = imp.get_source_objects()
         imp.close_input_file()
         self.assertEqual(len(objects), 2)
-        self.assertEqual(objects[0], {'name': 'foo', 'value': 'bar'})
-        self.assertEqual(objects[1], {'name': 'foo2', 'value': 'bar2'})
+        self.assertEqual(objects[0], {"name": "foo", "value": "bar"})
+        self.assertEqual(objects[1], {"name": "foo2", "value": "bar2"})


 class MockMixinImporter(mod.FromCsvToSqlalchemyMixin, mod.FromCsv, ToSqlalchemy):
@@ -99,7 +109,7 @@ class TestFromCsvToSqlalchemyMixin(DataTestCase):
         self.handler = ImportHandler(self.config)

     def make_importer(self, **kwargs):
-        kwargs.setdefault('handler', self.handler)
+        kwargs.setdefault("handler", self.handler)
         return MockMixinImporter(self.config, **kwargs)

     def test_constructor(self):
@@ -112,31 +122,50 @@ class TestFromCsvToSqlalchemyMixin(DataTestCase):

         # typical
         # nb. as of now Upgrade is the only table using proper UUID
         imp = self.make_importer(model_class=model.Upgrade)
-        self.assertEqual(imp.uuid_keys, ['uuid'])
+        self.assertEqual(imp.uuid_keys, ["uuid"])

     def test_normalize_source_object(self):
         model = self.app.model

         # no uuid keys
         imp = self.make_importer(model_class=model.Setting)
-        result = imp.normalize_source_object({'name': 'foo', 'value': 'bar'})
-        self.assertEqual(result, {'name': 'foo', 'value': 'bar'})
+        result = imp.normalize_source_object({"name": "foo", "value": "bar"})
+        self.assertEqual(result, {"name": "foo", "value": "bar"})

         # source has proper UUID
         # nb. as of now Upgrade is the only table using proper UUID
-        imp = self.make_importer(model_class=model.Upgrade, fields=['uuid', 'description'])
-        result = imp.normalize_source_object({'uuid': _uuid.UUID('06753693-d892-77f0-8000-ce71bf7ebbba'),
-                                              'description': 'testing'})
-        self.assertEqual(result, {'uuid': _uuid.UUID('06753693-d892-77f0-8000-ce71bf7ebbba'),
-                                  'description': 'testing'})
+        imp = self.make_importer(
+            model_class=model.Upgrade, fields=["uuid", "description"]
+        )
+        result = imp.normalize_source_object(
+            {
+                "uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
+                "description": "testing",
+            }
+        )
+        self.assertEqual(
+            result,
+            {
+                "uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
+                "description": "testing",
+            },
+        )

         # source has string uuid
         # nb. as of now Upgrade is the only table using proper UUID
-        imp = self.make_importer(model_class=model.Upgrade, fields=['uuid', 'description'])
-        result = imp.normalize_source_object({'uuid': '06753693d89277f08000ce71bf7ebbba',
-                                              'description': 'testing'})
-        self.assertEqual(result, {'uuid': _uuid.UUID('06753693-d892-77f0-8000-ce71bf7ebbba'),
-                                  'description': 'testing'})
+        imp = self.make_importer(
+            model_class=model.Upgrade, fields=["uuid", "description"]
+        )
+        result = imp.normalize_source_object(
+            {"uuid": "06753693d89277f08000ce71bf7ebbba", "description": "testing"}
+        )
+        self.assertEqual(
+            result,
+            {
+                "uuid": _uuid.UUID("06753693-d892-77f0-8000-ce71bf7ebbba"),
+                "description": "testing",
+            },
+        )


 class MockMixinHandler(mod.FromCsvToSqlalchemyHandlerMixin, ToSqlalchemyHandler):
@@ -149,27 +178,33 @@ class TestFromCsvToSqlalchemyHandlerMixin(DataTestCase):
         return MockMixinHandler(self.config, **kwargs)

     def test_get_target_model(self):
-        with patch.object(mod.FromCsvToSqlalchemyHandlerMixin, 'define_importers', return_value={}):
+        with patch.object(
+            mod.FromCsvToSqlalchemyHandlerMixin, "define_importers", return_value={}
+        ):
             handler = self.make_handler()
             self.assertRaises(NotImplementedError, handler.get_target_model)

     def test_define_importers(self):
         model = self.app.model
-        with patch.object(mod.FromCsvToSqlalchemyHandlerMixin, 'get_target_model', return_value=model):
+        with patch.object(
+            mod.FromCsvToSqlalchemyHandlerMixin, "get_target_model", return_value=model
+        ):
             handler = self.make_handler()
             importers = handler.define_importers()
-            self.assertIn('Setting', importers)
-            self.assertTrue(issubclass(importers['Setting'], mod.FromCsv))
-            self.assertTrue(issubclass(importers['Setting'], ToSqlalchemy))
-            self.assertIn('User', importers)
-            self.assertIn('Person', importers)
-            self.assertIn('Role', importers)
+            self.assertIn("Setting", importers)
+            self.assertTrue(issubclass(importers["Setting"], mod.FromCsv))
+            self.assertTrue(issubclass(importers["Setting"], ToSqlalchemy))
+            self.assertIn("User", importers)
+            self.assertIn("Person", importers)
+            self.assertIn("Role", importers)

     def test_make_importer_factory(self):
         model = self.app.model
-        with patch.object(mod.FromCsvToSqlalchemyHandlerMixin, 'define_importers', return_value={}):
+        with patch.object(
+            mod.FromCsvToSqlalchemyHandlerMixin, "define_importers", return_value={}
+        ):
             handler = self.make_handler()
-            factory = handler.make_importer_factory(model.Setting, 'Setting')
+            factory = handler.make_importer_factory(model.Setting, "Setting")
             self.assertTrue(issubclass(factory, mod.FromCsv))
             self.assertTrue(issubclass(factory, ToSqlalchemy))
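Before the next file, a small sketch of the FromCsv read path that the TestFromCsv hunks above cover (open_input_file / get_source_objects / close_input_file). Illustrative only and not part of the patch; the file name and contents are placeholders, and construction mirrors the make_importer helper used in these tests:

    from wuttjamaican.testing import DataTestCase
    from wuttasync.importing import ImportHandler
    from wuttasync.importing import csv as csv_importing

    class ExampleCsvRead(DataTestCase):

        def setUp(self):
            self.setup_db()
            self.handler = ImportHandler(self.config)

        def test_read(self):
            model = self.app.model
            path = self.write_file("settings.csv", "name,value\nfoo,bar\n")
            imp = csv_importing.FromCsv(self.config, handler=self.handler,
                                        model_class=model.Setting,
                                        input_file_path=path)
            imp.open_input_file()    # wires up a csv.DictReader, per the tests above
            objects = imp.get_source_objects()
            imp.close_input_file()
            self.assertEqual(len(objects), 1)
            self.assertEqual(objects[0], {"name": "foo", "value": "bar"})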
diff --git a/tests/importing/test_handlers.py b/tests/importing/test_handlers.py
index 3c2fe49..9bd0157 100644
--- a/tests/importing/test_handlers.py
+++ b/tests/importing/test_handlers.py
@@ -1,4 +1,4 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-

 from collections import OrderedDict
 from unittest.mock import patch
@@ -17,34 +17,36 @@ class TestImportHandler(DataTestCase):
         handler = self.make_handler()
         self.assertEqual(str(handler), "None → None")

-        handler.source_title = 'CSV'
-        handler.target_title = 'Wutta'
+        handler.source_title = "CSV"
+        handler.target_title = "Wutta"
         self.assertEqual(str(handler), "CSV → Wutta")

     def test_actioning(self):
         handler = self.make_handler()
-        self.assertEqual(handler.actioning, 'importing')
+        self.assertEqual(handler.actioning, "importing")

         handler.orientation = mod.Orientation.EXPORT
-        self.assertEqual(handler.actioning, 'exporting')
+        self.assertEqual(handler.actioning, "exporting")

     def test_get_key(self):
         handler = self.make_handler()
-        self.assertEqual(handler.get_key(), 'to_None.from_None.import')
+        self.assertEqual(handler.get_key(), "to_None.from_None.import")

-        with patch.multiple(mod.ImportHandler, source_key='csv', target_key='wutta'):
-            self.assertEqual(handler.get_key(), 'to_wutta.from_csv.import')
+        with patch.multiple(mod.ImportHandler, source_key="csv", target_key="wutta"):
+            self.assertEqual(handler.get_key(), "to_wutta.from_csv.import")

     def test_get_spec(self):
         handler = self.make_handler()
-        self.assertEqual(handler.get_spec(), 'wuttasync.importing.handlers:ImportHandler')
+        self.assertEqual(
+            handler.get_spec(), "wuttasync.importing.handlers:ImportHandler"
+        )

     def test_get_title(self):
         handler = self.make_handler()
         self.assertEqual(handler.get_title(), "None → None")

-        handler.source_title = 'CSV'
-        handler.target_title = 'Wutta'
+        handler.source_title = "CSV"
+        handler.target_title = "Wutta"
         self.assertEqual(handler.get_title(), "CSV → Wutta")

     def test_get_source_title(self):
@@ -54,16 +56,16 @@ class TestImportHandler(DataTestCase):
         self.assertIsNone(handler.get_source_title())

         # which is really using source_key as fallback
-        handler.source_key = 'csv'
-        self.assertEqual(handler.get_source_title(), 'csv')
+        handler.source_key = "csv"
+        self.assertEqual(handler.get_source_title(), "csv")

         # can also use (defined) generic fallback
-        handler.generic_source_title = 'CSV'
-        self.assertEqual(handler.get_source_title(), 'CSV')
+        handler.generic_source_title = "CSV"
+        self.assertEqual(handler.get_source_title(), "CSV")

         # or can set explicitly
-        handler.source_title = 'XXX'
-        self.assertEqual(handler.get_source_title(), 'XXX')
+        handler.source_title = "XXX"
+        self.assertEqual(handler.get_source_title(), "XXX")

     def test_get_target_title(self):
         handler = self.make_handler()
@@ -72,23 +74,23 @@ class TestImportHandler(DataTestCase):
         self.assertIsNone(handler.get_target_title())

         # which is really using target_key as fallback
-        handler.target_key = 'wutta'
-        self.assertEqual(handler.get_target_title(), 'wutta')
+        handler.target_key = "wutta"
+        self.assertEqual(handler.get_target_title(), "wutta")

         # can also use (defined) generic fallback
-        handler.generic_target_title = 'Wutta'
-        self.assertEqual(handler.get_target_title(), 'Wutta')
+        handler.generic_target_title = "Wutta"
+        self.assertEqual(handler.get_target_title(), "Wutta")

         # or can set explicitly
-        handler.target_title = 'XXX'
-        self.assertEqual(handler.get_target_title(), 'XXX')
+        handler.target_title = "XXX"
+        self.assertEqual(handler.get_target_title(), "XXX")

     def test_process_data(self):
         model = self.app.model
         handler = self.make_handler()

         # empy/no-op should commit (not fail)
-        with patch.object(handler, 'commit_transaction') as commit_transaction:
+        with patch.object(handler, "commit_transaction") as commit_transaction:
             handler.process_data()
             commit_transaction.assert_called_once_with()

@@ -96,8 +98,8 @@ class TestImportHandler(DataTestCase):
             handler.process_data()

         # dry-run should rollback
-        with patch.object(handler, 'commit_transaction') as commit_transaction:
-            with patch.object(handler, 'rollback_transaction') as rollback_transaction:
+        with patch.object(handler, "commit_transaction") as commit_transaction:
+            with patch.object(handler, "rollback_transaction") as rollback_transaction:
                 handler.process_data(dry_run=True)
                 self.assertFalse(commit_transaction.called)
                 rollback_transaction.assert_called_once_with()
@@ -106,36 +108,38 @@ class TestImportHandler(DataTestCase):
             handler.process_data(dry_run=True)

         # outright error should cause rollback
-        with patch.object(handler, 'commit_transaction') as commit_transaction:
-            with patch.object(handler, 'rollback_transaction') as rollback_transaction:
-                with patch.object(handler, 'get_importer', side_effect=RuntimeError):
-                    self.assertRaises(RuntimeError, handler.process_data, 'BlahBlah')
+        with patch.object(handler, "commit_transaction") as commit_transaction:
+            with patch.object(handler, "rollback_transaction") as rollback_transaction:
+                with patch.object(handler, "get_importer", side_effect=RuntimeError):
+                    self.assertRaises(RuntimeError, handler.process_data, "BlahBlah")
                 self.assertFalse(commit_transaction.called)
                 rollback_transaction.assert_called_once_with()

         # fake importer class/data
-        mock_source_objects = [{'name': 'foo', 'value': 'bar'}]
+        mock_source_objects = [{"name": "foo", "value": "bar"}]
+
         class SettingImporter(ToSqlalchemy):
             model_class = model.Setting
             target_session = self.session
+
             def get_source_objects(self):
                 return mock_source_objects

         # now for a "normal" one
-        handler.importers['Setting'] = SettingImporter
+        handler.importers["Setting"] = SettingImporter
         self.assertEqual(self.session.query(model.Setting).count(), 0)
-        handler.process_data('Setting')
+        handler.process_data("Setting")
         self.assertEqual(self.session.query(model.Setting).count(), 1)

         # then add another mock record
-        mock_source_objects.append({'name': 'foo2', 'value': 'bar2'})
-        handler.process_data('Setting')
+        mock_source_objects.append({"name": "foo2", "value": "bar2"})
+        handler.process_data("Setting")
         self.assertEqual(self.session.query(model.Setting).count(), 2)

         # nb. even if dry-run, record is added
         # (rollback would happen later in that case)
-        mock_source_objects.append({'name': 'foo3', 'value': 'bar3'})
-        handler.process_data('Setting', dry_run=True)
+        mock_source_objects.append({"name": "foo3", "value": "bar3"})
+        handler.process_data("Setting", dry_run=True)
         self.assertEqual(self.session.query(model.Setting).count(), 3)

     def test_consume_kwargs(self):
@@ -148,10 +152,10 @@ class TestImportHandler(DataTestCase):

         # captures dry-run flag
         self.assertFalse(handler.dry_run)
-        kw['dry_run'] = True
+        kw["dry_run"] = True
         result = handler.consume_kwargs(kw)
         self.assertIs(result, kw)
-        self.assertTrue(kw['dry_run'])
+        self.assertTrue(kw["dry_run"])
         self.assertTrue(handler.dry_run)

     def test_define_importers(self):
@@ -165,24 +169,23 @@ class TestImportHandler(DataTestCase):
         handler = self.make_handler()

         # normal
-        handler.importers['Setting'] = Importer
-        importer = handler.get_importer('Setting', model_class=model.Setting)
+        handler.importers["Setting"] = Importer
+        importer = handler.get_importer("Setting", model_class=model.Setting)
         self.assertIsInstance(importer, Importer)

         # specifying empty keys
-        handler.importers['Setting'] = Importer
-        importer = handler.get_importer('Setting', model_class=model.Setting,
-                                        keys=None)
+        handler.importers["Setting"] = Importer
+        importer = handler.get_importer("Setting", model_class=model.Setting, keys=None)
         self.assertIsInstance(importer, Importer)
-        importer = handler.get_importer('Setting', model_class=model.Setting,
-                                        keys='')
+        importer = handler.get_importer("Setting", model_class=model.Setting, keys="")
         self.assertIsInstance(importer, Importer)
-        importer = handler.get_importer('Setting', model_class=model.Setting,
-                                        keys=[])
+        importer = handler.get_importer("Setting", model_class=model.Setting, keys=[])
         self.assertIsInstance(importer, Importer)

         # key not found
-        self.assertRaises(KeyError, handler.get_importer, 'BunchOfNonsense', model_class=model.Setting)
+        self.assertRaises(
+            KeyError, handler.get_importer, "BunchOfNonsense", model_class=model.Setting
+        )


 class TestFromFileHandler(DataTestCase):
@@ -192,8 +195,8 @@ class TestFromFileHandler(DataTestCase):

     def test_process_data(self):
         handler = self.make_handler()
-        path = self.write_file('data.txt', '')
-        with patch.object(mod.ImportHandler, 'process_data') as process_data:
+        path = self.write_file("data.txt", "")
+        with patch.object(mod.ImportHandler, "process_data") as process_data:

             # bare
             handler.process_data()
@@ -217,7 +220,7 @@ class TestToSqlalchemyHandler(DataTestCase):

     def test_begin_target_transaction(self):
         handler = self.make_handler()
-        with patch.object(handler, 'make_target_session') as make_target_session:
+        with patch.object(handler, "make_target_session") as make_target_session:
             make_target_session.return_value = self.session
             self.assertIsNone(handler.target_session)
             handler.begin_target_transaction()
@@ -225,7 +228,7 @@ class TestToSqlalchemyHandler(DataTestCase):

     def test_rollback_target_transaction(self):
         handler = self.make_handler()
-        with patch.object(handler, 'make_target_session') as make_target_session:
+        with patch.object(handler, "make_target_session") as make_target_session:
             make_target_session.return_value = self.session
             self.assertIsNone(handler.target_session)
             handler.begin_target_transaction()
@@ -235,7 +238,7 @@ class TestToSqlalchemyHandler(DataTestCase):

     def test_commit_target_transaction(self):
         handler = self.make_handler()
-        with patch.object(handler, 'make_target_session') as make_target_session:
+        with patch.object(handler, "make_target_session") as make_target_session:
             make_target_session.return_value = self.session
             self.assertIsNone(handler.target_session)
             handler.begin_target_transaction()
@@ -250,6 +253,6 @@ class TestToSqlalchemyHandler(DataTestCase):
     def test_get_importer_kwargs(self):
         handler = self.make_handler()
         handler.target_session = self.session
-        kw = handler.get_importer_kwargs('Setting')
-        self.assertIn('target_session', kw)
-        self.assertIs(kw['target_session'], self.session)
+        kw = handler.get_importer_kwargs("Setting")
+        self.assertIn("target_session", kw)
+        self.assertIs(kw["target_session"], self.session)
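The test_process_data hunks above also show the smallest useful end-to-end flow: register an importer class on the handler, then run it by model name. A sketch of that flow, reusing only the pieces shown in the test (the example class and test names here are illustrative, not part of the patch):

    from wuttjamaican.testing import DataTestCase
    from wuttasync.importing import ImportHandler, ToSqlalchemy

    class ExampleProcessData(DataTestCase):

        def setUp(self):
            self.setup_db()

        def test_import_one_setting(self):
            model = self.app.model
            handler = ImportHandler(self.config)

            class SettingImporter(ToSqlalchemy):
                model_class = model.Setting
                target_session = self.session

                def get_source_objects(self):
                    return [{"name": "foo", "value": "bar"}]

            # register the importer, then run it by model name
            handler.importers["Setting"] = SettingImporter
            handler.process_data("Setting")
            self.assertEqual(self.session.query(model.Setting).count(), 1)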
diff --git a/tests/importing/test_model.py b/tests/importing/test_model.py
index ea74a43..d27abc2 100644
--- a/tests/importing/test_model.py
+++ b/tests/importing/test_model.py
@@ -1,3 +1,3 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-

 from wuttasync.importing import model as mod
diff --git a/tests/importing/test_wutta.py b/tests/importing/test_wutta.py
index ec5df50..4d6fdd2 100644
--- a/tests/importing/test_wutta.py
+++ b/tests/importing/test_wutta.py
@@ -1,4 +1,4 @@
-#-*- coding: utf-8; -*-
+# -*- coding: utf-8; -*-

 from unittest.mock import patch

@@ -16,22 +16,22 @@ class TestToWuttaHandler(DataTestCase):
         handler = self.make_handler()

         # uses app title by default
-        self.config.setdefault('wutta.app_title', "What About This")
-        self.assertEqual(handler.get_target_title(), 'What About This')
+        self.config.setdefault("wutta.app_title", "What About This")
+        self.assertEqual(handler.get_target_title(), "What About This")

         # or generic default if present
         handler.generic_target_title = "WHATABOUTTHIS"
-        self.assertEqual(handler.get_target_title(), 'WHATABOUTTHIS')
+        self.assertEqual(handler.get_target_title(), "WHATABOUTTHIS")

         # but prefer specific title if present
         handler.target_title = "what_about_this"
-        self.assertEqual(handler.get_target_title(), 'what_about_this')
+        self.assertEqual(handler.get_target_title(), "what_about_this")

     def test_make_target_session(self):
         handler = self.make_handler()

         # makes "new" (mocked in our case) app session
-        with patch.object(self.app, 'make_session') as make_session:
+        with patch.object(self.app, "make_session") as make_session:
             make_session.return_value = self.session
             session = handler.make_target_session()
             make_session.assert_called_once_with()
diff --git a/tests/test_util.py b/tests/test_util.py
index fc0476c..4b01777 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -8,22 +8,24 @@ from wuttasync import util as mod
 class TestDataDiffs(TestCase):

     def test_source_missing_field(self):
-        source = {'foo': 'bar'}
-        target = {'baz': 'xyz', 'foo': 'bar'}
+        source = {"foo": "bar"}
+        target = {"baz": "xyz", "foo": "bar"}
         self.assertRaises(KeyError, mod.data_diffs, source, target)

     def test_target_missing_field(self):
-        source = {'foo': 'bar', 'baz': 'xyz'}
-        target = {'baz': 'xyz'}
-        self.assertRaises(KeyError, mod.data_diffs, source, target, fields=['foo', 'baz'])
+        source = {"foo": "bar", "baz": "xyz"}
+        target = {"baz": "xyz"}
+        self.assertRaises(
+            KeyError, mod.data_diffs, source, target, fields=["foo", "baz"]
+        )

     def test_no_diffs(self):
-        source = {'foo': 'bar', 'baz': 'xyz'}
-        target = {'baz': 'xyz', 'foo': 'bar'}
+        source = {"foo": "bar", "baz": "xyz"}
+        target = {"baz": "xyz", "foo": "bar"}
         self.assertFalse(mod.data_diffs(source, target))

     def test_with_diffs(self):
-        source = {'foo': 'bar', 'baz': 'xyz'}
-        target = {'baz': 'xyz', 'foo': 'BAR'}
+        source = {"foo": "bar", "baz": "xyz"}
+        target = {"baz": "xyz", "foo": "BAR"}
         result = mod.data_diffs(source, target)
-        self.assertEqual(result, ['foo'])
+        self.assertEqual(result, ["foo"])