Add use_lists arg for cache.cache_model(), plus CacheKeyNotSupported
				
					
				
parent 0b71aa8441
commit c27c3914ae

1 changed file with 26 additions and 9 deletions
@@ -1,8 +1,8 @@
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8; -*-
 ################################################################################
 #
 #  Rattail -- Retail Software Framework
-#  Copyright © 2010-2015 Lance Edgar
+#  Copyright © 2010-2017 Lance Edgar
 #
 #  This file is part of Rattail.
 #
@@ -37,13 +37,22 @@ from rattail.db import model
 log = logging.getLogger(__name__)
 
 
+class CacheKeyNotSupported(Exception):
+    """
+    Special error which a model cacher should raise when generating the cache
+    key for a given record/object, but when it is discovered the object does
+    not have sufficient data to generate the key.
+    """
+
+
 class ModelCacher(object):
     """
     Generic model data caching class.
     """
 
     def __init__(self, session, model_class, key='uuid', omit_duplicates=False,
-                 query=None, order_by=None, query_options=None, normalizer=None):
+                 query=None, order_by=None, query_options=None, normalizer=None,
+                 use_lists=False):
         self.session = session
         self.model_class = model_class
         self.key = key
@@ -56,6 +65,7 @@ class ModelCacher(object):
             self.normalize = lambda d: d
         else:
             self.normalize = normalizer
+        self.use_lists = use_lists
 
     @property
     def model_name(self):
@@ -107,14 +117,21 @@ class ModelCacher(object):
 
     def cache_instance(self, instance):
         normalized = self.normalize(instance)
-        key = self.get_key(instance, normalized)
-        if key not in self.instances:
-            self.instances[key] = normalized
+        try:
+            key = self.get_key(instance, normalized)
+        except CacheKeyNotSupported:
+            # this means the object doesn't belong in our cache
+            return
+        if self.use_lists:
+            self.instances.setdefault(key, []).append(normalized)
         else:
-            self.duplicate_keys.add(key)
-            if not self.omit_duplicates:
-                log.debug("cache already contained key, but overwriting: {}".format(repr(key)))
+            if key not in self.instances:
                 self.instances[key] = normalized
+            else:
+                self.duplicate_keys.add(key)
+                if not self.omit_duplicates:
+                    log.debug("cache already contained key, but overwriting: {}".format(repr(key)))
+                    self.instances[key] = normalized
 
 
 def cache_model(session, model_class, key='uuid', progress=None, **kwargs):
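For context, here is a minimal usage sketch of the two additions, not part of the commit itself. Only cache_model(), ModelCacher, use_lists, CacheKeyNotSupported, and the get_key() hook come from the diff above; the rattail.db.cache import path is inferred from the commit title, while the Product attribute item_code, the key='item_code' usage, and the helper/class names below are assumptions made purely for illustration.

# Hypothetical usage sketch -- not part of this commit.  The ``item_code``
# attribute and the helper/class names are assumptions for illustration.

from rattail.db import cache, model


class ProductCodeCacher(cache.ModelCacher):
    """
    Hypothetical cacher keyed by a product's (assumed) ``item_code``.
    Raising ``CacheKeyNotSupported`` from ``get_key()`` causes the new
    ``cache_instance()`` logic to skip the record quietly.
    """

    def get_key(self, product, normalized):
        code = getattr(product, 'item_code', None)  # assumed attribute
        if not code:
            # no usable key; tell the cacher to ignore this record
            raise cache.CacheKeyNotSupported(
                "product {0} has no item code".format(product.uuid))
        return code


def cache_products_by_code(session, progress=None):
    """
    Cache all products keyed by (assumed) ``item_code``.  With
    ``use_lists=True`` each cache value is a list of normalized records,
    so products which share a code are all kept, rather than later ones
    overwriting earlier ones or being omitted as duplicates.
    """
    return cache.cache_model(session, model.Product,
                             key='item_code',
                             progress=progress,
                             use_lists=True)

The two pieces are independent: cache_model() presumably drives the stock ModelCacher, so the ProductCodeCacher subclass is shown only to indicate where CacheKeyNotSupported is meant to be raised, not how such a cacher would be wired in.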