# ------------------------------------------------------------------------------
import os.path, time
from appy.fields.file import FileInfo
from appy.shared import utils as sutils

# ------------------------------------------------------------------------------
class Migrator:
    '''This class is responsible for performing migrations when, on
       installation, we've detected a new Appy version.'''
    def __init__(self, installer):
        self.installer = installer
        self.logger = installer.logger
        self.app = installer.app
        self.tool = self.app.config.appy()

    @staticmethod
    def migrateBinaryFields(obj):
        '''Ensures all file and frozen pod fields on p_obj are FileInfo
           instances.'''
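        # FileInfo instances (appy.fields.file) do not keep the binary content
        # in the ZODB: they reference files stored on the filesystem.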
|
2014-02-26 16:40:27 -06:00
|
|
|
migrated = 0 # Count the number of migrated fields
|
|
|
|
for field in obj.fields:
|
2014-05-14 08:10:41 -05:00
|
|
|
if field.type == 'File':
|
|
|
|
oldValue = getattr(obj, field.name)
|
|
|
|
if oldValue and not isinstance(oldValue, FileInfo):
|
|
|
|
# A legacy File object. Convert it to a FileInfo instance
|
|
|
|
# and extract the binary to the filesystem.
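                    # (Presumably, re-assigning the value through the Appy
                    # wrapper routes it through the field's storage logic,
                    # which now produces a FileInfo and writes the binary on
                    # disk.)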
                    setattr(obj, field.name, oldValue)
                    migrated += 1
            elif field.type == 'Pod':
                frozen = getattr(obj.o, field.name, None)
                if frozen:
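                    # This frozen document still lives in the ZODB. Migrate it
                    # in three steps: dump it to a temporary file, re-freeze it
                    # through the field (which stores it the new way), then
                    # clear the legacy attribute.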
                    # Dump this file on disk.
                    tempFolder = sutils.getOsTempFolder()
                    fmt = os.path.splitext(frozen.filename)[1][1:]
                    fileName = os.path.join(tempFolder,
                                            '%f.%s' % (time.time(), fmt))
                    f = file(fileName, 'wb')
                    if frozen.data.__class__.__name__ == 'Pdata':
                        # The file content is split into several chunks.
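                        # "Pdata" is the chunked storage used by Zope's OFS
                        # File objects: a linked list of data nodes, walked
                        # here via "next".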
                        f.write(frozen.data.data)
                        nextPart = frozen.data.next
                        while nextPart:
                            f.write(nextPart.data)
                            nextPart = nextPart.next
                    else:
                        # Only one chunk
                        f.write(frozen.data)
                    f.close()
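                    # Re-freeze the document from the temporary file: passing
                    # it as the "upload" argument lets the field store the
                    # frozen document in its new format.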
                    f = file(fileName)
                    field.freeze(obj, template=field.template[0], format=fmt,
                                 noSecurity=True, upload=f,
                                 freezeOdtOnError=False)
                    f.close()
                    # Remove the legacy in-ZODB file object.
                    setattr(obj.o, field.name, None)
                    migrated += 1
        return migrated

    def migrateTo_0_9_0(self):
        '''Migrates this DB to Appy 0.9.x.'''
        # Put all binaries to the filesystem.
        tool = self.tool
        tool.log('Migrating binary fields...')
        context = {'migrate': self.migrateBinaryFields, 'nb': 0}
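        # For every class in this app, the expression below is evaluated on
        # each of its objects: "obj" is the current object and "ctx" the
        # context dict above, so the number of migrated fields accumulates in
        # ctx['nb'].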
        for className in tool.o.getAllClassNames():
            tool.compute(className, context=context, noSecurity=True,
                         expression="ctx['nb'] += ctx['migrate'](obj)")
        tool.log('Migrated %d binary field(s).' % context['nb'])

    def run(self, force=False):
        '''Executes a migration when relevant, or does it unconditionally if
           p_force is True.'''
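        # The Appy version currently stored in the database tells whether a
        # migration is needed: no version at all, or a version older than
        # 0.9.0, triggers it.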
        appyVersion = self.tool.appyVersion
        if force or not appyVersion or (appyVersion < '0.9.0'):
            # Migration is required.
            self.logger.info('Appy version (DB) is %s' % appyVersion)
            startTime = time.time()
            self.migrateTo_0_9_0()
            stopTime = time.time()
            elapsed = (stopTime - startTime) / 60.0
            self.logger.info('Migration done in %d minute(s).' % elapsed)
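
# Typical usage, as a sketch (the "installer" below stands for whatever object
# this module receives from the Appy installer, exposing "logger", "app" and
# "app.config.appy()" as used in the constructor above):
#
#   migrator = Migrator(installer)
#   migrator.run()            # migrates only if the DB version requires it
#   migrator.run(force=True)  # performs the migration unconditionally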
# ------------------------------------------------------------------------------