add python3 support based on 2to3 script
parent caef0e85d0 · commit 4f91a30fec
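Since this whole diff was produced by running the 2to3 tool over the code base, a compact sketch of the rewrite categories it applies may help when skimming the hunks below. The snippet is illustrative only: the names (Example, describe, MODE, GREETING) are hypothetical and do not come from the Appy modules touched by this commit.

    # Illustrative Python 3 sketch (hypothetical names, not Appy code) of the
    # 2to3 rewrites that recur throughout this commit.

    class Example:
        def __init__(self, **fields):
            # dict.iteritems()/iterkeys()/itervalues() are gone: use items()/keys()/values()
            for k, v in fields.items():
                setattr(self, k, v)
        def __bool__(self):
            # __nonzero__ was renamed to __bool__
            return bool(self.__dict__)

    def describe(value):
        # print is a function; "except E, e" becomes "except E as e"
        try:
            print('value is %s' % value)
        except UnicodeDecodeError as e:
            print('encoding problem: %s' % e)

    # basestring/unicode no longer exist: compare against str
    assert isinstance('appy', str)
    # octal literals need the 0o prefix
    MODE = 0o744
    # exec is a function, not a statement
    exec('GREETING = "hello"')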
__init__.py | 31 changed lines
@@ -22,26 +22,27 @@ class Object:
     '''At every place we need an object, but without any requirement on its
        class (methods, attributes,...) we will use this minimalist class.'''
     def __init__(self, **fields):
-        for k, v in fields.iteritems():
+        for k, v in fields.items():
             setattr(self, k, v)
     def __repr__(self):
-        res = u'<Object '
-        for attrName, attrValue in self.__dict__.iteritems():
+        res = '<Object '
+        for attrName, attrValue in self.__dict__.items():
             v = attrValue
             if hasattr(v, '__repr__'):
                 v = v.__repr__()
             try:
-                res += u'%s=%s ' % (attrName, v)
+                res += '%s=%s ' % (attrName, v)
             except UnicodeDecodeError:
-                res += u'%s=<encoding problem> ' % attrName
+                res += '%s=<encoding problem> ' % attrName
         res = res.strip() + '>'
         return res.encode('utf-8')
-    def __nonzero__(self): return bool(self.__dict__)
+    def __bool__(self):
+        return bool(self.__dict__)
     def get(self, name, default=None): return getattr(self, name, default)
     def __getitem__(self, k): return getattr(self, k)
     def update(self, other):
-        '''Includes information from p_other into p_self'''
-        for k, v in other.__dict__.iteritems():
+        '''Includes information from p_other into p_self.'''
+        for k, v in other.__dict__.items():
             setattr(self, k, v)
     def clone(self):
         res = Object()

@@ -59,11 +60,11 @@ class Hack:
        "_base_<initial_method_name>_". In the patched method, one may use
        Hack.base to call the base method. If p_method is static, you must
        specify its class in p_klass.'''
-        # Get the class on which the surgery will take place
+        # Get the class on which the surgery will take place.
         isStatic = klass
-        klass = klass or method.im_class
-        # On this class, store m_method under its "base" name
-        name = isStatic and method.func_name or method.im_func.__name__
+        klass = klass or method.__self__.__class__
+        # On this class, store m_method under its "base" name.
+        name = isStatic and method.__name__ or method.__func__.__name__
         baseName = '_base_%s_' % name
         if isStatic:
             # If "staticmethod" isn't called hereafter, the static functions

@@ -78,8 +79,8 @@ class Hack:
        '''Allows to call the base (replaced) method. If p_method is static,
           you must specify its p_klass.'''
        isStatic = klass
-       klass = klass or method.im_class
-       name = isStatic and method.func_name or method.im_func.__name__
+       klass = klass or method.__self__.__class__
+       name = isStatic and method.__name__ or method.__func__.__name__
        return getattr(klass, '_base_%s_' % name)

        @staticmethod

@@ -87,7 +88,7 @@ class Hack:
        '''Injects any method or attribute from p_patchClass into klass.'''
        patched = []
        added = []
-       for name, attr in patchClass.__dict__.iteritems():
+       for name, attr in patchClass.__dict__.items():
            if name.startswith('__'): continue # Ignore special methods
            # Unwrap functions from static methods
            if attr.__class__.__name__ == 'staticmethod':
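To make the effect of the first hunk above concrete, here is a short, hypothetical usage sketch of the patched Object class under Python 3; it assumes the class exactly as shown in the hunk.

    # Usage sketch, assuming the Object class as patched above.
    o = Object(name='Appy', version=3)
    o.update(Object(lang='python'))   # copies attributes from the other object
    print(bool(o))                    # True: __bool__ replaces Python 2's __nonzero__
    print(bool(Object()))             # False: an Object without attributes is falsy
    print(o.get('missing', 'n/a'))    # 'n/a': get() falls back to the default
    print(o['name'])                  # 'Appy': __getitem__ delegates to getattr

repr() is deliberately left out of the sketch: the patched __repr__ still ends with res.encode('utf-8'), which returns bytes, and Python 3 requires __repr__ to return a str.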
@ -73,10 +73,10 @@ class AskSap:
|
|||
# Return info about a given function.
|
||||
info = sap.getFunctionInfo(sapElement)
|
||||
prefix = 'Function'
|
||||
print('%s: %s' % (prefix, sapElement))
|
||||
print(('%s: %s' % (prefix, sapElement)))
|
||||
print(info)
|
||||
sap.disconnect()
|
||||
except SapError, se:
|
||||
except SapError as se:
|
||||
sys.stderr.write(str(se))
|
||||
sys.stderr.write('\n')
|
||||
sys.exit(ERROR_CODE)
|
||||
|
|
|
@ -4,7 +4,7 @@ from optparse import OptionParser
|
|||
import ZODB.FileStorage
|
||||
import ZODB.serialize
|
||||
from DateTime import DateTime
|
||||
from StringIO import StringIO
|
||||
from io import StringIO
|
||||
folderName = os.path.dirname(__file__)
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -87,9 +87,9 @@ class ZodbBackuper:
|
|||
w('Try to create backup folder for logs "%s"...' % \
|
||||
self.logsBackupFolder)
|
||||
os.mkdir(self.logsBackupFolder)
|
||||
except IOError, ioe:
|
||||
except IOError as ioe:
|
||||
w(folderCreateError % str(ioe))
|
||||
except OSError, oe:
|
||||
except OSError as oe:
|
||||
w(folderCreateError % str(oe))
|
||||
if os.path.exists(self.logsBackupFolder):
|
||||
# Ok, we can make the backup of the log files.
|
||||
|
@ -175,7 +175,7 @@ class ZodbBackuper:
|
|||
except smtplib.SMTPException, sme:
|
||||
w('Error while contacting SMTP server %s (%s).' % \
|
||||
(self.options.smtpServer, str(se)))
|
||||
except socket.error, se:
|
||||
except socket.error as se:
|
||||
w('Could not connect to SMTP server %s (%s).' % \
|
||||
(self.options.smtpServer, str(se)))
|
||||
|
||||
|
@ -191,12 +191,12 @@ class ZodbBackuper:
|
|||
for fileName in os.listdir(self.tempFolder):
|
||||
ext = os.path.splitext(fileName)[1]
|
||||
if ext in self.toRemoveExts:
|
||||
exec '%sCount += 1' % ext[1:]
|
||||
exec('%sCount += 1' % ext[1:])
|
||||
fullFileName = os.path.join(self.tempFolder, fileName)
|
||||
#w('Removing "%s"...' % fullFileName)
|
||||
try:
|
||||
os.remove(fullFileName)
|
||||
except OSError, oe:
|
||||
except OSError as oe:
|
||||
w('Could not remove "%s" (%s).' % (fullFileName, str(oe)))
|
||||
w('%d .pdf, %d .doc, %d .rtf and %d .odt file(s) removed.' % \
|
||||
(pdfCount, docCount, rtfCount, odtCount))
|
||||
|
@ -268,7 +268,7 @@ class ZodbBackuper:
|
|||
if self.emails:
|
||||
self.sendEmails()
|
||||
self.logFile.close()
|
||||
print(self.logMem.getvalue())
|
||||
print((self.logMem.getvalue()))
|
||||
self.logMem.close()
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -302,7 +302,7 @@ class ZodbBackupScript:
|
|||
f.write('Hello.')
|
||||
f.close()
|
||||
os.remove(fileName)
|
||||
except OSError, oe:
|
||||
except OSError as oe:
|
||||
raise BackupError('I do not have the right to write in ' \
|
||||
'folder "%s".' % args[1])
|
||||
# Check temp folder
|
||||
|
@ -401,7 +401,7 @@ class ZodbBackupScript:
|
|||
self.checkArgs(options, args)
|
||||
backuper = ZodbBackuper(args[0], args[1], options)
|
||||
backuper.run()
|
||||
except BackupError, be:
|
||||
except BackupError as be:
|
||||
sys.stderr.write(str(be) + '\nRun the script without any ' \
|
||||
'argument for getting help.\n')
|
||||
sys.exit(ERROR_CODE)
|
||||
|
|
|
@ -21,7 +21,7 @@ class LdapTester:
|
|||
def __init__(self):
|
||||
# Get params from shell args.
|
||||
if len(sys.argv) != 8:
|
||||
print(LdapTester.__doc__)
|
||||
print((LdapTester.__doc__))
|
||||
sys.exit(0)
|
||||
s = self
|
||||
s.uri,s.login,s.password,s.base,s.attrs,s.filter,s.scope = sys.argv[1:]
|
||||
|
@ -33,15 +33,15 @@ class LdapTester:
|
|||
|
||||
def test(self):
|
||||
# Connect the the LDAP
|
||||
print('Connecting to... %s' % self.uri)
|
||||
print(('Connecting to... %s' % self.uri))
|
||||
connector = LdapConnector(self.uri)
|
||||
success, msg = connector.connect(self.login, self.password)
|
||||
if not success: return
|
||||
# Perform the query.
|
||||
print ('Querying %s...' % self.base)
|
||||
print(('Querying %s...' % self.base))
|
||||
res = connector.search(self.base, self.scope, self.filter,
|
||||
self.attributes)
|
||||
print('Got %d results' % len(res))
|
||||
print(('Got %d results' % len(res)))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
if __name__ == '__main__': LdapTester().test()
|
||||
|
|
|
@ -110,12 +110,12 @@ class EggifyScript:
|
|||
eggFullName = j(self.eggFolder, self.eggName)
|
||||
if os.path.exists(eggFullName):
|
||||
os.remove(eggFullName)
|
||||
print('Existing "%s" was removed.' % eggFullName)
|
||||
print(('Existing "%s" was removed.' % eggFullName))
|
||||
# Create a temp folder where to store the egg
|
||||
eggTempFolder = os.path.splitext(eggFullName)[0]
|
||||
if os.path.exists(eggTempFolder):
|
||||
FolderDeleter.delete(eggTempFolder)
|
||||
print('Removed "%s" that was in my way.' % eggTempFolder)
|
||||
print(('Removed "%s" that was in my way.' % eggTempFolder))
|
||||
os.mkdir(eggTempFolder)
|
||||
# Create the "Products" sub-folder if we must wrap the package in this
|
||||
# namespace
|
||||
|
@ -170,7 +170,7 @@ class EggifyScript:
|
|||
try:
|
||||
self.checkArgs(options, args)
|
||||
self.eggify()
|
||||
except EggifierError, ee:
|
||||
except EggifierError as ee:
|
||||
sys.stderr.write(str(ee) + '\nRun eggify.py -h for getting help.\n')
|
||||
sys.exit(ERROR_CODE)
|
||||
|
||||
|
|
|
@ -41,7 +41,7 @@ class GeneratorScript:
|
|||
sys.exit(ERROR_CODE)
|
||||
# Check existence of application
|
||||
if not os.path.exists(args[0]):
|
||||
print(APP_NOT_FOUND % args[0])
|
||||
print((APP_NOT_FOUND % args[0]))
|
||||
sys.exit(ERROR_CODE)
|
||||
# Convert app path to an absolute path
|
||||
args[0] = os.path.abspath(args[0])
|
||||
|
@ -55,8 +55,8 @@ class GeneratorScript:
|
|||
(options, args) = optParser.parse_args()
|
||||
try:
|
||||
self.manageArgs(optParser, options, args)
|
||||
print('Appy version: %s' % appy.version.verbose)
|
||||
print('Generating Zope product in %s/zope...' % args[0])
|
||||
print(('Appy version: %s' % appy.version.verbose))
|
||||
print(('Generating Zope product in %s/zope...' % args[0]))
|
||||
ZopeGenerator(args[0], options).run()
|
||||
# Give the user some statistics about its code
|
||||
LinesCounter(args[0], excludes=['%szope' % os.sep]).run()
|
||||
|
@ -71,7 +71,7 @@ class GeneratorScript:
|
|||
f.close()
|
||||
version = version[:version.find('build')-1]
|
||||
Debianizer(app, appDir, appVersion=version).run()
|
||||
except GeneratorError, ge:
|
||||
except GeneratorError as ge:
|
||||
sys.stderr.write(str(ge))
|
||||
sys.stderr.write('\n')
|
||||
optParser.print_help()
|
||||
|
|
|
@ -82,6 +82,6 @@ else:
|
|||
targetObject = getattr(targetObject, elem)
|
||||
# Execute the method on the target object
|
||||
if args: args = args.split('*')
|
||||
exec 'targetObject.%s(*args)' % toolMethod
|
||||
exec('targetObject.%s(*args)' % toolMethod)
|
||||
transaction.commit()
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
bin/new.py | 18 changed lines
|
@ -106,17 +106,17 @@ class ZopeInstanceCreator:
|
|||
f = file('bin/zopectl', 'w')
|
||||
f.write(zopeCtl % self.instancePath)
|
||||
f.close()
|
||||
os.chmod('bin/zopectl', 0744) # Make it executable by owner.
|
||||
os.chmod('bin/zopectl', 0o744) # Make it executable by owner.
|
||||
# Create bin/runzope
|
||||
f = file('bin/runzope', 'w')
|
||||
f.write(runZope % self.instancePath)
|
||||
f.close()
|
||||
os.chmod('bin/runzope', 0744) # Make it executable by owner.
|
||||
os.chmod('bin/runzope', 0o744) # Make it executable by owner.
|
||||
# Create bin/startoo
|
||||
f = file('bin/startoo', 'w')
|
||||
f.write(ooStart)
|
||||
f.close()
|
||||
os.chmod('bin/startoo', 0744) # Make it executable by owner.
|
||||
os.chmod('bin/startoo', 0o744) # Make it executable by owner.
|
||||
# Create etc/zope.conf
|
||||
os.mkdir('etc')
|
||||
f = file('etc/zope.conf', 'w')
|
||||
|
@ -138,10 +138,10 @@ class ZopeInstanceCreator:
|
|||
password = binascii.b2a_base64(sha('admin').digest())[:-1]
|
||||
f.write('admin:{SHA}%s\n' % password)
|
||||
f.close()
|
||||
os.chmod('inituser', 0644)
|
||||
os.chmod('inituser', 0o644)
|
||||
# User "zope" must own this instance
|
||||
os.system('chown -R zope %s' % self.instancePath)
|
||||
print('Zope instance created in %s.' % self.instancePath)
|
||||
print(('Zope instance created in %s.' % self.instancePath))
|
||||
os.chdir(curdir)
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -229,7 +229,7 @@ class NewScript:
|
|||
j = os.path.join
|
||||
# As eggs have been deleted, versions of components are lost. Reify
|
||||
# them from p_versions.
|
||||
dVersions = ['"%s":"%s"' % (n, v) for n, v in versions.iteritems()]
|
||||
dVersions = ['"%s":"%s"' % (n, v) for n, v in versions.items()]
|
||||
sVersions = 'appyVersions = {' + ','.join(dVersions) + '}'
|
||||
codeFile = "%s/pkg_resources.py" % self.libFolder
|
||||
f = file(codeFile)
|
||||
|
@ -326,7 +326,7 @@ class NewScript:
|
|||
action = 'Copying'
|
||||
if linksForProducts:
|
||||
action = 'Symlinking'
|
||||
print('%s Plone stuff in the Zope instance...' % action)
|
||||
print(('%s Plone stuff in the Zope instance...' % action))
|
||||
if self.ploneVersion in ('plone25', 'plone30'):
|
||||
self.installPlone25or30Stuff(linksForProducts)
|
||||
elif self.ploneVersion in ('plone3x', 'plone4'):
|
||||
|
@ -379,11 +379,11 @@ class NewScript:
|
|||
try:
|
||||
self.manageArgs(args)
|
||||
if self.ploneVersion != 'zope':
|
||||
print('Creating new %s instance...' % self.ploneVersion)
|
||||
print(('Creating new %s instance...' % self.ploneVersion))
|
||||
self.createInstance(linksForProducts)
|
||||
else:
|
||||
ZopeInstanceCreator(self.instancePath).run()
|
||||
except NewError, ne:
|
||||
except NewError as ne:
|
||||
optParser.print_help()
|
||||
sys.stderr.write(str(ne))
|
||||
sys.stderr.write('\n')
|
||||
|
|
|
@ -52,7 +52,7 @@ class OdfGrep:
|
|||
# Run "grep" in this folder
|
||||
match = self.callGrep(tempFolder)
|
||||
if match:
|
||||
print('Found in %s' % fileName)
|
||||
print(('Found in %s' % fileName))
|
||||
FolderDeleter.delete(tempFolder)
|
||||
|
||||
def run(self):
|
||||
|
@ -65,7 +65,7 @@ class OdfGrep:
|
|||
if os.path.splitext(name)[1] in self.toUnzip:
|
||||
self.grepFile(os.path.join(dir, name))
|
||||
else:
|
||||
print('%s does not exist.' % self.fileOrFolder)
|
||||
print(('%s does not exist.' % self.fileOrFolder))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -85,8 +85,8 @@ class FtpFolder:
|
|||
def clean(self, site):
|
||||
'''Cleans this folder'''
|
||||
# First, clean subFolders if they exist
|
||||
print('Cleaning %s %d subFolders' % \
|
||||
(self.getFullName(), len(self.subFolders)))
|
||||
print(('Cleaning %s %d subFolders' % \
|
||||
(self.getFullName(), len(self.subFolders))))
|
||||
for subFolder in self.subFolders:
|
||||
subFolder.clean(site)
|
||||
# Remove the subFolder
|
||||
|
@ -95,7 +95,7 @@ class FtpFolder:
|
|||
for f in self.files:
|
||||
fileName = '%s/%s' % (self.getFullName(), f)
|
||||
site.delete(fileName)
|
||||
print('%s removed.' % fileName)
|
||||
print(('%s removed.' % fileName))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class AppySite:
|
||||
|
@ -148,11 +148,11 @@ class AppySite:
|
|||
fileExt = os.path.splitext(fileName)[1]
|
||||
if fileExt in self.textExtensions:
|
||||
# Make a transfer in text mode
|
||||
print('Transfer file %s (text mode)' % fileName)
|
||||
print(('Transfer file %s (text mode)' % fileName))
|
||||
self.site.storlines(cmd, localFile)
|
||||
else:
|
||||
# Make a transfer in binary mode
|
||||
print('Transfer file %s (binary mode)' % fileName)
|
||||
print(('Transfer file %s (binary mode)' % fileName))
|
||||
self.site.storbinary(cmd, localFile)
|
||||
|
||||
def publish(self):
|
||||
|
@ -257,7 +257,7 @@ class Publisher:
|
|||
yesNo = '[Y/n]'
|
||||
else:
|
||||
yesNo = '[y/N]'
|
||||
print(question + ' ' + yesNo + ' ')
|
||||
print((question + ' ' + yesNo + ' '))
|
||||
response = sys.stdin.readline().strip().lower()
|
||||
res = False
|
||||
if response in ('y', 'yes'):
|
||||
|
@ -274,7 +274,7 @@ class Publisher:
|
|||
|
||||
def executeCommand(self, cmd):
|
||||
'''Executes the system command p_cmd.'''
|
||||
print('Executing %s...' % cmd)
|
||||
print(('Executing %s...' % cmd))
|
||||
os.system(cmd)
|
||||
|
||||
distExcluded = ('appy/doc', 'appy/temp', 'appy/versions', 'appy/gen/test')
|
||||
|
@ -340,7 +340,7 @@ class Publisher:
|
|||
newZipRelease, default='yes'):
|
||||
print('Publication canceled.')
|
||||
sys.exit(1)
|
||||
print('Removing obsolete %s...' % newZipRelease)
|
||||
print(('Removing obsolete %s...' % newZipRelease))
|
||||
os.remove(newZipRelease)
|
||||
zipFile = zipfile.ZipFile(newZipRelease, 'w', zipfile.ZIP_DEFLATED)
|
||||
curdir = os.getcwd()
|
||||
|
@ -474,7 +474,7 @@ class Publisher:
|
|||
if minimalist:
|
||||
FolderDeleter.delete('%s/pod/test' % genSrcFolder)
|
||||
# Write the appy version into the code itself (in appy/version.py)'''
|
||||
print('Publishing version %s...' % self.versionShort)
|
||||
print(('Publishing version %s...' % self.versionShort))
|
||||
# Dump version info in appy/version.py
|
||||
f = file('%s/version.py' % genSrcFolder, 'w')
|
||||
f.write('short = "%s"\n' % self.versionShort)
|
||||
|
@ -495,7 +495,7 @@ class Publisher:
|
|||
Cleaner().run(verbose=False)
|
||||
# Perform a small analysis on the Appy code
|
||||
LinesCounter(appy).run()
|
||||
print('Generating site in %s...' % self.genFolder)
|
||||
print(('Generating site in %s...' % self.genFolder))
|
||||
minimalist = self.askQuestion('Minimalist (shipped without tests)?',
|
||||
default='no')
|
||||
self.prepareGenFolder(minimalist)
|
||||
|
|
|
@ -21,10 +21,10 @@ class ZodbRestorer:
|
|||
datePart = '-D %s' % self.restoreDate
|
||||
repozoCmd = '%s %s -Rv -r %s %s -o %s' % (self.python,
|
||||
self.repozo, self.backupFolder, datePart, self.storageLocation)
|
||||
print('Executing %s...' % repozoCmd)
|
||||
print(('Executing %s...' % repozoCmd))
|
||||
os.system(repozoCmd)
|
||||
stopTime = time.time()
|
||||
print('Done in %d minute(s).' % ((stopTime-startTime)/60))
|
||||
print(('Done in %d minute(s).' % ((stopTime-startTime)/60)))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class ZodbRestoreScript:
|
||||
|
@ -56,7 +56,7 @@ class ZodbRestoreScript:
|
|||
f.write('Hello.')
|
||||
f.close()
|
||||
os.remove(args[0])
|
||||
except OSError, oe:
|
||||
except OSError as oe:
|
||||
raise RestoreError('I do not have the right to write file ' \
|
||||
'"%s".' % args[0])
|
||||
|
||||
|
@ -81,7 +81,7 @@ class ZodbRestoreScript:
|
|||
self.checkArgs(options, args)
|
||||
backuper = ZodbRestorer(args[0], args[1], options)
|
||||
backuper.run()
|
||||
except RestoreError, be:
|
||||
except RestoreError as be:
|
||||
sys.stderr.write(str(be))
|
||||
sys.stderr.write('\n')
|
||||
optParser.print_help()
|
||||
|
|
bin/zip.py | 12 changed lines
@@ -8,29 +8,29 @@ class Zipper:
    def __init__(self):
        self.zipFileName = '%s/Desktop/appy.zip' % os.environ['HOME']
    def createZipFile(self):
-       print('Creating %s...' % self.zipFileName)
+       print(('Creating %s...' % self.zipFileName))
        zipFile = zipfile.ZipFile(self.zipFileName, 'w', zipfile.ZIP_DEFLATED)
        for dir, dirnames, filenames in os.walk(appyPath):
            for f in filenames:
                fileName = os.path.join(dir, f)
                arcName = fileName[fileName.find('appy/'):]
-               print('Adding %s' % fileName)
+               print(('Adding %s' % fileName))
                zipFile.write(fileName, arcName)
        zipFile.close()

    def run(self):
        # Where to put the zip file ?
-       print("Where do you want to put appy.zip ? [Default is %s] " % \
-             os.path.dirname(self.zipFileName))
+       print(("Where do you want to put appy.zip ? [Default is %s] " % \
+              os.path.dirname(self.zipFileName)))
        response = sys.stdin.readline().strip()
        if response:
            if os.path.exists(response) and os.path.isdir(response):
                self.zipFileName = '%s/appy.zip' % response
            else:
-               print('%s is not a folder.' % response)
+               print(('%s is not a folder.' % response))
                sys.exit(1)
        if os.path.exists(self.zipFileName):
-           print('Removing existing %s...' % self.zipFileName)
+           print(('Removing existing %s...' % self.zipFileName))
            os.remove(self.zipFileName)
        Cleaner().run(verbose=False)
        self.createZipFile()
@ -21,9 +21,9 @@ from appy.gen.layout import Table, defaultFieldLayouts
|
|||
from appy.gen import utils as gutils
|
||||
from appy.px import Px
|
||||
from appy.shared import utils as sutils
|
||||
from group import Group
|
||||
from search import Search
|
||||
from page import Page
|
||||
from .group import Group
|
||||
from .page import Page
|
||||
import collections
|
||||
|
||||
# In this file, names "list" and "dict" refer to sub-modules. To use Python
|
||||
# builtin types, use __builtins__['list'] and __builtins__['dict']
|
||||
|
@ -370,7 +370,7 @@ class Field:
|
|||
labelName = name
|
||||
trPrefix = None
|
||||
if self.label:
|
||||
if isinstance(self.label, basestring): trPrefix = self.label
|
||||
if isinstance(self.label, str): trPrefix = self.label
|
||||
else: # It is a tuple (trPrefix, name)
|
||||
if self.label[1]: labelName = self.label[1]
|
||||
if self.label[0]: trPrefix = self.label[0]
|
||||
|
@ -382,16 +382,16 @@ class Field:
|
|||
self.helpId = self.labelId + '_help'
|
||||
# Determine read and write permissions for this field
|
||||
rp = self.specificReadPermission
|
||||
if rp and not isinstance(rp, basestring):
|
||||
if rp and not isinstance(rp, str):
|
||||
self.readPermission = '%s: Read %s %s' % (appName, prefix, name)
|
||||
elif rp and isinstance(rp, basestring):
|
||||
elif rp and isinstance(rp, str):
|
||||
self.readPermission = rp
|
||||
else:
|
||||
self.readPermission = 'read'
|
||||
wp = self.specificWritePermission
|
||||
if wp and not isinstance(wp, basestring):
|
||||
if wp and not isinstance(wp, str):
|
||||
self.writePermission = '%s: Write %s %s' % (appName, prefix, name)
|
||||
elif wp and isinstance(wp, basestring):
|
||||
elif wp and isinstance(wp, str):
|
||||
self.writePermission = wp
|
||||
else:
|
||||
self.writePermission = 'write'
|
||||
|
@ -442,7 +442,7 @@ class Field:
|
|||
self.readPermission
|
||||
if not obj.allows(perm): return
|
||||
# Evaluate self.show
|
||||
if callable(self.show):
|
||||
if isinstance(self.show, collections.Callable):
|
||||
res = self.callMethod(obj, self.show)
|
||||
else:
|
||||
res = self.show
|
||||
|
@ -480,7 +480,7 @@ class Field:
|
|||
if not masterData: return True
|
||||
else:
|
||||
master, masterValue = masterData
|
||||
if masterValue and callable(masterValue): return True
|
||||
if masterValue and isinstance(masterValue, collections.Callable): return True
|
||||
reqValue = master.getRequestValue(obj)
|
||||
# reqValue can be a list or not
|
||||
if type(reqValue) not in sutils.sequenceTypes:
|
||||
|
@ -496,8 +496,8 @@ class Field:
|
|||
if isinstance(mapping, __builtins__['dict']):
|
||||
# Is it a dict like {'label':..., 'descr':...}, or is it directly a
|
||||
# dict with a mapping?
|
||||
for k, v in mapping.iteritems():
|
||||
if (k not in self.labelTypes) or isinstance(v, basestring):
|
||||
for k, v in mapping.items():
|
||||
if (k not in self.labelTypes) or isinstance(v, str):
|
||||
# It is already a mapping
|
||||
return {'label':mapping, 'descr':mapping, 'help':mapping}
|
||||
# If we are here, we have {'label':..., 'descr':...}. Complete
|
||||
|
@ -520,7 +520,7 @@ class Field:
|
|||
areDefault = True
|
||||
layouts = self.computeDefaultLayouts()
|
||||
else:
|
||||
if isinstance(layouts, basestring):
|
||||
if isinstance(layouts, str):
|
||||
# The user specified a single layoutString (the "edit" one)
|
||||
layouts = {'edit': layouts}
|
||||
elif isinstance(layouts, Table):
|
||||
|
@ -541,8 +541,8 @@ class Field:
|
|||
# We have now a dict of layouts in p_layouts. Ensure now that a Table
|
||||
# instance is created for every layout (=value from the dict). Indeed,
|
||||
# a layout could have been expressed as a simple layout string.
|
||||
for layoutType in layouts.iterkeys():
|
||||
if isinstance(layouts[layoutType], basestring):
|
||||
for layoutType in layouts.keys():
|
||||
if isinstance(layouts[layoutType], str):
|
||||
layouts[layoutType] = Table(layouts[layoutType])
|
||||
# Derive "view", "search" and "cell" layouts from the "edit" layout
|
||||
# when relevant.
|
||||
|
@ -566,11 +566,11 @@ class Field:
|
|||
if areDefault and not self.group and \
|
||||
not ((self.type == 'String') and (self.format == self.XHTML)) and \
|
||||
not (self.type == 'Ref'):
|
||||
for layoutType in layouts.iterkeys():
|
||||
for layoutType in layouts.keys():
|
||||
layouts[layoutType].width = ''
|
||||
# Remove letters "r" from the layouts if the field is not required.
|
||||
if not self.required:
|
||||
for layoutType in layouts.iterkeys():
|
||||
for layoutType in layouts.keys():
|
||||
layouts[layoutType].removeElement('r')
|
||||
# Derive some boolean values from the layouts.
|
||||
self.hasLabel = self.hasLayoutElement('l', layouts)
|
||||
|
@ -597,7 +597,7 @@ class Field:
|
|||
def hasLayoutElement(self, element, layouts):
|
||||
'''This method returns True if the given layout p_element can be found
|
||||
at least once among the various p_layouts defined for this field.'''
|
||||
for layout in layouts.itervalues():
|
||||
for layout in layouts.values():
|
||||
if element in layout.layoutString: return True
|
||||
return False
|
||||
|
||||
|
@ -610,7 +610,7 @@ class Field:
|
|||
'''Gets, as a string, the layouts as could have been specified as input
|
||||
value for the Field constructor.'''
|
||||
res = '{'
|
||||
for k, v in self.layouts.iteritems():
|
||||
for k, v in self.layouts.items():
|
||||
res += '"%s":"%s",' % (k, v.layoutString)
|
||||
res += '}'
|
||||
return res
|
||||
|
@ -650,7 +650,7 @@ class Field:
|
|||
if self.isEmptyValue(obj, value):
|
||||
# If there is no value, get the default value if any: return
|
||||
# self.default, of self.default() if it is a method.
|
||||
if callable(self.default):
|
||||
if isinstance(self.default, collections.Callable):
|
||||
try:
|
||||
# Caching a default value can lead to problems. For example,
|
||||
# the process of creating an object from another one, or
|
||||
|
@ -660,7 +660,7 @@ class Field:
|
|||
# but it they depend on values set at (b), and are cached
|
||||
# and indexed, (c) will get the wrong, cached value.
|
||||
return self.callMethod(obj, self.default, cache=False)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
# Already logged. Here I do not raise the exception,
|
||||
# because it can be raised as the result of reindexing
|
||||
# the object in situations that are not foreseen by
|
||||
|
@ -723,12 +723,13 @@ class Field:
|
|||
# Start by getting the field value on p_obj
|
||||
res = self.getValue(obj)
|
||||
# Zope catalog does not like unicode strings
|
||||
if isinstance(res, unicode): res = res.encode('utf-8')
|
||||
if isinstance(value, str):
|
||||
res = value.encode('utf-8')
|
||||
if forSearch and (res != None):
|
||||
if type(res) in sutils.sequenceTypes:
|
||||
vals = []
|
||||
for v in res:
|
||||
if isinstance(v, unicode): vals.append(v.encode('utf-8'))
|
||||
if isinstance(v, str): vals.append(v.encode('utf-8'))
|
||||
else: vals.append(str(v))
|
||||
res = ' '.join(vals)
|
||||
else:
|
||||
|
@ -824,7 +825,7 @@ class Field:
|
|||
this field is the slave of another field.'''
|
||||
if not self.master: return ''
|
||||
res = 'slave*%s*' % self.masterName
|
||||
if not callable(self.masterValue):
|
||||
if not isinstance(self.masterValue, collections.Callable):
|
||||
res += '*'.join(self.masterValue)
|
||||
else:
|
||||
res += '+'
|
||||
|
@ -866,7 +867,7 @@ class Field:
|
|||
def securityCheck(self, obj, value):
|
||||
'''This method performs some security checks on the p_value that
|
||||
represents user input.'''
|
||||
if not isinstance(value, basestring): return
|
||||
if not isinstance(value, str): return
|
||||
# Search Javascript code in the value (prevent XSS attacks).
|
||||
if '<script' in value:
|
||||
obj.log('Detected Javascript in user input.', type='error')
|
||||
|
@ -900,14 +901,14 @@ class Field:
|
|||
# It is a custom function: execute it
|
||||
try:
|
||||
validValue = self.validator(obj, value)
|
||||
if isinstance(validValue, basestring) and validValue:
|
||||
if isinstance(validValue, str) and validValue:
|
||||
# Validation failed; and p_validValue contains an error
|
||||
# message.
|
||||
return validValue
|
||||
else:
|
||||
if not validValue:
|
||||
return obj.translate('field_invalid')
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
return str(e)
|
||||
except:
|
||||
return obj.translate('field_invalid')
|
||||
|
@ -931,7 +932,7 @@ class Field:
|
|||
obj = obj.appy()
|
||||
try:
|
||||
return gutils.callMethod(obj, method, cache=cache)
|
||||
except TypeError, te:
|
||||
except TypeError as te:
|
||||
# Try a version of the method that would accept self as an
|
||||
# additional parameter. In this case, we do not try to cache the
|
||||
# value (we do not call gutils.callMethod), because the value may
|
||||
|
@ -939,11 +940,11 @@ class Field:
|
|||
tb = sutils.Traceback.get()
|
||||
try:
|
||||
return method(obj, self)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
obj.log(tb, type='error')
|
||||
# Raise the initial error.
|
||||
raise te
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
obj.log(sutils.Traceback.get(), type='error')
|
||||
raise e
|
||||
|
||||
|
@ -951,7 +952,7 @@ class Field:
|
|||
'''Gets the value of attribue p_name on p_self, which can be a simple
|
||||
value or the result of a method call on p_obj.'''
|
||||
res = getattr(self, name)
|
||||
if not callable(res): return res
|
||||
if not isinstance(res, collections.Callable): return res
|
||||
return self.callMethod(obj, res)
|
||||
|
||||
def process(self, obj):
|
||||
|
|
|
@ -123,7 +123,7 @@ class Boolean(Field):
|
|||
|
||||
def getStorableValue(self, obj, value):
|
||||
if not self.isEmptyValue(obj, value):
|
||||
exec 'res = %s' % value
|
||||
exec('res = %s' % value)
|
||||
return res
|
||||
|
||||
def isTrue(self, obj, dbValue):
|
||||
|
@ -131,7 +131,7 @@ class Boolean(Field):
|
|||
not?'''
|
||||
rq = obj.REQUEST
|
||||
# Get the value we must compare (from request or from database)
|
||||
if rq.has_key(self.name):
|
||||
if self.name in rq:
|
||||
return rq.get(self.name) in ('True', 1, '1')
|
||||
return dbValue
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
@ -1263,20 +1263,20 @@ class Calendar(Field):
|
|||
maxDay = (maxYear, maxDate.month(), maxDate.day())
|
||||
# Browse years
|
||||
years = getattr(obj, self.name)
|
||||
for year in years.keys():
|
||||
for year in list(years.keys()):
|
||||
# Don't take this year into account if outside interval
|
||||
if minDate and (year < minYear): continue
|
||||
if maxDate and (year > maxYear): continue
|
||||
months = years[year]
|
||||
# Browse this year's months
|
||||
for month in months.keys():
|
||||
for month in list(months.keys()):
|
||||
# Don't take this month into account if outside interval
|
||||
thisMonth = (year, month)
|
||||
if minDate and (thisMonth < minMonth): continue
|
||||
if maxDate and (thisMonth > maxMonth): continue
|
||||
days = months[month]
|
||||
# Browse this month's days
|
||||
for day in days.keys():
|
||||
for day in list(days.keys()):
|
||||
# Don't take this day into account if outside interval
|
||||
thisDay = (year, month, day)
|
||||
if minDate and (thisDay < minDay): continue
|
||||
|
|
|
@ -47,7 +47,7 @@ class Computed(Field):
|
|||
# the user interface while m_method computes the value stored in the
|
||||
# catalog.
|
||||
self.formatMethod = formatMethod
|
||||
if isinstance(self.method, basestring):
|
||||
if isinstance(self.method, str):
|
||||
# A legacy macro identifier. Raise an exception
|
||||
raise Exception(self.WRONG_METHOD % self.method)
|
||||
# Does field computation produce plain text or XHTML?
|
||||
|
@ -99,6 +99,6 @@ class Computed(Field):
|
|||
res = self.formatMethod(obj, value)
|
||||
else:
|
||||
res = value
|
||||
if not isinstance(res, basestring): res = str(res)
|
||||
if not isinstance(res, str): res = str(res)
|
||||
return res
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
@ -219,7 +219,7 @@ class Date(Field):
|
|||
|
||||
def getSelectableYears(self):
|
||||
'''Gets the list of years one may select for this field.'''
|
||||
res = range(self.startYear, self.endYear + 1)
|
||||
res = list(range(self.startYear, self.endYear + 1))
|
||||
if self.reverseYears: res.reverse()
|
||||
return res
|
||||
|
||||
|
@ -227,7 +227,7 @@ class Date(Field):
|
|||
DateTime = obj.getProductConfig().DateTime
|
||||
try:
|
||||
value = DateTime(value)
|
||||
except DateTime.DateError, ValueError:
|
||||
except DateTime.DateError as ValueError:
|
||||
return obj.translate('bad_date')
|
||||
|
||||
def getFormattedValue(self, obj, value, layoutType='view',
|
||||
|
@ -278,7 +278,7 @@ class Date(Field):
|
|||
# Get the value we must compare (from request or from database)
|
||||
rq = obj.REQUEST
|
||||
partName = '%s_%s' % (self.name, fieldPart)
|
||||
if rq.has_key(partName):
|
||||
if partName in rq:
|
||||
compValue = rq.get(partName)
|
||||
if compValue.isdigit():
|
||||
compValue = int(compValue)
|
||||
|
|
|
@ -108,7 +108,7 @@ class FileInfo:
|
|||
'''Removes the file from the filesystem.'''
|
||||
try:
|
||||
os.remove(osPathJoin(dbFolder, self.fsPath, self.fsName))
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
# If the current ZODB transaction is re-triggered, the file may
|
||||
# already have been deleted.
|
||||
pass
|
||||
|
@ -207,10 +207,10 @@ class FileInfo:
|
|||
if fileObj.data.__class__.__name__ == 'Pdata':
|
||||
# The file content is splitted in several chunks
|
||||
f.write(fileObj.data.data)
|
||||
nextPart = fileObj.data.next
|
||||
nextPart = fileObj.data.__next__
|
||||
while nextPart:
|
||||
f.write(nextPart.data)
|
||||
nextPart = nextPart.next
|
||||
nextPart = nextPart.__next__
|
||||
else:
|
||||
# Only one chunk
|
||||
f.write(fileObj.data)
|
||||
|
@ -393,7 +393,7 @@ class File(Field):
|
|||
def validateValue(self, obj, value):
|
||||
form = obj.REQUEST.form
|
||||
action = '%s_delete' % self.name
|
||||
if (not value or not value.filename) and form.has_key(action) and \
|
||||
if (not value or not value.filename) and action in form and \
|
||||
not form[action]:
|
||||
# If this key is present but empty, it means that the user selected
|
||||
# "replace the file with a new one". So in this case he must provide
|
||||
|
@ -450,7 +450,7 @@ class File(Field):
|
|||
# Case c
|
||||
fileInfo = (value.name, value.content, value.mimeType)
|
||||
info.writeFile(self.name, fileInfo, dbFolder)
|
||||
elif isinstance(value, basestring):
|
||||
elif isinstance(value, str):
|
||||
# Case d
|
||||
info.copyFile(self.name, value, dbFolder)
|
||||
elif isinstance(value, FileInfo):
|
||||
|
|
|
@ -40,7 +40,7 @@ class Group:
|
|||
# which will be used for HTML param "width".
|
||||
if wide == True:
|
||||
self.wide = '100%'
|
||||
elif isinstance(wide, basestring):
|
||||
elif isinstance(wide, str):
|
||||
self.wide = wide
|
||||
else:
|
||||
self.wide = ''
|
||||
|
@ -106,7 +106,7 @@ class Group:
|
|||
can be a string or a Group instance; this method returns always a
|
||||
Group instance.'''
|
||||
res = groupData
|
||||
if res and isinstance(res, basestring):
|
||||
if res and isinstance(res, str):
|
||||
# Group data is given as a string. 2 more possibilities:
|
||||
# (a) groupData is simply the name of the group;
|
||||
# (b) groupData is of the form <groupName>_<numberOfColumns>.
|
||||
|
@ -336,7 +336,7 @@ class UiGroup:
|
|||
# All p_group attributes become self attributes. This is required
|
||||
# because a UiGroup, in some PXs, must behave like a Field (ie, have
|
||||
# the same attributes, like "master".
|
||||
for name, value in group.__dict__.iteritems():
|
||||
for name, value in group.__dict__.items():
|
||||
if not name.startswith('_'):
|
||||
setattr(self, name, value)
|
||||
self.group = group
|
||||
|
@ -346,7 +346,7 @@ class UiGroup:
|
|||
labelName = self.name
|
||||
prefix = className
|
||||
if group.label:
|
||||
if isinstance(group.label, basestring): prefix = group.label
|
||||
if isinstance(group.label, str): prefix = group.label
|
||||
else: # It is a tuple (className, name)
|
||||
if group.label[1]: labelName = group.label[1]
|
||||
if group.label[0]: prefix = group.label[0]
|
||||
|
|
|
@ -60,7 +60,7 @@ class Integer(Field):
|
|||
height, maxChars, colspan, master, masterValue, focus,
|
||||
historized, mapping, label, sdefault, scolspan, swidth,
|
||||
sheight, persist, view, xml)
|
||||
self.pythonType = long
|
||||
self.pythonType = int
|
||||
|
||||
def validateValue(self, obj, value):
|
||||
try:
|
||||
|
|
|
@ -132,7 +132,7 @@ class List(Field):
|
|||
name = requestName or self.name # A List may be into another List (?)
|
||||
prefix = name + '*' + self.fields[0][0] + '*'
|
||||
res = {}
|
||||
for key in request.keys():
|
||||
for key in list(request.keys()):
|
||||
if not key.startswith(prefix): continue
|
||||
# I have found a row. Gets its index.
|
||||
row = Object()
|
||||
|
@ -145,7 +145,7 @@ class List(Field):
|
|||
setattr(row, subName, v)
|
||||
res[rowIndex] = row
|
||||
# Produce a sorted list
|
||||
keys = res.keys()
|
||||
keys = list(res.keys())
|
||||
keys.sort()
|
||||
res = [res[key] for key in keys]
|
||||
# I store in the request this computed value. This way, when individual
|
||||
|
|
|
@ -83,13 +83,13 @@ class Ogone(Field):
|
|||
# Create a new dict by removing p_keysToIgnore from p_values, and by
|
||||
# upperizing all keys.
|
||||
shaRes = {}
|
||||
for k, v in values.iteritems():
|
||||
for k, v in values.items():
|
||||
if k in keysToIgnore: continue
|
||||
# Ogone: we must not include empty values.
|
||||
if (v == None) or (v == ''): continue
|
||||
shaRes[k.upper()] = v
|
||||
# Create a sorted list of keys
|
||||
keys = shaRes.keys()
|
||||
keys = list(shaRes.keys())
|
||||
keys.sort()
|
||||
shaList = []
|
||||
for k in keys:
|
||||
|
@ -127,7 +127,7 @@ class Ogone(Field):
|
|||
# Ogone's response.
|
||||
res['paramplus'] = 'name=%s' % self.name
|
||||
# Ensure every value is a str
|
||||
for k in res.iterkeys():
|
||||
for k in res.keys():
|
||||
if not isinstance(res[k], str):
|
||||
res[k] = str(res[k])
|
||||
# Compute a SHA-1 key as required by Ogone and add it to the res
|
||||
|
@ -154,7 +154,7 @@ class Ogone(Field):
|
|||
'administrator has been contacted.')
|
||||
# Create a nice object from the form.
|
||||
response = Object()
|
||||
for k, v in obj.REQUEST.form.iteritems():
|
||||
for k, v in obj.REQUEST.form.items():
|
||||
setattr(response, k, v)
|
||||
# Call the field method that handles the response received from Ogone.
|
||||
url = self.responseMethod(obj.appy(), response)
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
|
||||
# ------------------------------------------------------------------------------
|
||||
from appy import Object
|
||||
import collections
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class Page:
|
||||
|
@ -53,7 +54,7 @@ class Page:
|
|||
(c) a Page instance.
|
||||
This method returns always a Page instance.'''
|
||||
res = pageData
|
||||
if res and isinstance(res, basestring):
|
||||
if res and isinstance(res, str):
|
||||
# Page data is given as a string.
|
||||
pageElems = pageData.rsplit('_', 1)
|
||||
if len(pageElems) == 1: # We have case (a)
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
|
||||
# ------------------------------------------------------------------------------
|
||||
import time, os, os.path
|
||||
from file import FileInfo
|
||||
from .file import FileInfo
|
||||
from appy import Object
|
||||
from appy.fields import Field
|
||||
from appy.px import Px
|
||||
|
@ -25,6 +25,7 @@ from appy.gen import utils as gutils
|
|||
from appy.pod import PodError
|
||||
from appy.pod.renderer import Renderer
|
||||
from appy.shared import utils as sutils
|
||||
import collections
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class Mailing:
|
||||
|
@ -170,7 +171,7 @@ class Pod(Field):
|
|||
# of your app will be referred as "Test.odt" in self.template. If it is
|
||||
# stored within sub-folder "pod", it will be referred as "pod/Test.odt".
|
||||
if not template: raise Exception(Pod.NO_TEMPLATE)
|
||||
if isinstance(template, basestring):
|
||||
if isinstance(template, str):
|
||||
self.template = [template]
|
||||
elif isinstance(template, tuple):
|
||||
self.template = list(template)
|
||||
|
@ -396,7 +397,7 @@ class Pod(Field):
|
|||
formats = self.showTemplate(obj, template)
|
||||
if not formats: continue
|
||||
elif isinstance(formats, bool): formats = self.formats
|
||||
elif isinstance(formats, basestring): formats = (formats,)
|
||||
elif isinstance(formats, str): formats = (formats,)
|
||||
res.append(Object(template=template, formats=formats,
|
||||
freezeFormats=self.getFreezeFormats(obj, template)))
|
||||
return res
|
||||
|
@ -525,7 +526,7 @@ class Pod(Field):
|
|||
templatePath = self.getTemplatePath(diskFolder, template)
|
||||
# Get or compute the specific POD context
|
||||
specificContext = None
|
||||
if callable(self.context):
|
||||
if isinstance(self.context, collections.Callable):
|
||||
specificContext = self.callMethod(obj, self.context)
|
||||
else:
|
||||
specificContext = self.context
|
||||
|
@ -544,7 +545,7 @@ class Pod(Field):
|
|||
# Retrieve query params
|
||||
cmd = ', '.join(Pod.queryParams)
|
||||
cmd += " = queryData.split(';')"
|
||||
exec cmd
|
||||
exec(cmd)
|
||||
# (re-)execute the query, but without any limit on the number of
|
||||
# results; return Appy objects.
|
||||
objs = tool.o.executeQuery(obj.o.portal_type, searchName=search,
|
||||
|
@ -559,7 +560,7 @@ class Pod(Field):
|
|||
# when generating frozen documents).
|
||||
if '_checked' not in podContext: podContext['_checked'] = Object()
|
||||
# Define a potential global styles mapping
|
||||
if callable(self.stylesMapping):
|
||||
if isinstance(self.stylesMapping, collections.Callable):
|
||||
stylesMapping = self.callMethod(obj, self.stylesMapping)
|
||||
else:
|
||||
stylesMapping = self.stylesMapping
|
||||
|
@ -575,7 +576,7 @@ class Pod(Field):
|
|||
try:
|
||||
renderer = Renderer(**rendererParams)
|
||||
renderer.run()
|
||||
except PodError, pe:
|
||||
except PodError as pe:
|
||||
if not os.path.exists(result):
|
||||
# In some (most?) cases, when OO returns an error, the result is
|
||||
# nevertheless generated.
|
||||
|
@ -643,7 +644,7 @@ class Pod(Field):
|
|||
# Generate the document
|
||||
doc = self.getValue(obj, template=template, format=format,
|
||||
result=result)
|
||||
if isinstance(doc, basestring):
|
||||
if isinstance(doc, str):
|
||||
# An error occurred, the document was not generated.
|
||||
obj.log(self.FREEZE_ERROR % (format, self.name, doc),
|
||||
type='error')
|
||||
|
@ -658,7 +659,7 @@ class Pod(Field):
|
|||
obj.log('freeze: overwriting %s...' % result)
|
||||
doc = self.getValue(obj, template=template, format='odt',
|
||||
result=result)
|
||||
if isinstance(doc, basestring):
|
||||
if isinstance(doc, str):
|
||||
self.log(self.FREEZE_ERROR % ('odt', self.name, doc),
|
||||
type='error')
|
||||
raise Exception(self.FREEZE_FATAL_ERROR)
|
||||
|
@ -762,7 +763,7 @@ class Pod(Field):
|
|||
res = self.getValue(obj, template=template, format=format,
|
||||
queryData=rq.get('queryData'),
|
||||
customContext=self.getCustomContext(obj, rq))
|
||||
if isinstance(res, basestring):
|
||||
if isinstance(res, str):
|
||||
# An error has occurred, and p_res contains the error message
|
||||
obj.say(res)
|
||||
return tool.goto(rq.get('HTTP_REFERER'))
|
||||
|
|
|
@ -23,6 +23,7 @@ from appy.px import Px
|
|||
from appy.gen.layout import Table
|
||||
from appy.gen import utils as gutils
|
||||
from appy.shared import utils as sutils
|
||||
import collections
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class Ref(Field):
|
||||
|
@ -826,7 +827,7 @@ class Ref(Field):
|
|||
# If this field is an ajax-updatable slave, no need to compute
|
||||
# possible values: it will be overridden by method self.masterValue
|
||||
# by a subsequent ajax request (=the "if" statement above).
|
||||
if self.masterValue and callable(self.masterValue):
|
||||
if self.masterValue and isinstance(self.masterValue, collections.Callable):
|
||||
objects = []
|
||||
else:
|
||||
if not self.select:
|
||||
|
@ -921,7 +922,7 @@ class Ref(Field):
|
|||
res = self.getAttribute(obj, 'numbered')
|
||||
if not res: return res
|
||||
# Returns the column width.
|
||||
if not isinstance(res, basestring): return '15px'
|
||||
if not isinstance(res, str): return '15px'
|
||||
return res
|
||||
|
||||
def getMenuUrl(self, zobj, tied):
|
||||
|
@ -996,14 +997,14 @@ class Ref(Field):
|
|||
# Also ensure that multiplicities are enforced.
|
||||
if not value:
|
||||
nbOfRefs = 0
|
||||
elif isinstance(value, basestring):
|
||||
elif isinstance(value, str):
|
||||
nbOfRefs = 1
|
||||
else:
|
||||
nbOfRefs = len(value)
|
||||
minRef = self.multiplicity[0]
|
||||
maxRef = self.multiplicity[1]
|
||||
if maxRef == None:
|
||||
maxRef = sys.maxint
|
||||
maxRef = sys.maxsize
|
||||
if nbOfRefs < minRef:
|
||||
return obj.translate('min_ref_violated')
|
||||
elif nbOfRefs > maxRef:
|
||||
|
@ -1119,7 +1120,7 @@ class Ref(Field):
|
|||
if type(objects) not in sutils.sequenceTypes: objects = [objects]
|
||||
tool = obj.getTool()
|
||||
for i in range(len(objects)):
|
||||
if isinstance(objects[i], basestring):
|
||||
if isinstance(objects[i], str):
|
||||
# We have an UID here
|
||||
objects[i] = tool.getObject(objects[i], appy=True)
|
||||
else:
|
||||
|
|
|
@ -19,7 +19,7 @@ from appy.px import Px
|
|||
from appy.gen import utils as gutils
|
||||
from appy.gen.indexer import defaultIndexes
|
||||
from appy.shared import utils as sutils
|
||||
from group import Group
|
||||
from .group import Group
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class Search:
|
||||
|
@ -85,12 +85,12 @@ class Search:
|
|||
(fieldName == 'SearchableText'):
|
||||
# For TextIndex indexes. We must split p_fieldValue into keywords.
|
||||
res = gutils.Keywords(fieldValue).get()
|
||||
elif isinstance(fieldValue, basestring) and fieldValue.endswith('*'):
|
||||
elif isinstance(fieldValue, str) and fieldValue.endswith('*'):
|
||||
v = fieldValue[:-1]
|
||||
# Warning: 'z' is higher than 'Z'!
|
||||
res = {'query':(v,v+'z'), 'range':'min:max'}
|
||||
elif type(fieldValue) in sutils.sequenceTypes:
|
||||
if fieldValue and isinstance(fieldValue[0], basestring):
|
||||
if fieldValue and isinstance(fieldValue[0], str):
|
||||
# We have a list of string values (ie: we need to
|
||||
# search v1 or v2 or...)
|
||||
res = fieldValue
|
||||
|
@ -118,7 +118,7 @@ class Search:
|
|||
sortBy and sortOrder (and not "resolve" them to Zope's sort_on and
|
||||
sort_order).'''
|
||||
# Put search criteria in p_criteria
|
||||
for name, value in self.fields.iteritems():
|
||||
for name, value in self.fields.items():
|
||||
# Management of searches restricted to objects linked through a
|
||||
# Ref field: not implemented yet.
|
||||
if name == '_ref': continue
|
||||
|
|
|
@ -25,6 +25,7 @@ from appy.shared.data import countries
|
|||
from appy.shared.xml_parser import XhtmlCleaner
|
||||
from appy.shared.diff import HtmlDiff
|
||||
from appy.shared import utils as sutils
|
||||
import collections
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
digit = re.compile('[0-9]')
|
||||
|
@ -471,7 +472,7 @@ class String(Field):
|
|||
res = True
|
||||
if type(self.validator) in (list, tuple):
|
||||
for elem in self.validator:
|
||||
if not isinstance(elem, basestring):
|
||||
if not isinstance(elem, str):
|
||||
res = False
|
||||
break
|
||||
else:
|
||||
|
@ -533,7 +534,7 @@ class String(Field):
|
|||
if not value:
|
||||
if self.isMultiValued(): return emptyTuple
|
||||
else: return value
|
||||
if isinstance(value, basestring) and self.isMultiValued():
|
||||
if isinstance(value, str) and self.isMultiValued():
|
||||
value = [value]
|
||||
elif isinstance(value, tuple):
|
||||
value = list(value)
|
||||
|
@ -602,8 +603,7 @@ class String(Field):
|
|||
identifies the language-specific part we will work on.'''
|
||||
res = None
|
||||
lastEvent = None
|
||||
name = language and ('%s-%s' % (self.name, language)) or self.name
|
||||
for event in obj.workflow_history['appy']:
|
||||
for event in obj.workflow_history.values()[0]:
|
||||
if event['action'] != '_datachange_': continue
|
||||
if name not in event['changes']: continue
|
||||
if res == None:
|
||||
|
@ -660,7 +660,7 @@ class String(Field):
|
|||
res = obj.formatText(res, format='html')
|
||||
# If value starts with a carriage return, add a space; else, it will
|
||||
# be ignored.
|
||||
if isinstance(res, basestring) and \
|
||||
if isinstance(res, str) and \
|
||||
(res.startswith('\n') or res.startswith('\r\n')): res = ' ' + res
|
||||
return res
|
||||
|
||||
|
@ -774,7 +774,7 @@ class String(Field):
|
|||
# If this field is an ajax-updatable slave, no need to compute
|
||||
# possible values: it will be overridden by method self.masterValue
|
||||
# by a subsequent ajax request (=the "if" statement above).
|
||||
if self.masterValue and callable(self.masterValue) and \
|
||||
if self.masterValue and isinstance(self.masterValue, collections.Callable) and \
|
||||
not ignoreMasterValues: return []
|
||||
if isinstance(self.validator, Selection):
|
||||
# We need to call self.methodName for getting the (dynamic)
|
||||
|
@ -808,9 +808,9 @@ class String(Field):
|
|||
obj = brains[0].getObject()
|
||||
# Do we need to call the method on the object or on the wrapper?
|
||||
if methodName.startswith('_appy_'):
|
||||
exec 'res = obj.%s(*args)' % methodName
|
||||
exec('res = obj.%s(*args)' % methodName)
|
||||
else:
|
||||
exec 'res = obj.appy().%s(*args)' % methodName
|
||||
exec('res = obj.appy().%s(*args)' % methodName)
|
||||
if not withTranslations: res = [v[0] for v in res]
|
||||
elif isinstance(res, list): res = res[:]
|
||||
else:
|
||||
|
@ -847,7 +847,7 @@ class String(Field):
|
|||
elif self.isSelect:
|
||||
# Check that the value is among possible values
|
||||
possibleValues = self.getPossibleValues(obj,ignoreMasterValues=True)
|
||||
if isinstance(value, basestring):
|
||||
if isinstance(value, str):
|
||||
error = value not in possibleValues
|
||||
else:
|
||||
error = False
|
||||
|
@ -872,7 +872,7 @@ class String(Field):
|
|||
return value
|
||||
|
||||
def getUnilingualStorableValue(self, obj, value):
|
||||
isString = isinstance(value, basestring)
|
||||
isString = isinstance(value, str)
|
||||
isEmpty = Field.isEmptyValue(self, obj, value)
|
||||
# Apply transform if required
|
||||
if isString and not isEmpty and (self.transform != 'none'):
|
||||
|
@ -1021,7 +1021,7 @@ class String(Field):
|
|||
if self.allowImageUpload:
|
||||
ckAttrs['filebrowserUploadUrl'] = '%s/upload' % obj.absolute_url()
|
||||
ck = []
|
||||
for k, v in ckAttrs.iteritems():
|
||||
for k, v in ckAttrs.items():
|
||||
if isinstance(v, int): sv = str(v)
|
||||
if isinstance(v, bool): sv = str(v).lower()
|
||||
else: sv = '"%s"' % v
|
||||
|
@ -1059,7 +1059,7 @@ class String(Field):
|
|||
name containing a row number from a field within a list field.'''
|
||||
rq = obj.REQUEST
|
||||
# Get the value we must compare (from request or from database)
|
||||
if rq.has_key(fieldName):
|
||||
if fieldName in rq:
|
||||
compValue = rq.get(fieldName)
|
||||
else:
|
||||
compValue = dbValue
|
||||
|
|
|
@ -14,10 +14,12 @@
|
|||
# You should have received a copy of the GNU General Public License along with
|
||||
# Appy. If not, see <http://www.gnu.org/licenses/>.
|
||||
# ------------------------------------------------------------------------------
|
||||
import types, string
|
||||
from group import Group
|
||||
import types
|
||||
import .string
|
||||
from .group import Group
|
||||
from appy.px import Px
|
||||
from appy.gen.utils import User
|
||||
import collections
|
||||
|
||||
# Default Appy permissions -----------------------------------------------------
|
||||
r, w, d = ('read', 'write', 'delete')
|
||||
|
@ -73,7 +75,7 @@ class State:
|
|||
exists, or creates a Role instance, puts it in self.usedRoles and
|
||||
returns it else. If it is a Role instance, the method stores it in
|
||||
self.usedRoles if it is not in it yet and returns it.'''
|
||||
if isinstance(role, basestring):
|
||||
if isinstance(role, str):
|
||||
if role in self.usedRoles:
|
||||
return self.usedRoles[role]
|
||||
else:
|
||||
|
@ -88,24 +90,24 @@ class State:
|
|||
def standardizeRoles(self):
|
||||
'''This method converts, within self.permissions, every role to a
|
||||
Role instance. Every used role is stored in self.usedRoles.'''
|
||||
for permission, roles in self.permissions.iteritems():
|
||||
for permission, roles in self.permissions.items():
|
||||
if not roles: continue # Nobody may have this permission
|
||||
if isinstance(roles, basestring) or isinstance(roles, Role):
|
||||
if isinstance(roles, str) or isinstance(roles, Role):
|
||||
self.permissions[permission] = [self.getRole(roles)]
|
||||
elif isinstance(roles, list):
|
||||
for i in range(len(roles)): roles[i] = self.getRole(roles[i])
|
||||
else: # A tuple
|
||||
self.permissions[permission] = [self.getRole(r) for r in roles]
|
||||
|
||||
def getUsedRoles(self): return self.usedRoles.values()
|
||||
def getUsedRoles(self): return list(self.usedRoles.values())
|
||||
|
||||
def addRoles(self, roleNames, permissions=()):
|
||||
'''Adds p_roleNames in self.permissions. If p_permissions is specified,
|
||||
roles are added to those permissions only. Else, roles are added for
|
||||
every permission within self.permissions.'''
|
||||
if isinstance(roleNames, basestring): roleNames = (roleNames,)
|
||||
if isinstance(permissions, basestring): permissions = (permissions,)
|
||||
for perm, roles in self.permissions.iteritems():
|
||||
if isinstance(roleNames, str): roleNames = (roleNames,)
|
||||
if isinstance(permissions, str): permissions = (permissions,)
|
||||
for perm, roles in self.permissions.items():
|
||||
if permissions and (perm not in permissions): continue
|
||||
for roleName in roleNames:
|
||||
# Do nothing if p_roleName is already almong roles.
|
||||
|
@ -124,9 +126,9 @@ class State:
|
|||
'''Removes p_roleNames within dict self.permissions. If p_permissions is
|
||||
specified, removal is restricted to those permissions. Else, removal
|
||||
occurs throughout the whole dict self.permissions.'''
|
||||
if isinstance(roleNames, basestring): roleNames = (roleNames,)
|
||||
if isinstance(permissions, basestring): permissions = (permissions,)
|
||||
for perm, roles in self.permissions.iteritems():
|
||||
if isinstance(roleNames, str): roleNames = (roleNames,)
|
||||
if isinstance(permissions, str): permissions = (permissions,)
|
||||
for perm, roles in self.permissions.items():
|
||||
if permissions and (perm not in permissions): continue
|
||||
for roleName in roleNames:
|
||||
# Remove this role if present in roles for this permission.
|
||||
|
@ -138,9 +140,9 @@ class State:
|
|||
def setRoles(self, roleNames, permissions=()):
|
||||
'''Sets p_rolesNames for p_permissions if not empty, for every
|
||||
permission in self.permissions else.'''
|
||||
if isinstance(roleNames, basestring): roleNames = (roleNames,)
|
||||
if isinstance(permissions, basestring): permissions = (permissions,)
|
||||
for perm in self.permissions.iterkeys():
|
||||
if isinstance(roleNames, str): roleNames = (roleNames,)
|
||||
if isinstance(permissions, str): permissions = (permissions,)
|
||||
for perm in self.permissions.keys():
|
||||
if permissions and (perm not in permissions): continue
|
||||
roles = self.permissions[perm] = []
|
||||
for roleName in roleNames:
|
||||
|
@ -150,8 +152,8 @@ class State:
|
|||
'''Replaces p_oldRoleName by p_newRoleName. If p_permissions is
|
||||
specified, the replacement is restricted to those permissions. Else,
|
||||
replacements apply to the whole dict self.permissions.'''
|
||||
if isinstance(permissions, basestring): permissions = (permissions,)
|
||||
for perm, roles in self.permissions.iteritems():
|
||||
if isinstance(permissions, str): permissions = (permissions,)
|
||||
for perm, roles in self.permissions.items():
|
||||
if permissions and (perm not in permissions): continue
|
||||
# Find and delete p_oldRoleName
|
||||
for role in roles:
|
||||
|
@@ -170,7 +172,7 @@ class State:
           workflow, this method will always return True (I mean: in this case,
           having an isolated state does not mean the state has been
           deactivated).'''
        for tr in wf.__dict__.itervalues():
        for tr in wf.__dict__.values():
            if not isinstance(tr, Transition): continue
            if not tr.hasState(self, True): continue
            # Transition "tr" has this state as start state. If the end state is
@@ -200,7 +202,7 @@ class Transition:
        # be useful for "undo" transitions, for example.
        self.states = self.standardiseStates(states)
        self.condition = condition
        if isinstance(condition, basestring):
        if isinstance(condition, str):
            # The condition specifies the name of a role.
            self.condition = Role(condition)
        self.action = action
@@ -276,7 +278,7 @@ class Transition:

    def isShowable(self, workflow, obj):
        '''Is this transition showable?'''
        if callable(self.show):
        if isinstance(self.show, collections.Callable):
            return self.show(workflow, obj.appy())
        else:
            return self.show
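2to3 rewrites callable(self.show) as isinstance(self.show, collections.Callable) because the callable() built-in was removed in Python 3.0; it was reinstated in Python 3.2, and the ABC now lives in collections.abc (the bare collections alias is gone in Python 3.10+). A small sketch of the equivalence on a modern interpreter:

    import collections.abc
    def show(workflow, obj): return True
    assert callable(show)                               # available again since 3.2
    assert isinstance(show, collections.abc.Callable)   # what the 2to3 spelling relies on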
@@ -330,7 +332,7 @@ class Transition:
        for condition in self.condition:
            # "Unwrap" role names from Role instances
            if isinstance(condition, Role): condition = condition.name
            if isinstance(condition, basestring): # It is a role
            if isinstance(condition, str): # It is a role
                if hasRole == None:
                    hasRole = False
                if user.has_role(condition, obj):
@@ -450,7 +452,7 @@ class Transition:
        transition = getattr(workflow, transition)
        # Browse all transitions and find the one starting at p_transition's end
        # state and coming back to p_transition's start state.
        for trName, tr in workflow.__dict__.iteritems():
        for trName, tr in workflow.__dict__.items():
            if not isinstance(tr, Transition) or (tr == transition): continue
            if transition.isSingle():
                if tr.hasState(transition.states[1], True) and \

@@ -5,9 +5,9 @@
# ------------------------------------------------------------------------------
import types, copy
import appy.gen as gen
import po
from model import ModelClass
from utils import produceNiceMessage, getClassName
from . import po
from .model import ModelClass
from .utils import produceNiceMessage, getClassName
TABS = 4 # Number of blanks in a Python indentation.

# ------------------------------------------------------------------------------
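The import rewrites above follow from PEP 328: Python 3 has no implicit relative imports, so a bare 'import po' inside the gen package would now look for a top-level module named po. A minimal sketch, assuming a package layout with gen/__init__.py, gen/po.py and gen/model.py:

    # inside a module of the gen package (hypothetical layout)
    from . import po                   # sibling module gen/po.py
    from .model import ModelClass      # a name from sibling module gen/model.py
    from ..shared import utils         # one package level up, if such a package exists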
@@ -130,7 +130,7 @@ class ClassDescriptor(Descriptor):
    def isAbstract(self):
        '''Is self.klass abstract?'''
        res = False
        if self.klass.__dict__.has_key('abstract'):
        if 'abstract' in self.klass.__dict__:
            res = self.klass.__dict__['abstract']
        return res

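dict.has_key() was removed in Python 3, so every such test becomes a membership check with the in operator, a form that already works on Python 2. A small sketch:

    attrs = {'abstract': True}
    # Python 2 only:  attrs.has_key('abstract')
    if 'abstract' in attrs:             # works on Python 2 and 3
        res = attrs['abstract']
    res = attrs.get('abstract', False)  # same lookup with a default, in one access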
@@ -139,7 +139,7 @@ class ClassDescriptor(Descriptor):
           concept into the application. For example, creating instances
           of such classes will be easy from the user interface.'''
        res = False
        if self.klass.__dict__.has_key('root'):
        if 'root' in self.klass.__dict__:
            res = self.klass.__dict__['root']
        return res

@@ -150,7 +150,7 @@ class ClassDescriptor(Descriptor):
        theClass = self.klass
        if klass:
            theClass = klass
        if theClass.__dict__.has_key('folder'):
        if 'folder' in theClass.__dict__:
            res = theClass.__dict__['folder']
        else:
            if theClass.__bases__:
@@ -176,14 +176,14 @@ class ClassDescriptor(Descriptor):
    def getCreateMean(self, type='Import'):
        '''Returns the mean for this class that corresponds to p_type, or
           None if the class does not support this create mean.'''
        if not self.klass.__dict__.has_key('create'): return
        if 'create' not in self.klass.__dict__: return
        else:
            means = self.klass.create
            if not means: return
            if not isinstance(means, tuple) and not isinstance(means, list):
                means = [means]
            for mean in means:
                exec 'found = isinstance(mean, %s)' % type
                exec('found = isinstance(mean, %s)' % type)
                if found: return mean

    @staticmethod
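In Python 3, exec is a function, hence the added parentheses in getCreateMean() above. One caveat worth keeping in mind: unlike the old statement, exec() called inside a function can no longer rebind that function's local variables, so a name like 'found' assigned through exec() is not guaranteed to be visible afterwards; passing an explicit namespace (or dropping exec in favour of eval or getattr) is the robust pattern. A minimal sketch with throwaway values:

    ns = {}
    exec('found = isinstance(3, %s)' % 'int', globals(), ns)
    assert ns['found'] is True
    # Equivalent without exec at all:
    found = isinstance(3, eval('int'))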
|
@@ -192,7 +192,7 @@ class ClassDescriptor(Descriptor):
           p_tool is given, we are at execution time (not at generation time),
           and we may potentially execute search.show methods that allow to
           conditionally include a search or not.'''
        if klass.__dict__.has_key('search'):
        if 'search' in klass.__dict__:
            searches = klass.__dict__['search']
            if not tool: return searches
            # Evaluate attributes "show" for every search.
@@ -229,10 +229,10 @@ class ClassDescriptor(Descriptor):

    def addField(self, fieldName, fieldType):
        '''Adds a new field to the Tool.'''
        exec "self.modelClass.%s = fieldType" % fieldName
        exec("self.modelClass.%s = fieldType" % fieldName)
        if fieldName in self.modelClass._appy_attributes:
            print('Warning, field "%s" is already existing on class "%s"' % \
                  (fieldName, self.modelClass.__name__))
            print(('Warning, field "%s" is already existing on class "%s"' % \
                  (fieldName, self.modelClass.__name__)))
            return
        self.modelClass._appy_attributes.append(fieldName)
        self.orderedAttributes.append(fieldName)
|
@@ -488,9 +488,9 @@ class TranslationClassDescriptor(ClassDescriptor):
        maxLine = 100 # We suppose a line is 100 characters long.
        width = 0
        height = 0
        for fileName, poFile in i18nFiles.iteritems():
        for fileName, poFile in i18nFiles.items():
            if not fileName.startswith('%s-' % appName) or \
               not i18nFiles[fileName].messagesDict.has_key(messageId):
               messageId not in i18nFiles[fileName].messagesDict:
                # In this case this is not one of our Appy-managed translation
                # files.
                continue

@ -3,10 +3,11 @@ import os, os.path, re, sys, parser, symbol, token, types
|
|||
import appy, appy.pod.renderer
|
||||
from appy.shared.utils import FolderDeleter
|
||||
import appy.gen as gen
|
||||
import po
|
||||
from descriptors import *
|
||||
from utils import getClassName
|
||||
from model import ModelClass, User, Group, Tool, Translation, Page
|
||||
from . import po
|
||||
from .descriptors import *
|
||||
from .utils import getClassName
|
||||
from .model import ModelClass, User, Group, Tool, Translation, Page
|
||||
import collections
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class GeneratorError(Exception): pass
|
||||
|
@ -160,7 +161,7 @@ class Generator:
|
|||
appy.fields.Field, it will be considered a gen-class. If p_klass
|
||||
declares at least one static attribute that is a appy.gen.State,
|
||||
it will be considered a gen-workflow.'''
|
||||
for attr in klass.__dict__.itervalues():
|
||||
for attr in klass.__dict__.values():
|
||||
if isinstance(attr, gen.Field): return 'class'
|
||||
elif isinstance(attr, gen.State): return 'workflow'
|
||||
|
||||
|
@ -173,11 +174,11 @@ class Generator:
|
|||
self.totalNumberOfTests += 1
|
||||
res = True
|
||||
# Count also docstring in methods
|
||||
if type(moduleOrClass) == types.ClassType:
|
||||
for name, elem in moduleOrClass.__dict__.iteritems():
|
||||
if type(moduleOrClass) == type:
|
||||
for name, elem in moduleOrClass.__dict__.items():
|
||||
if type(elem) in (staticmethod, classmethod):
|
||||
elem = elem.__get__(name)
|
||||
if callable(elem) and (type(elem) != types.ClassType) and \
|
||||
if isinstance(elem, collections.Callable) and (type(elem) != type) and \
|
||||
hasattr(elem, '__doc__') and elem.__doc__ and \
|
||||
(elem.__doc__.find('>>>') != -1):
|
||||
res = True
|
||||
|
@ -198,8 +199,8 @@ class Generator:
|
|||
self.modulesWithTests.add(module.__name__)
|
||||
classType = type(Generator)
|
||||
# Find all classes in this module
|
||||
for name in module.__dict__.keys():
|
||||
exec 'moduleElem = module.%s' % name
|
||||
for name in list(module.__dict__.keys()):
|
||||
exec('moduleElem = module.%s' % name)
|
||||
# Ignore non-classes module elements or classes that were imported
|
||||
# from other modules.
|
||||
if (type(moduleElem) != classType) or \
|
||||
|
@ -213,7 +214,7 @@ class Generator:
|
|||
# Collect non-parsable attrs = back references added
|
||||
# programmatically
|
||||
moreAttrs = []
|
||||
for eName, eValue in moduleElem.__dict__.iteritems():
|
||||
for eName, eValue in moduleElem.__dict__.items():
|
||||
if isinstance(eValue, gen.Field) and (eName not in attrs):
|
||||
moreAttrs.append(eName)
|
||||
# Sort them in alphabetical order: else, order would be random
|
||||
|
@ -257,7 +258,7 @@ class Generator:
|
|||
# What is the name of the application ?
|
||||
appName = os.path.basename(self.application)
|
||||
# Get the app-specific config if any
|
||||
exec 'import %s as appModule' % appName
|
||||
exec('import %s as appModule' % appName)
|
||||
if hasattr (appModule, 'Config'):
|
||||
self.config = appModule.Config
|
||||
if not issubclass(self.config, gen.Config):
|
||||
|
@ -273,7 +274,7 @@ class Generator:
|
|||
# Ignore non Python files
|
||||
if not fileName.endswith('.py'): continue
|
||||
moduleName = '%s.%s' % (appName, os.path.splitext(fileName)[0])
|
||||
exec 'import %s' % moduleName
|
||||
exec('import %s' % moduleName)
|
||||
modules.append(eval(moduleName))
|
||||
# Parse imported modules
|
||||
for module in modules:
|
||||
|
@ -321,7 +322,7 @@ class Generator:
|
|||
fileContent = f.read()
|
||||
f.close()
|
||||
if not fileName.endswith('.png'):
|
||||
for rKey, rValue in replacements.iteritems():
|
||||
for rKey, rValue in replacements.items():
|
||||
fileContent = fileContent.replace(
|
||||
'<!%s!>' % rKey, str(rValue))
|
||||
f = file(resultPath, 'w')
|
||||
|
@ -343,7 +344,7 @@ class Generator:
|
|||
msg = ''
|
||||
if self.totalNumberOfTests:
|
||||
msg = ' (number of tests found: %d)' % self.totalNumberOfTests
|
||||
print('Done%s.' % msg)
|
||||
print(('Done%s.' % msg))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
class ZopeGenerator(Generator):
|
||||
|
@ -413,7 +414,7 @@ class ZopeGenerator(Generator):
|
|||
f.close()
|
||||
# Generate i18n pot file
|
||||
potFileName = '%s.pot' % self.applicationName
|
||||
if self.i18nFiles.has_key(potFileName):
|
||||
if potFileName in self.i18nFiles:
|
||||
potFile = self.i18nFiles[potFileName]
|
||||
else:
|
||||
fullName = os.path.join(self.application, 'tr', potFileName)
|
||||
|
@ -427,14 +428,14 @@ class ZopeGenerator(Generator):
|
|||
self.options.i18nClean, keepExistingOrder=False)
|
||||
potFile.generate()
|
||||
if removedLabels:
|
||||
print('Warning: %d messages were removed from translation ' \
|
||||
'files: %s' % (len(removedLabels), str(removedLabels)))
|
||||
print(('Warning: %d messages were removed from translation ' \
|
||||
'files: %s' % (len(removedLabels), str(removedLabels))))
|
||||
# Generate i18n po files
|
||||
for language in self.config.languages:
|
||||
# I must generate (or update) a po file for the language(s)
|
||||
# specified in the configuration.
|
||||
poFileName = potFile.getPoFileName(language)
|
||||
if self.i18nFiles.has_key(poFileName):
|
||||
if poFileName in self.i18nFiles:
|
||||
poFile = self.i18nFiles[poFileName]
|
||||
else:
|
||||
fullName = os.path.join(self.application, 'tr', poFileName)
|
||||
|
@ -501,7 +502,7 @@ class ZopeGenerator(Generator):
|
|||
for role in creators:
|
||||
if role.name not in allRoles:
|
||||
allRoles[role.name] = role
|
||||
res = allRoles.values()
|
||||
res = list(allRoles.values())
|
||||
# Filter the result according to parameters
|
||||
for p in ('appy', 'local', 'grantable'):
|
||||
if eval(p) != None:
|
||||
|
@@ -621,12 +622,12 @@ class ZopeGenerator(Generator):
            else:
                # If a child of this class is already present, we must insert
                # this klass before it.
                lowestChildIndex = sys.maxint
                lowestChildIndex = sys.maxsize
                for resClass in resClasses:
                    if klass in resClass.__bases__:
                        lowestChildIndex = min(lowestChildIndex,
                                               resClasses.index(resClass))
                if lowestChildIndex != sys.maxint:
                if lowestChildIndex != sys.maxsize:
                    res.insert(lowestChildIndex, classDescr)
                    resClasses.insert(lowestChildIndex, klass)
                else:
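sys.maxint does not exist in Python 3 because int is unbounded; 2to3 substitutes sys.maxsize, which is merely the largest container index but still works as an "infinitely large" sentinel for the minimum search above. A tiny sketch of the same idiom:

    import sys
    lowest = sys.maxsize                    # sentinel larger than any real index
    for i, item in enumerate([10, 3, 7]):
        if item < 5:
            lowest = min(lowest, i)
    found = lowest != sys.maxsize           # True: index 1 matched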
@ -745,7 +746,7 @@ class ZopeGenerator(Generator):
|
|||
'''Is called each time an Appy class is found in the application, for
|
||||
generating the corresponding Archetype class.'''
|
||||
k = classDescr.klass
|
||||
print('Generating %s.%s (gen-class)...' % (k.__module__, k.__name__))
|
||||
print(('Generating %s.%s (gen-class)...' % (k.__module__, k.__name__)))
|
||||
# Determine base Zope class
|
||||
isFolder = classDescr.isFolder()
|
||||
baseClass = isFolder and 'Folder' or 'SimpleItem'
|
||||
|
@ -772,7 +773,7 @@ class ZopeGenerator(Generator):
|
|||
'''This method creates the i18n labels related to the workflow described
|
||||
by p_wfDescr.'''
|
||||
k = wfDescr.klass
|
||||
print('Generating %s.%s (gen-workflow)...' % (k.__module__, k.__name__))
|
||||
print(('Generating %s.%s (gen-workflow)...' % (k.__module__, k.__name__)))
|
||||
# Identify workflow name
|
||||
wfName = WorkflowDescriptor.getWorkflowName(wfDescr.klass)
|
||||
# Add i18n messages for states
|
||||
|
|
|
@@ -33,7 +33,7 @@ def updateIndexes(installer, indexInfo):
    '''This function updates the indexes defined in the catalog.'''
    catalog = installer.app.catalog
    logger = installer.logger
    for indexName, indexType in indexInfo.iteritems():
    for indexName, indexType in indexInfo.items():
        indexRealType = indexType
        if indexType in ('XhtmlIndex', 'TextIndex', 'ListIndex'):
            indexRealType = 'ZCTextIndex'

@ -43,7 +43,7 @@ class FakeZCatalog:
|
|||
def onDelSession(sessionObject, container):
|
||||
'''This function is called when a session expires.'''
|
||||
rq = container.REQUEST
|
||||
if rq.cookies.has_key('_appy_') and rq.cookies.has_key('_ZopeId') and \
|
||||
if '_appy_' in rq.cookies and '_ZopeId' in rq.cookies and \
|
||||
(rq['_ZopeId'] == sessionObject.token):
|
||||
# The request comes from a guy whose session has expired.
|
||||
resp = rq.RESPONSE
|
||||
|
@ -155,7 +155,7 @@ class ZopeInstaller:
|
|||
# Create or update Appy-wide indexes and field-related indexes
|
||||
indexInfo = defaultIndexes.copy()
|
||||
tool = self.app.config
|
||||
for className in self.config.attributes.iterkeys():
|
||||
for className in self.config.attributes.keys():
|
||||
wrapperClass = tool.getAppyClass(className, wrapper=True)
|
||||
indexInfo.update(wrapperClass.getIndexes(includeDefaults=False))
|
||||
updateIndexes(self, indexInfo)
|
||||
|
@ -196,7 +196,7 @@ class ZopeInstaller:
|
|||
if hasattr(appyTool, 'beforeInstall'): appyTool.beforeInstall()
|
||||
|
||||
# Create the default users if they do not exist.
|
||||
for login, roles in self.defaultUsers.iteritems():
|
||||
for login, roles in self.defaultUsers.items():
|
||||
if not appyTool.count('User', noSecurity=True, login=login):
|
||||
appyTool.create('users', noSecurity=True, id=login, login=login,
|
||||
password3=login, password4=login,
|
||||
|
@ -277,7 +277,7 @@ class ZopeInstaller:
|
|||
name = klass.__name__
|
||||
module = klass.__module__
|
||||
wrapper = klass.wrapperClass
|
||||
exec 'from %s import manage_add%s as ctor' % (module, name)
|
||||
exec('from %s import manage_add%s as ctor' % (module, name))
|
||||
self.zopeContext.registerClass(meta_type=name,
|
||||
constructors = (ctor,), permission = None)
|
||||
# Create workflow prototypical instances in __instance__ attributes
|
||||
|
@ -316,7 +316,7 @@ class ZopeInstaller:
|
|||
# Post-initialise every Appy type
|
||||
for baseClass in klass.wrapperClass.__bases__:
|
||||
if baseClass.__name__ == 'AbstractWrapper': continue
|
||||
for name, appyType in baseClass.__dict__.iteritems():
|
||||
for name, appyType in baseClass.__dict__.items():
|
||||
if not isinstance(appyType, gen.Field) or \
|
||||
(isinstance(appyType, gen.Ref) and appyType.isBack):
|
||||
continue # Back refs are initialised within fw refs
|
||||
|
|
|
@@ -39,9 +39,9 @@

# ------------------------------------------------------------------------------
rowDelimiters = {'-':'middle', '=':'top', '_':'bottom'}
rowDelms = ''.join(rowDelimiters.keys())
rowDelms = ''.join(list(rowDelimiters.keys()))
cellDelimiters = {'|': 'center', ';': 'left', '!': 'right'}
cellDelms = ''.join(cellDelimiters.keys())
cellDelms = ''.join(list(cellDelimiters.keys()))

pxDict = {
    # Page-related elements

@@ -138,7 +138,7 @@ class Table:
        # Initialise simple params, either from the true params or from
        # the p_other Table instance.
        for param in Table.simpleParams:
            exec 'self.%s = %s%s' % (param, source, param)
            exec('self.%s = %s%s' % (param, source, param))
        # The following attribute will store a special Row instance used for
        # defining column properties.
        self.headerRow = None

@@ -120,10 +120,10 @@ def sendMail(config, to, subject, body, attachments=None, log=None):
        if res and log:
            log('could not send mail to some recipients. %s' % str(res),
                type='warning')
    except smtplib.SMTPException, e:
    except smtplib.SMTPException as e:
        if log:
            log('%s: mail sending failed (%s)' % (config, str(e)), type='error')
    except socket.error, se:
    except socket.error as se:
        if log:
            log('%s: mail sending failed (%s)' % (config, str(se)),type='error')
# ------------------------------------------------------------------------------

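The 'except smtplib.SMTPException, e' spelling is a syntax error in Python 3; the 'as' keyword is mandatory (and already accepted since Python 2.6), and the bound name is cleared when the except block ends. A minimal sketch of the converted pattern (the localhost connection is illustrative and may legitimately fail):

    import smtplib
    try:
        server = smtplib.SMTP('localhost', timeout=2)
    except (smtplib.SMTPException, OSError) as e:   # socket.error is an alias of OSError on Python 3
        print('mail sending failed (%s)' % e)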
@@ -38,10 +38,10 @@ class Migrator:
            if frozen.data.__class__.__name__ == 'Pdata':
                # The file content is split into several chunks.
                f.write(frozen.data.data)
                nextPart = frozen.data.next
                nextPart = frozen.data.__next__
                while nextPart:
                    f.write(nextPart.data)
                    nextPart = nextPart.next
                    nextPart = nextPart.__next__
            else:
                # Only one chunk
                f.write(frozen.data)

@@ -13,18 +13,18 @@ class TestMixin:
        '''Returns the list of sub-modules of p_app that are non-empty.'''
        res = []
        try:
            exec 'import %s' % moduleName
            exec 'moduleObj = %s' % moduleName
            exec('import %s' % moduleName)
            exec('moduleObj = %s' % moduleName)
            moduleFile = moduleObj.__file__
            if moduleFile.endswith('.pyc'):
                moduleFile = moduleFile[:-1]
        except ImportError, ie:
        except ImportError as ie:
            return res
        except SyntaxError, se:
        except SyntaxError as se:
            return res
        # Include the module if not empty. "Emptiness" is determined by the
        # absence of names beginning with other chars than "__".
        for elem in moduleObj.__dict__.iterkeys():
        for elem in moduleObj.__dict__.keys():
            if not elem.startswith('__'):
                res.append(moduleObj)
                break
@@ -66,10 +66,10 @@ def afterTest(test):
    '''Is executed after every test.'''
    g = test.globs
    appName = g['tool'].o.getAppName()
    exec 'from Products.%s import cov, covFolder, totalNumberOfTests, ' \
         'countTest' % appName
    exec('from Products.%s import cov, covFolder, totalNumberOfTests, ' \
         'countTest' % appName)
    countTest()
    exec 'from Products.%s import numberOfExecutedTests' % appName
    exec('from Products.%s import numberOfExecutedTests' % appName)
    if cov and (numberOfExecutedTests == totalNumberOfTests):
        cov.stop()
        appModules = test.getNonEmptySubModules(appName)

@ -13,6 +13,7 @@ from appy.shared import mimeTypes
|
|||
from appy.shared import utils as sutils
|
||||
from appy.shared.data import languages
|
||||
from appy.shared.ldap_connector import LdapConnector
|
||||
import collections
|
||||
try:
|
||||
from AccessControl.ZopeSecurityPolicy import _noroles
|
||||
except ImportError:
|
||||
|
@ -36,7 +37,7 @@ class ToolMixin(BaseMixin):
|
|||
p_metaTypeOrAppyType.'''
|
||||
appName = self.getProductConfig().PROJECTNAME
|
||||
res = metaTypeOrAppyClass
|
||||
if not isinstance(metaTypeOrAppyClass, basestring):
|
||||
if not isinstance(metaTypeOrAppyClass, str):
|
||||
res = gutils.getClassName(metaTypeOrAppyClass, appName)
|
||||
if res.find('_wrappers') != -1:
|
||||
elems = res.split('_')
|
||||
|
@ -439,7 +440,7 @@ class ToolMixin(BaseMixin):
|
|||
|
||||
def quote(self, s, escapeWithEntity=True):
|
||||
'''Returns the quoted version of p_s.'''
|
||||
if not isinstance(s, basestring): s = str(s)
|
||||
if not isinstance(s, str): s = str(s)
|
||||
repl = escapeWithEntity and ''' or "\\'"
|
||||
s = s.replace('\r\n', '').replace('\n', '').replace("'", repl)
|
||||
return "'%s'" % s
|
||||
|
@ -452,7 +453,7 @@ class ToolMixin(BaseMixin):
|
|||
|
||||
def getZopeClass(self, name):
|
||||
'''Returns the Zope class whose name is p_name.'''
|
||||
exec 'from Products.%s.%s import %s as C'% (self.getAppName(),name,name)
|
||||
exec('from Products.%s.%s import %s as C'% (self.getAppName(),name,name))
|
||||
return C
|
||||
|
||||
def getAppyClass(self, zopeName, wrapper=False):
|
||||
|
@ -476,12 +477,12 @@ class ToolMixin(BaseMixin):
|
|||
'''Gets the different ways objects of p_klass can be created (currently:
|
||||
via a web form or programmatically only). Result is a list.'''
|
||||
res = []
|
||||
if not klass.__dict__.has_key('create'):
|
||||
if 'create' not in klass.__dict__:
|
||||
return ['form']
|
||||
else:
|
||||
means = klass.create
|
||||
if means:
|
||||
if isinstance(means, basestring): res = [means]
|
||||
if isinstance(means, str): res = [means]
|
||||
else: res = means
|
||||
return res
|
||||
|
||||
|
@ -511,7 +512,7 @@ class ToolMixin(BaseMixin):
|
|||
else:
|
||||
creators = self.getProductConfig().appConfig.defaultCreators
|
||||
# Resolve case (3): if "creators" is a method, execute it.
|
||||
if callable(creators): creators = creators(self.appy())
|
||||
if isinstance(creators, collections.Callable): creators = creators(self.appy())
|
||||
# Resolve case (2)
|
||||
if isinstance(creators, bool) or not creators: return creators
|
||||
# Resolve case (1): checks whether the user has at least one of the
|
||||
|
@ -595,7 +596,7 @@ class ToolMixin(BaseMixin):
|
|||
rq = self.REQUEST
|
||||
# Store the search criteria in the session
|
||||
criteria = self._getDefaultSearchCriteria()
|
||||
for name in rq.form.keys():
|
||||
for name in list(rq.form.keys()):
|
||||
if name.startswith('w_') and not self._searchValueIsEmpty(name):
|
||||
hasStar = name.find('*') != -1
|
||||
fieldName = not hasStar and name[2:] or name[2:name.find('*')]
|
||||
|
@ -609,17 +610,17 @@ class ToolMixin(BaseMixin):
|
|||
# The type of the value is encoded after char "*".
|
||||
name, type = name.split('*')
|
||||
if type == 'bool':
|
||||
exec 'value = %s' % value
|
||||
exec('value = %s' % value)
|
||||
elif type in ('int', 'float'):
|
||||
# Get the "from" value
|
||||
if not value: value = None
|
||||
else:
|
||||
exec 'value = %s(value)' % type
|
||||
exec('value = %s(value)' % type)
|
||||
# Get the "to" value
|
||||
toValue = rq.form['%s_to' % name[2:]].strip()
|
||||
if not toValue: toValue = None
|
||||
else:
|
||||
exec 'toValue = %s(toValue)' % type
|
||||
exec('toValue = %s(toValue)' % type)
|
||||
value = (value, toValue)
|
||||
elif type == 'date':
|
||||
prefix = name[2:]
|
||||
|
@ -640,8 +641,8 @@ class ToolMixin(BaseMixin):
|
|||
if len(type) > 6:
|
||||
transform = type.split('-')[1]
|
||||
if (transform != 'none') and value:
|
||||
exec 'value = value.%s()' % \
|
||||
self.transformMethods[transform]
|
||||
exec('value = value.%s()' % \
|
||||
self.transformMethods[transform])
|
||||
if isinstance(value, list):
|
||||
# It is a list of values. Check if we have an operator for
|
||||
# the field, to see if we make an "and" or "or" for all
|
||||
|
@ -697,7 +698,7 @@ class ToolMixin(BaseMixin):
|
|||
it among search criteria in the session.'''
|
||||
if not refInfo and (self.REQUEST.get('search', None) == 'customSearch'):
|
||||
criteria = self.REQUEST.SESSION.get('searchCriteria', None)
|
||||
if criteria and criteria.has_key('_ref'): refInfo = criteria['_ref']
|
||||
if criteria and '_ref' in criteria: refInfo = criteria['_ref']
|
||||
if not refInfo: return None, None
|
||||
objectUid, fieldName = refInfo.split(':')
|
||||
obj = self.getObject(objectUid)
|
||||
|
@ -856,7 +857,7 @@ class ToolMixin(BaseMixin):
|
|||
try:
|
||||
creds = creds.split(' ')[-1]
|
||||
login, password = base64.decodestring(creds).split(':', 1)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
pass
|
||||
# b. Identify the user from the authentication cookie.
|
||||
if not login:
|
||||
|
@ -968,7 +969,7 @@ class ToolMixin(BaseMixin):
|
|||
# Invalidate the user session.
|
||||
try:
|
||||
sdm = self.session_data_manager
|
||||
except AttributeError, ae:
|
||||
except AttributeError as ae:
|
||||
# When ran in test mode, session_data_manager is not there.
|
||||
sdm = None
|
||||
if sdm:
|
||||
|
@ -977,7 +978,7 @@ class ToolMixin(BaseMixin):
|
|||
session.invalidate()
|
||||
self.log('logged out.')
|
||||
# Remove user from variable "loggedUsers"
|
||||
if self.loggedUsers.has_key(userId): del self.loggedUsers[userId]
|
||||
if userId in self.loggedUsers: del self.loggedUsers[userId]
|
||||
return self.goto(self.getApp().absolute_url())
|
||||
|
||||
# This dict stores, for every logged user, the date/time of its last access
|
||||
|
@ -1247,7 +1248,7 @@ class ToolMixin(BaseMixin):
|
|||
if hasattr(klass, 'popup'):
|
||||
res.target = 'appyIFrame'
|
||||
d = klass.popup
|
||||
if isinstance(d, basestring):
|
||||
if isinstance(d, str):
|
||||
# Width only
|
||||
params = d[:-2]
|
||||
else:
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
- mixins/ToolMixin is mixed in with the generated application Tool class.'''
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
import os, os.path, re, sys, types, urllib, cgi
|
||||
import os, os.path, re, sys, types, urllib.request, urllib.parse, urllib.error, cgi
|
||||
from appy import Object
|
||||
from appy.px import Px
|
||||
from appy.fields.workflow import UiTransition
|
||||
|
@ -15,6 +15,7 @@ from appy.shared import utils as sutils
|
|||
from appy.shared.data import rtlLanguages
|
||||
from appy.shared.xml_parser import XmlMarshaller, XmlUnmarshaller
|
||||
from appy.shared.diff import HtmlDiff
|
||||
import collections
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
NUMBERED_ID = re.compile('.+\d{4}$')
|
||||
|
@ -371,9 +372,9 @@ class BaseMixin:
|
|||
# p_errors object. Within this object, for every error message that is
|
||||
# not a string, we replace it with the standard validation error for the
|
||||
# corresponding field.
|
||||
for key, value in errors.__dict__.iteritems():
|
||||
for key, value in errors.__dict__.items():
|
||||
resValue = value
|
||||
if not isinstance(resValue, basestring):
|
||||
if not isinstance(resValue, str):
|
||||
resValue = self.translate('field_invalid')
|
||||
setattr(errors, key, resValue)
|
||||
return msg
|
||||
|
@ -419,7 +420,7 @@ class BaseMixin:
|
|||
# Trigger field-specific validation
|
||||
self.intraFieldValidation(errors, values)
|
||||
if errors.__dict__:
|
||||
for k,v in errors.__dict__.iteritems(): rq.set('%s_error' % k, v)
|
||||
for k,v in errors.__dict__.items(): rq.set('%s_error' % k, v)
|
||||
self.say(errorMessage)
|
||||
return self.gotoEdit()
|
||||
|
||||
|
@ -427,7 +428,7 @@ class BaseMixin:
|
|||
msg = self.interFieldValidation(errors, values)
|
||||
if not msg: msg = errorMessage
|
||||
if errors.__dict__:
|
||||
for k,v in errors.__dict__.iteritems(): rq.set('%s_error' % k, v)
|
||||
for k,v in errors.__dict__.items(): rq.set('%s_error' % k, v)
|
||||
self.say(msg)
|
||||
return self.gotoEdit()
|
||||
|
||||
|
@ -506,7 +507,7 @@ class BaseMixin:
|
|||
# Get the list of indexes that apply on this object. Else, Zope
|
||||
# will reindex all indexes defined in the catalog, and through
|
||||
# acquisition, wrong methods can be called on wrong objects.
|
||||
iNames = self.wrapperClass.getIndexes().keys()
|
||||
iNames = list(self.wrapperClass.getIndexes().keys())
|
||||
catalog.catalog_object(self, path, idxs=iNames)
|
||||
|
||||
def xml(self, action=None):
|
||||
|
@ -529,14 +530,14 @@ class BaseMixin:
|
|||
elif isinstance(methodRes, file):
|
||||
res = methodRes.read()
|
||||
methodRes.close()
|
||||
elif isinstance(methodRes, basestring) and \
|
||||
elif isinstance(methodRes, str) and \
|
||||
methodRes.startswith('<?xml'): # Already XML
|
||||
return methodRes
|
||||
else:
|
||||
marshaller = XmlMarshaller()
|
||||
oType = isinstance(methodRes, Object) and 'popo' or 'appy'
|
||||
res = marshaller.marshall(methodRes, objectType=oType)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
tb = sutils.Traceback.get()
|
||||
res = XmlMarshaller(rootTag='exception').marshall(tb)
|
||||
self.log(tb, type='error')
|
||||
|
@ -548,7 +549,7 @@ class BaseMixin:
|
|||
'''Prints a p_msg in the user interface. p_logLevel may be "info",
|
||||
"warning" or "error".'''
|
||||
rq = self.REQUEST
|
||||
if 'messages' not in rq.SESSION.keys():
|
||||
if 'messages' not in list(rq.SESSION.keys()):
|
||||
plist = self.getProductConfig().PersistentList
|
||||
messages = rq.SESSION['messages'] = plist()
|
||||
else:
|
||||
|
@ -619,7 +620,7 @@ class BaseMixin:
|
|||
For a multilingual string field, p_changes can contain a key for
|
||||
every language, of the form <field name>-<language>.'''
|
||||
# Add to the p_changes dict the field labels
|
||||
for name in changes.keys():
|
||||
for name in list(changes.keys()):
|
||||
# "name" can contain the language for multilingual fields.
|
||||
if '-' in name:
|
||||
fieldName, lg = name.split('-')
|
||||
|
@ -646,7 +647,7 @@ class BaseMixin:
|
|||
historized fields, while p_self already contains the (potentially)
|
||||
modified values.'''
|
||||
# Remove from previousData all values that were not changed
|
||||
for name in previousData.keys():
|
||||
for name in list(previousData.keys()):
|
||||
field = self.getAppyType(name)
|
||||
prev = previousData[name]
|
||||
curr = field.getValue(self)
|
||||
|
@ -655,7 +656,7 @@ class BaseMixin:
|
|||
((prev == '') and (curr == None)):
|
||||
del previousData[name]
|
||||
continue
|
||||
except UnicodeDecodeError, ude:
|
||||
except UnicodeDecodeError as ude:
|
||||
# The string comparisons above may imply silent encoding-related
|
||||
# conversions that may produce this exception.
|
||||
continue
|
||||
|
@ -743,15 +744,15 @@ class BaseMixin:
|
|||
else:
|
||||
klass = self.appy().klass
|
||||
moduleName = klass.__module__
|
||||
exec 'import %s' % moduleName
|
||||
exec 'reload(%s)' % moduleName
|
||||
exec 'res = %s.%s' % (moduleName, klass.__name__)
|
||||
exec('import %s' % moduleName)
|
||||
exec('reload(%s)' % moduleName)
|
||||
exec('res = %s.%s' % (moduleName, klass.__name__))
|
||||
# More manipulations may have occurred in m_update
|
||||
if hasattr(res, 'update'):
|
||||
parentName= res.__bases__[-1].__name__
|
||||
moduleName= 'Products.%s.wrappers' % self.getTool().getAppName()
|
||||
exec 'import %s' % moduleName
|
||||
exec 'parent = %s.%s' % (moduleName, parentName)
|
||||
exec('import %s' % moduleName)
|
||||
exec('parent = %s.%s' % (moduleName, parentName))
|
||||
res.update(parent)
|
||||
return res
|
||||
|
||||
|
@ -839,15 +840,15 @@ class BaseMixin:
|
|||
req = self.REQUEST
|
||||
for field in self.getAllAppyTypes():
|
||||
if field.page.name != pageName: continue
|
||||
if field.masterValue and callable(field.masterValue):
|
||||
if field.masterValue and isinstance(field.masterValue, collections.Callable):
|
||||
# We have a slave field that is updated via ajax requests.
|
||||
name = field.name
|
||||
# Remember the request value for this field if present.
|
||||
if req.has_key(name) and req[name]:
|
||||
if name in req and req[name]:
|
||||
requestValues[name] = req[name]
|
||||
# Remember the validation error for this field if present.
|
||||
errorKey = '%s_error' % name
|
||||
if req.has_key(errorKey):
|
||||
if errorKey in req:
|
||||
errors[name] = req[errorKey]
|
||||
return sutils.getStringDict(requestValues), sutils.getStringDict(errors)
|
||||
|
||||
|
@ -899,7 +900,7 @@ class BaseMixin:
|
|||
del phases[res[i].name]
|
||||
del res[i]
|
||||
# Compute next/previous phases of every phase
|
||||
for ph in phases.itervalues():
|
||||
for ph in phases.values():
|
||||
ph.computeNextPrevious(res)
|
||||
ph.totalNbOfPhases = len(res)
|
||||
# Restrict the result to the current phase if required
|
||||
|
@ -1208,7 +1209,7 @@ class BaseMixin:
|
|||
# fields' old values by their formatted counterparts.
|
||||
event = history[i].copy()
|
||||
event['changes'] = {}
|
||||
for name, oldValue in history[i]['changes'].iteritems():
|
||||
for name, oldValue in history[i]['changes'].items():
|
||||
# "name" can specify a language-specific part in a
|
||||
# multilingual field. "oldValue" is a tuple
|
||||
# (value, fieldName).
|
||||
|
@ -1455,7 +1456,7 @@ class BaseMixin:
|
|||
# Add users or groups having, locally, this role on this object.
|
||||
localRoles = getattr(self.aq_base, '__ac_local_roles__', None)
|
||||
if not localRoles: return res
|
||||
for id, roles in localRoles.iteritems():
|
||||
for id, roles in localRoles.items():
|
||||
for role in roles:
|
||||
if role in res:
|
||||
usr = 'user:%s' % id
|
||||
|
@ -1465,7 +1466,7 @@ class BaseMixin:
|
|||
def showState(self):
|
||||
'''Must I show self's current state ?'''
|
||||
stateShow = self.State(name=False).show
|
||||
if callable(stateShow):
|
||||
if isinstance(stateShow, collections.Callable):
|
||||
return stateShow(self.getWorkflow(), self.appy())
|
||||
return stateShow
|
||||
|
||||
|
@ -1479,7 +1480,7 @@ class BaseMixin:
|
|||
if not hasattr(klass, 'showTransitions'): return (layoutType=='view')
|
||||
showValue = klass.showTransitions
|
||||
# This value can be a single value or a tuple/list of values.
|
||||
if isinstance(showValue, basestring): return layoutType == showValue
|
||||
if isinstance(showValue, str): return layoutType == showValue
|
||||
return layoutType in showValue
|
||||
|
||||
getUrlDefaults = {'page':True, 'nav':True}
|
||||
|
@ -1524,8 +1525,8 @@ class BaseMixin:
|
|||
if 'nav' not in kwargs: kwargs['nav'] = True
|
||||
# Create URL parameters from kwargs
|
||||
params = []
|
||||
for name, value in kwargs.iteritems():
|
||||
if isinstance(value, basestring):
|
||||
for name, value in kwargs.items():
|
||||
if isinstance(value, str):
|
||||
params.append('%s=%s' % (name, value))
|
||||
elif self.REQUEST.get(name, ''):
|
||||
params.append('%s=%s' % (name, self.REQUEST[name]))
|
||||
|
@ -1601,7 +1602,7 @@ class BaseMixin:
|
|||
# Find the name of the method to call.
|
||||
methodName = rq.QUERY_STRING.split('=')[1]
|
||||
return self.xml(action=methodName)
|
||||
elif rq.has_key('do'):
|
||||
elif 'do' in rq:
|
||||
# The user wants to call a method on this object and get its result
|
||||
# as XML.
|
||||
return self.xml(action=rq['do'])
|
||||
|
@ -1672,7 +1673,7 @@ class BaseMixin:
|
|||
if field.type != 'group':
|
||||
fieldMapping = field.mapping[label]
|
||||
if fieldMapping:
|
||||
if callable(fieldMapping):
|
||||
if isinstance(fieldMapping, collections.Callable):
|
||||
fieldMapping = field.callMethod(self, fieldMapping)
|
||||
mapping.update(fieldMapping)
|
||||
label = getattr(field, '%sId' % label)
|
||||
|
@ -1697,15 +1698,15 @@ class BaseMixin:
|
|||
# Perform replacements, according to p_format.
|
||||
res = self.formatText(res, format)
|
||||
# Perform variable replacements
|
||||
for name, repl in mapping.iteritems():
|
||||
if not isinstance(repl, basestring): repl = str(repl)
|
||||
for name, repl in mapping.items():
|
||||
if not isinstance(repl, str): repl = str(repl)
|
||||
res = res.replace('${%s}' % name, repl)
|
||||
return res
|
||||
|
||||
def getPageLayout(self, layoutType):
|
||||
'''Returns the layout corresponding to p_layoutType for p_self.'''
|
||||
res = self.wrapperClass.getPageLayouts()[layoutType]
|
||||
if isinstance(res, basestring): res = Table(res)
|
||||
if isinstance(res, str): res = Table(res)
|
||||
return res
|
||||
|
||||
def download(self, name=None):
|
||||
|
|
gen/model.py
@@ -5,6 +5,7 @@
# ------------------------------------------------------------------------------
import types
import appy.gen as gen
import collections

# Prototypical instances of every type -----------------------------------------
class Protos:
|
@ -49,7 +50,7 @@ class ModelClass:
|
|||
'''This method returns the code declaration for p_appyType.'''
|
||||
typeArgs = ''
|
||||
proto = Protos.get(appyType)
|
||||
for name, value in appyType.__dict__.iteritems():
|
||||
for name, value in appyType.__dict__.items():
|
||||
# Some attrs can't be given to the constructor
|
||||
if name in Protos.notInit: continue
|
||||
# If the given value corresponds to the default value, don't give it
|
||||
|
@ -74,7 +75,7 @@ class ModelClass:
|
|||
# defined. So we initialize it to None. The post-init of the
|
||||
# field must be done manually in wrappers.py.
|
||||
value = 'None'
|
||||
elif isinstance(value, basestring):
|
||||
elif isinstance(value, str):
|
||||
value = '"%s"' % value
|
||||
elif isinstance(value, gen.Ref):
|
||||
if not value.isBack: continue
|
||||
|
@ -91,10 +92,10 @@ class ModelClass:
|
|||
value = 'Grp("%s")' % value.name
|
||||
elif isinstance(value, gen.Page):
|
||||
value = 'pges["%s"]' % value.name
|
||||
elif callable(value):
|
||||
elif isinstance(value, collections.Callable):
|
||||
className = wrapperName
|
||||
if (appyType.type == 'Ref') and appyType.isBack:
|
||||
className = value.im_class.__name__
|
||||
className = value.__self__.__class__.__name__
|
||||
value = '%s.%s' % (className, value.__name__)
|
||||
typeArgs += '%s=%s,' % (name, value)
|
||||
return '%s(%s)' % (appyType.__class__.__name__, typeArgs)
|
||||
|
@ -118,17 +119,17 @@ class ModelClass:
|
|||
pages = {}
|
||||
layouts = []
|
||||
for name in klass._appy_attributes:
|
||||
exec 'appyType = klass.%s' % name
|
||||
exec('appyType = klass.%s' % name)
|
||||
if appyType.page.name not in pages:
|
||||
pages[appyType.page.name] = appyType.page
|
||||
res += ' pges = {'
|
||||
for page in pages.itervalues():
|
||||
for page in pages.values():
|
||||
# Determine page "show" attributes
|
||||
pShow = ''
|
||||
for attr in ('',) + page.subElements:
|
||||
attrName = 'show%s' % attr.capitalize()
|
||||
pageShow = getattr(page, attrName)
|
||||
if isinstance(pageShow, basestring): pageShow='"%s"' % pageShow
|
||||
if isinstance(pageShow, str): pageShow='"%s"' % pageShow
|
||||
elif callable(pageShow):
|
||||
pageShow = '%s.%s' % (wrapperName, pageShow.__name__)
|
||||
if pageShow != True:
|
||||
|
@ -142,7 +143,7 @@ class ModelClass:
|
|||
res += '}\n'
|
||||
# Secondly, dump every (not Ref.isBack) attribute
|
||||
for name in klass._appy_attributes:
|
||||
exec 'appyType = klass.%s' % name
|
||||
exec('appyType = klass.%s' % name)
|
||||
if (appyType.type == 'Ref') and appyType.isBack: continue
|
||||
typeBody = klass._appy_getTypeBody(appyType, wrapperName)
|
||||
res += ' %s=%s\n' % (name, typeBody)
|
||||
|
@ -305,12 +306,12 @@ class Tool(ModelClass):
|
|||
@classmethod
|
||||
def _appy_clean(klass):
|
||||
toClean = []
|
||||
for k, v in klass.__dict__.iteritems():
|
||||
for k, v in klass.__dict__.items():
|
||||
if not k.startswith('__') and (not k.startswith('_appy_')):
|
||||
if k not in defaultToolFields:
|
||||
toClean.append(k)
|
||||
for k in toClean:
|
||||
exec 'del klass.%s' % k
|
||||
exec('del klass.%s' % k)
|
||||
klass._appy_attributes = list(defaultToolFields)
|
||||
klass.folder = True
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# ------------------------------------------------------------------------------
|
||||
import os, re, time, copy
|
||||
from utils import produceNiceMessage
|
||||
from .utils import produceNiceMessage
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
poHeader = '''msgid ""
|
||||
|
@ -213,7 +213,7 @@ class PoFile:
|
|||
if keepExistingOrder:
|
||||
# Update existing messages and add inexistent messages to the end.
|
||||
for newMsg in newMessages:
|
||||
if self.messagesDict.has_key(newMsg.id):
|
||||
if newMsg.id in self.messagesDict:
|
||||
msg = self.messagesDict[newMsg.id]
|
||||
else:
|
||||
msg = self.addMessage(newMsg)
|
||||
|
@ -224,7 +224,7 @@ class PoFile:
|
|||
notNewMessages = [m for m in self.messages if m.id not in newIds]
|
||||
del self.messages[:]
|
||||
for newMsg in newMessages:
|
||||
if self.messagesDict.has_key(newMsg.id):
|
||||
if newMsg.id in self.messagesDict:
|
||||
msg = self.messagesDict[newMsg.id]
|
||||
self.messages.append(msg)
|
||||
else:
|
||||
|
@ -240,7 +240,7 @@ class PoFile:
|
|||
fb = ''
|
||||
if not self.isPot:
|
||||
# I must add fallbacks
|
||||
if fallbacks.has_key(self.language):
|
||||
if self.language in fallbacks:
|
||||
fb = '"X-is-fallback-for: %s\\n"' % fallbacks[self.language]
|
||||
f.write(poHeader % (self.applicationName, creationTime,
|
||||
self.language, self.language, self.domain, fb))
|
||||
|
|
gen/utils.py
@@ -1,7 +1,8 @@
# ------------------------------------------------------------------------------
import re, os, os.path, base64, urllib
import re, os, os.path, base64, urllib.request, urllib.parse, urllib.error
from appy.px import Px
from appy.shared import utils as sutils
import collections

# Function for creating a Zope object ------------------------------------------
def createObject(folder, id, className, appName, wf=True, noSecurity=False):
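Python 3 split the old urllib module, which is why the single import above becomes three: request-level functions live in urllib.request, quoting and URL parsing in urllib.parse, and the exception classes in urllib.error. A small sketch of the renamed calls this module relies on further down (the cookie value is illustrative):

    import urllib.parse
    cookie = urllib.parse.quote('bGV0bWVpbjpzZWNyZXQ=\n')   # was urllib.quote in Python 2
    raw = urllib.parse.unquote(cookie)                       # was urllib.unquote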
@ -10,8 +11,8 @@ def createObject(folder, id, className, appName, wf=True, noSecurity=False):
|
|||
creation of the config object), computing workflow-related info is not
|
||||
possible at this time. This is why this function can be called with
|
||||
p_wf=False.'''
|
||||
exec 'from Products.%s.%s import %s as ZopeClass' % \
|
||||
(appName, className, className)
|
||||
exec('from Products.%s.%s import %s as ZopeClass' % \
|
||||
(appName, className, className))
|
||||
# Get the tool. It may not be present yet, maybe are we creating it now.
|
||||
if folder.meta_type.endswith('Folder'):
|
||||
# p_folder is a standard Zope (temp) folder.
|
||||
|
@ -178,7 +179,7 @@ def callMethod(obj, method, klass=None, cache=True):
|
|||
if methodType == 'staticmethod':
|
||||
method = method.__get__(klass)
|
||||
elif methodType == 'instancemethod':
|
||||
method = method.im_func
|
||||
method = method.__func__
|
||||
# Call the method if cache is not needed
|
||||
if not cache: return method(obj)
|
||||
# If first arg of method is named "tool" instead of the traditional "self",
|
||||
|
@ -187,7 +188,7 @@ def callMethod(obj, method, klass=None, cache=True):
|
|||
# Every method call, even on different instances, will be cached in a unique
|
||||
# key.
|
||||
cheat = False
|
||||
if not klass and (method.func_code.co_varnames[0] == 'tool'):
|
||||
if not klass and (method.__code__.co_varnames[0] == 'tool'):
|
||||
prefix = obj.klass.__name__
|
||||
obj = obj.tool
|
||||
cheat = True
|
||||
|
@ -200,7 +201,7 @@ def callMethod(obj, method, klass=None, cache=True):
|
|||
else:
|
||||
prefix = obj.uid
|
||||
# Second part of the key: p_method name
|
||||
key = '%s:%s' % (prefix, method.func_name)
|
||||
key = '%s:%s' % (prefix, method.__name__)
|
||||
# Return the cached value if present in the method cache.
|
||||
if key in rq.methodCache:
|
||||
return rq.methodCache[key]
|
||||
|
@ -216,20 +217,20 @@ def readCookie(request):
|
|||
(None, None).'''
|
||||
cookie = request.get('_appy_', None)
|
||||
if not cookie: return None, None
|
||||
cookieValue = base64.decodestring(urllib.unquote(cookie))
|
||||
cookieValue = base64.decodestring(urllib.parse.unquote(cookie))
|
||||
if ':' in cookieValue: return cookieValue.split(':')
|
||||
return None, None
|
||||
|
||||
def writeCookie(login, password, request):
|
||||
'''Encode p_login and p_password into the cookie set in the p_request.'''
|
||||
cookieValue = base64.encodestring('%s:%s' % (login, password)).rstrip()
|
||||
cookieValue = urllib.quote(cookieValue)
|
||||
cookieValue = urllib.parse.quote(cookieValue)
|
||||
request.RESPONSE.setCookie('_appy_', cookieValue, path='/')
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
def initMasterValue(v):
|
||||
'''Standardizes p_v as a list of strings, excepted if p_v is a method.'''
|
||||
if callable(v): return v
|
||||
if isinstance(v, collections.Callable): return v
|
||||
if not isinstance(v, bool) and not v: res = []
|
||||
elif type(v) not in sutils.sequenceTypes: res = [v]
|
||||
else: res = v
|
||||
|
@@ -243,7 +244,7 @@ class No:
       instead. When creating such an instance, you can specify an error
       message.'''
    def __init__(self, msg): self.msg = msg
    def __nonzero__(self): return False
    def __bool__(self): return False
    def __repr__(self): return '<No: %s>' % self.msg

# ------------------------------------------------------------------------------

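The truth-value hook was renamed in Python 3: __nonzero__ is ignored and __bool__ is consulted instead, so the No class keeps evaluating as falsy in if tests only with the new name. A sketch of a dual-version spelling (purely illustrative, not the Appy code):

    class No:
        # Falsy wrapper carrying an explanation message.
        def __init__(self, msg): self.msg = msg
        def __bool__(self): return False    # Python 3 hook
        __nonzero__ = __bool__              # Python 2 alias; harmless on Python 3
    assert not No('permission denied')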
@ -424,7 +424,7 @@ class ToolWrapper(AbstractWrapper):
|
|||
'<tr><th></th><th>%s</th></tr>' % \
|
||||
self.translate('last_user_access')
|
||||
rows = []
|
||||
for userId, lastAccess in self.o.loggedUsers.items():
|
||||
for userId, lastAccess in list(self.o.loggedUsers.items()):
|
||||
user = self.search1('User', noSecurity=True, login=userId)
|
||||
if not user: continue # Could have been deleted in the meanwhile
|
||||
fmt = '%s (%s)' % (self.dateFormat, self.hourFormat)
|
||||
|
@ -515,7 +515,7 @@ class ToolWrapper(AbstractWrapper):
|
|||
failed += subFailed
|
||||
try:
|
||||
startObject.reindex()
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
failed.append(startObject)
|
||||
return nb, failed
|
||||
|
||||
|
|
|
@ -343,7 +343,7 @@ class UserWrapper(AbstractWrapper):
|
|||
if not localRoles: return res
|
||||
# Gets the logins of this user and all its groups.
|
||||
logins = self.getLogins()
|
||||
for login, roles in localRoles.iteritems():
|
||||
for login, roles in localRoles.items():
|
||||
# Ignore logins not corresponding to this user.
|
||||
if login not in logins: continue
|
||||
for role in roles:
|
||||
|
@ -388,7 +388,7 @@ class UserWrapper(AbstractWrapper):
|
|||
if not localRoles: return
|
||||
# Gets the logins of this user and all its groups.
|
||||
userLogins = self.getLogins()
|
||||
for login, roles in localRoles.iteritems():
|
||||
for login, roles in localRoles.items():
|
||||
# Ignore logins not corresponding to this user.
|
||||
if login not in userLogins: continue
|
||||
for role in roles:
|
||||
|
|
|
@ -861,7 +861,7 @@ class AbstractWrapper(object):
|
|||
if len(self.__class__.__bases__) > 1:
|
||||
# There is a custom user class
|
||||
custom = self.__class__.__bases__[-1]
|
||||
if custom.__dict__.has_key(methodName):
|
||||
if methodName in custom.__dict__:
|
||||
return custom.__dict__[methodName]
|
||||
|
||||
def _callCustom(self, methodName, *args, **kwargs):
|
||||
|
@ -973,7 +973,7 @@ class AbstractWrapper(object):
|
|||
present, will not be called; any other defined method will not be
|
||||
called neither (ie, Ref.insert, Ref.beforeLink, Ref.afterLink...).
|
||||
'''
|
||||
isField = isinstance(fieldNameOrClass, basestring)
|
||||
isField = isinstance(fieldNameOrClass, str)
|
||||
tool = self.tool.o
|
||||
# Determine the class of the object to create
|
||||
if isField:
|
||||
|
@ -984,7 +984,7 @@ class AbstractWrapper(object):
|
|||
klass = fieldNameOrClass
|
||||
portalType = tool.getPortalType(klass)
|
||||
# Determine object id
|
||||
if kwargs.has_key('id'):
|
||||
if 'id' in kwargs:
|
||||
objId = kwargs['id']
|
||||
del kwargs['id']
|
||||
else:
|
||||
|
@ -1002,7 +1002,7 @@ class AbstractWrapper(object):
|
|||
noSecurity=noSecurity)
|
||||
appyObj = zopeObj.appy()
|
||||
# Set object attributes
|
||||
for attrName, attrValue in kwargs.iteritems():
|
||||
for attrName, attrValue in kwargs.items():
|
||||
try:
|
||||
setattr(appyObj, attrName, attrValue)
|
||||
except AttributeError, ae:
|
||||
|
@ -1183,8 +1183,8 @@ class AbstractWrapper(object):
|
|||
# Get the Appy object from the brain
|
||||
if noSecurity: method = '_unrestrictedGetObject'
|
||||
else: method = 'getObject'
|
||||
exec 'obj = brain.%s().appy()' % method
|
||||
exec expression
|
||||
exec('obj = brain.%s().appy()' % method)
|
||||
exec(expression)
|
||||
return ctx
|
||||
|
||||
def reindex(self, fields=None, unindex=False):
|
||||
|
@ -1249,7 +1249,7 @@ class AbstractWrapper(object):
|
|||
else:
|
||||
return xml
|
||||
elif format == 'csv':
|
||||
if isinstance(at, basestring):
|
||||
if isinstance(at, str):
|
||||
marshaller = CsvMarshaller(include=include, exclude=exclude)
|
||||
return marshaller.marshall(self)
|
||||
else:
|
||||
|
|
|
@ -101,7 +101,7 @@ class BufferAction:
|
|||
try:
|
||||
res = self._evalExpr(expr, context)
|
||||
error = False
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
res = None
|
||||
errorMessage = EVAL_ERROR % (expr, self.getExceptionLine(e))
|
||||
self.manageError(result, context, errorMessage)
|
||||
|
@ -134,7 +134,7 @@ class BufferAction:
|
|||
error = False
|
||||
try:
|
||||
feRes = eval(self.fromExpr, context)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
msg = FROM_EVAL_ERROR% (self.fromExpr, self.getExceptionLine(e))
|
||||
self.manageError(result, context, msg)
|
||||
error = True
|
||||
|
@ -240,7 +240,7 @@ class ForAction(BufferAction):
|
|||
return
|
||||
# Remember variable hidden by iter if any
|
||||
hasHiddenVariable = False
|
||||
if context.has_key(self.iter):
|
||||
if self.iter in context:
|
||||
hiddenVariable = context[self.iter]
|
||||
hasHiddenVariable = True
|
||||
# In the case of cells, initialize some values
|
||||
|
|
|
@@ -80,15 +80,15 @@ NULL_ACTION_ERROR = 'There was a problem with this action. Possible causes: ' \
class BufferIterator:
    def __init__(self, buffer):
        self.buffer = buffer
        self.remainingSubBufferIndexes = self.buffer.subBuffers.keys()
        self.remainingElemIndexes = self.buffer.elements.keys()
        self.remainingSubBufferIndexes = list(self.buffer.subBuffers.keys())
        self.remainingElemIndexes = list(self.buffer.elements.keys())
        self.remainingSubBufferIndexes.sort()
        self.remainingElemIndexes.sort()

    def hasNext(self):
        return self.remainingSubBufferIndexes or self.remainingElemIndexes

    def next(self):
    def __next__(self):
        nextSubBufferIndex = None
        if self.remainingSubBufferIndexes:
            nextSubBufferIndex = self.remainingSubBufferIndexes[0]
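Python 3 iterators implement __next__() and are advanced with the next() built-in, so 2to3 renames the method above and turns iter.next() call sites into next(iter) later in this file. A hedged, standalone sketch of an iterator written the Python 3 way (not the real BufferIterator, which also keeps its hasNext() helper):

    class CountDown:
        # Yields n, n-1, ..., 1.
        def __init__(self, n): self.n = n
        def __iter__(self): return self
        def __next__(self):                 # was 'def next(self)' in Python 2
            if self.n <= 0: raise StopIteration
            self.n -= 1
            return self.n + 1
    assert list(CountDown(3)) == [3, 2, 1]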
@ -131,7 +131,7 @@ class Buffer:
|
|||
return subBuffer
|
||||
|
||||
def removeLastSubBuffer(self):
|
||||
subBufferIndexes = self.subBuffers.keys()
|
||||
subBufferIndexes = list(self.subBuffers.keys())
|
||||
subBufferIndexes.sort()
|
||||
lastIndex = subBufferIndexes.pop()
|
||||
del self.subBuffers[lastIndex]
|
||||
|
@ -176,7 +176,7 @@ class Buffer:
|
|||
self.write('<%s' % elem)
|
||||
# Some table elements must be patched (pod only)
|
||||
if self.pod: self.patchTableElement(elem, attrs)
|
||||
for name, value in attrs.items():
|
||||
for name, value in list(attrs.items()):
|
||||
if ignoreAttrs and (name in ignoreAttrs): continue
|
||||
if renamedAttrs and (name in renamedAttrs): name=renamedAttrs[name]
|
||||
# If the value begins with ':', it is a Python expression. Else,
|
||||
|
@ -244,7 +244,7 @@ class FileBuffer(Buffer):
|
|||
res, escape = expr.evaluate(self.env.context)
|
||||
if escape: self.dumpContent(res)
|
||||
else: self.write(res)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
if not self.env.raiseOnError:
|
||||
PodError.dump(self, EVAL_EXPR_ERROR % (expression, e),
|
||||
dumpTb=False)
|
||||
|
@ -271,7 +271,7 @@ class MemoryBuffer(Buffer):
|
|||
|
||||
def __init__(self, env, parent):
|
||||
Buffer.__init__(self, env, parent)
|
||||
self.content = u''
|
||||
self.content = ''
|
||||
self.elements = {}
|
||||
self.action = None
|
||||
|
||||
|
@ -297,7 +297,7 @@ class MemoryBuffer(Buffer):
|
|||
|
||||
def getIndex(self, podElemName):
|
||||
res = -1
|
||||
for index, podElem in self.elements.iteritems():
|
||||
for index, podElem in self.elements.items():
|
||||
if podElem.__class__.__name__.lower() == podElemName:
|
||||
if index > res:
|
||||
res = index
|
||||
|
@ -305,7 +305,7 @@ class MemoryBuffer(Buffer):
|
|||
|
||||
def getMainElement(self):
|
||||
res = None
|
||||
if self.elements.has_key(0):
|
||||
if 0 in self.elements:
|
||||
res = self.elements[0]
|
||||
return res
|
||||
|
||||
|
@ -317,7 +317,7 @@ class MemoryBuffer(Buffer):
|
|||
if elem != mainElem: return
|
||||
# elem is the same as the main elem. But is it really the main elem, or
|
||||
# the same elem, found deeper in the buffer?
|
||||
for index, iElem in self.elements.iteritems():
|
||||
for index, iElem in self.elements.items():
|
||||
foundElem = None
|
||||
if hasattr(iElem, 'OD'):
|
||||
if iElem.OD:
|
||||
|
@ -331,7 +331,7 @@ class MemoryBuffer(Buffer):
|
|||
def unreferenceElement(self, elem):
|
||||
# Find last occurrence of this element
|
||||
elemIndex = -1
|
||||
for index, iElem in self.elements.iteritems():
|
||||
for index, iElem in self.elements.items():
|
||||
foundElem = None
|
||||
if hasattr(iElem, 'OD'):
|
||||
# A POD element
|
||||
|
@ -347,7 +347,7 @@ class MemoryBuffer(Buffer):
|
|||
def pushSubBuffer(self, subBuffer):
|
||||
'''Sets p_subBuffer at the very end of the buffer.'''
|
||||
subIndex = None
|
||||
for index, aSubBuffer in self.subBuffers.iteritems():
|
||||
for index, aSubBuffer in self.subBuffers.items():
|
||||
if aSubBuffer == subBuffer:
|
||||
subIndex = index
|
||||
break
|
||||
|
@ -356,7 +356,7 @@ class MemoryBuffer(Buffer):
|
|||
# in the parent (if it is a temp buffer generated from a cut)
|
||||
del self.subBuffers[subIndex]
|
||||
self.subBuffers[self.getLength()] = subBuffer
|
||||
self.content += u' '
|
||||
self.content += ' '
|
||||
|
||||
def transferAllContent(self):
|
||||
'''Transfer all content to parent.'''
|
||||
|
@ -370,10 +370,10 @@ class MemoryBuffer(Buffer):
|
|||
oldParentLength = self.parent.getLength()
|
||||
self.parent.write(self.content)
|
||||
# Transfer elements
|
||||
for index, podElem in self.elements.iteritems():
|
||||
for index, podElem in self.elements.items():
|
||||
self.parent.elements[oldParentLength + index] = podElem
|
||||
# Transfer sub-buffers
|
||||
for index, buf in self.subBuffers.iteritems():
|
||||
for index, buf in self.subBuffers.items():
|
||||
self.parent.subBuffers[oldParentLength + index] = buf
|
||||
# Empty the buffer
|
||||
MemoryBuffer.__init__(self, self.env, self.parent)
|
||||
|
@ -391,7 +391,7 @@ class MemoryBuffer(Buffer):
|
|||
elem.colIndex = elem.tableInfo.curColIndex
|
||||
if elem == 'x':
|
||||
# See comment on similar statement in the method below.
|
||||
self.content += u' '
|
||||
self.content += ' '
|
||||
|
||||
def addExpression(self, expression, tiedHook=None):
|
||||
# Create the POD expression
|
||||
|
@ -400,20 +400,20 @@ class MemoryBuffer(Buffer):
|
|||
self.elements[self.getLength()] = expr
|
||||
# To be sure that an expr and an elem can't be found at the same index
|
||||
# in the buffer.
|
||||
self.content += u' '
|
||||
self.content += ' '
|
||||
|
||||
def addAttributes(self):
|
||||
'''pod-only: adds an Attributes instance into this buffer.'''
|
||||
attrs = Attributes(self.env)
|
||||
self.elements[self.getLength()] = attrs
|
||||
self.content += u' '
|
||||
self.content += ' '
|
||||
return attrs
|
||||
|
||||
def addAttribute(self, name, expr):
|
||||
'''px-only: adds an Attribute instance into this buffer.'''
|
||||
attr = Attribute(name, expr)
|
||||
self.elements[self.getLength()] = attr
|
||||
self.content += u' '
|
||||
self.content += ' '
|
||||
return attr
|
||||
|
||||
def _getVariables(self, expr):
|
||||
|
@@ -453,7 +453,7 @@ class MemoryBuffer(Buffer):
raise ParsingError(
ELEMENT_NOT_FOUND % (podElem, str([
e.__class__.__name__.lower() \
for e in self.elements.values()])))
for e in list(self.elements.values())])))
podElem = self.elements[indexPodElem]
# Check the 'from' clause
fromClause = None
@@ -471,7 +471,7 @@ class MemoryBuffer(Buffer):
self.env.ifActions.append(self.action)
if self.action.name:
# We must register this action as a named action
if self.env.namedIfActions.has_key(self.action.name):
if self.action.name in self.env.namedIfActions:
raise ParsingError(DUPLICATE_NAMED_IF)
self.env.namedIfActions[self.action.name] = self.action
elif actionType == 'else':
@@ -480,7 +480,7 @@ class MemoryBuffer(Buffer):
# Does the "else" action reference a named "if" action?
ifReference = subExpr.strip()
if ifReference:
if not self.env.namedIfActions.has_key(ifReference):
if ifReference not in self.env.namedIfActions:
raise ParsingError(ELSE_WITHOUT_NAMED_IF % ifReference)
linkedIfAction = self.env.namedIfActions[ifReference]
# This "else" action "consumes" the "if" action: this way,
@@ -510,7 +510,7 @@ class MemoryBuffer(Buffer):
self.action = NullAction(statementName, self, None, podElem,
None, source, fromClause)
res = indexPodElem
except ParsingError, ppe:
except ParsingError as ppe:
PodError.dump(self, ppe, removeFirstLine=True)
return res
@@ -552,7 +552,7 @@ class MemoryBuffer(Buffer):
elementsToDelete = []
mustShift = False
while iter.hasNext():
itemIndex, item = iter.next()
itemIndex, item = next(iter)
if keepFirstPart:
if itemIndex >= index:
newIndex = itemIndex-index
@@ -580,11 +580,11 @@ class MemoryBuffer(Buffer):
del self.subBuffers[subIndex]
if mustShift:
elements = {}
for elemIndex, elem in self.elements.iteritems():
for elemIndex, elem in self.elements.items():
elements[elemIndex-index] = elem
self.elements = elements
subBuffers = {}
for subIndex, buf in self.subBuffers.iteritems():
for subIndex, buf in self.subBuffers.items():
subBuffers[subIndex-index] = buf
self.subBuffers = subBuffers
# Manage content
@@ -598,7 +598,7 @@ class MemoryBuffer(Buffer):

def getElementIndexes(self, expressions=True):
res = []
for index, elem in self.elements.iteritems():
for index, elem in self.elements.items():
condition = isinstance(elem, Expression) or \
isinstance(elem, Attributes)
if not expressions:
@@ -696,7 +696,7 @@ class MemoryBuffer(Buffer):
iter = BufferIterator(self)
currentIndex = self.getStartIndex(removeMainElems)
while iter.hasNext():
index, evalEntry = iter.next()
index, evalEntry = next(iter)
result.write(self.content[currentIndex:index])
currentIndex = index + 1
if isinstance(evalEntry, Expression):
@@ -708,7 +708,7 @@ class MemoryBuffer(Buffer):
# This exception has already been treated (see the
# "except" block below). Simply re-raise it when needed.
if self.env.raiseOnError: raise e
except Exception, e:
except Exception as e:
if not self.env.raiseOnError:
PodError.dump(result, EVAL_EXPR_ERROR % (
evalEntry.expr, e))
@@ -729,5 +729,5 @@ class MemoryBuffer(Buffer):

def clean(self):
'''Cleans the buffer content.'''
self.content = u''
self.content = ''
# ------------------------------------------------------------------------------
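The MemoryBuffer hunks above apply two recurring 2to3 rewrites: dictionary iteration through items() instead of iteritems(), and the "except ... as ..." exception syntax. A minimal sketch of both patterns, for reference only (not code taken from the repository):

# Python 2 only:
#   for k, v in d.iteritems(): ...
#   except ParsingError, ppe: ...
# Python 2.6+ and Python 3:
d = {'index': 1}
for k, v in d.items():
    print(k, v)
try:
    raise ValueError('boom')
except ValueError as e:
    print(e)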
@@ -109,7 +109,7 @@ class Converter:
res = res[self.inputType]
else:
raise ConverterError(BAD_RESULT_TYPE % (self.resultType,
FILE_TYPES.keys()))
list(FILE_TYPES.keys())))
return res

def getResultUrl(self):
@@ -275,7 +275,7 @@ class ConverterScript:
' %s.\n' \
' "python" should be a UNO-enabled Python interpreter (ie the ' \
' one which is included in the LibreOffice distribution).' % \
str(FILE_TYPES.keys())
str(list(FILE_TYPES.keys()))
def run(self):
optParser = OptionParser(usage=ConverterScript.usage)
optParser.add_option("-p", "--port", dest="port",
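The Converter hunks above wrap FILE_TYPES.keys() in list() because dict.keys() returns a view object under Python 3, not a list. A small illustration, with made-up dictionary contents (the real FILE_TYPES values are not shown in this diff):

FILE_TYPES = {'odt': 'writer8', 'pdf': 'writer_pdf_Export'}   # hypothetical values
keys = FILE_TYPES.keys()             # a dict_keys view under Python 3
as_list = list(FILE_TYPES.keys())    # an actual list, safe to index or embed in a message
print(str(as_list))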
@@ -17,7 +17,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,USA.

# ------------------------------------------------------------------------------
import os, os.path, time, shutil, struct, random, urlparse
import os, os.path, time, shutil, struct, random, urllib.parse
from appy.pod import PodError
from appy.pod.odf_parser import OdfEnvironment
from appy.shared import mimeTypesExts
@@ -53,7 +53,7 @@ class DocImporter:
if at and not at.startswith('http') and not os.path.isfile(at):
raise PodError(FILE_NOT_FOUND % at)
self.format = format
self.res = u''
self.res = ''
self.renderer = renderer
self.ns = renderer.currentParser.env.namespaces
# Unpack some useful namespaces
@@ -285,7 +285,7 @@ class ImageImporter(DocImporter):
def moveFile(self, at, importPath):
'''Copies file at p_at into the ODT file at p_importPath.'''
# Has this image already been imported ?
for imagePath, imageAt in self.fileNames.iteritems():
for imagePath, imageAt in self.fileNames.items():
if imageAt == at:
# Yes!
i = importPath.rfind(self.pictFolder) + 1
@@ -327,7 +327,7 @@ class ImageImporter(DocImporter):
# The imageResolver is a Zope application. From it, we will
# retrieve the object on which the image is stored and get
# the file to download.
urlParts = urlparse.urlsplit(at)
urlParts = urllib.parse.urlsplit(at)
path = urlParts[2][1:].split('/')[:-1]
try:
obj = imageResolver.unrestrictedTraverse(path)
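The DocImporter/ImageImporter hunks above track the relocation of urlparse into urllib.parse in Python 3. The commit switches to the new location directly; if both interpreters had to keep working, a guarded import along these lines could be used instead (illustrative sketch, not part of the commit):

try:
    from urllib.parse import urlsplit    # Python 3
except ImportError:
    from urlparse import urlsplit         # Python 2
parts = urlsplit('http://example.com/a/b?x=1')
print(parts.path)                          # /a/b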
@ -148,7 +148,7 @@ class Expression(PodElement):
|
|||
# pod/px result.
|
||||
resultType = res.__class__.__name__
|
||||
if resultType == 'NoneType':
|
||||
res = u''
|
||||
res = ''
|
||||
elif resultType == 'str':
|
||||
res = res.decode('utf-8')
|
||||
elif resultType == 'unicode':
|
||||
|
@ -160,7 +160,7 @@ class Expression(PodElement):
|
|||
# Force escapeXml to False.
|
||||
escapeXml = False
|
||||
else:
|
||||
res = unicode(res)
|
||||
res = str(res)
|
||||
return res, escapeXml
|
||||
|
||||
class Attributes(PodElement):
|
||||
|
@ -199,7 +199,7 @@ class Attributes(PodElement):
|
|||
try:
|
||||
self.tiedExpression.evaluate(context)
|
||||
self.tiedExpression.evaluated = True
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
# Don't set "evaluated" to True. This way, when the buffer will
|
||||
# evaluate the expression directly, we will really evaluate it, so
|
||||
# the error will be dumped into the pod result.
|
||||
|
@ -208,7 +208,7 @@ class Attributes(PodElement):
|
|||
self.computeAttributes(self.tiedExpression)
|
||||
# Now, self.attrs has been populated. Transform it into a string.
|
||||
res = ''
|
||||
for name, value in self.attrs.iteritems():
|
||||
for name, value in self.attrs.items():
|
||||
res += ' %s=%s' % (name, quoteattr(value))
|
||||
return res
|
||||
|
||||
|
|
|
@ -50,7 +50,7 @@ class OdInsert:
|
|||
def resolve(self, namespaces):
|
||||
'''Replaces all unresolved namespaces in p_odtChunk, thanks to the dict
|
||||
of p_namespaces.'''
|
||||
for nsName, nsUri in self.nsUris.iteritems():
|
||||
for nsName, nsUri in self.nsUris.items():
|
||||
self.odtChunk = re.sub('@%s@' % nsName, namespaces[nsUri],
|
||||
self.odtChunk)
|
||||
return self.odtChunk
|
||||
|
@ -126,14 +126,14 @@ class PodEnvironment(OdfEnvironment):
|
|||
res = {}
|
||||
for insert in self.inserts:
|
||||
elemName = insert.elem.getFullName(self.namespaces)
|
||||
if not res.has_key(elemName):
|
||||
if elemName not in res:
|
||||
res[elemName] = insert
|
||||
return res
|
||||
|
||||
def manageInserts(self):
|
||||
'''We just dumped the start of an elem. Here we will insert any odt
|
||||
chunk if needed.'''
|
||||
if self.inserts.has_key(self.currentElem.elem):
|
||||
if self.currentElem.elem in self.inserts:
|
||||
insert = self.inserts[self.currentElem.elem]
|
||||
self.currentBuffer.write(insert.resolve(self.namespaces))
|
||||
# The insert is destroyed after single use
|
||||
|
@ -160,12 +160,12 @@ class PodEnvironment(OdfEnvironment):
|
|||
elif elem == Cell.OD.elem:
|
||||
colspan = 1
|
||||
attrSpan = self.tags['number-columns-spanned']
|
||||
if self.currentElem.attrs.has_key(attrSpan):
|
||||
if attrSpan in self.currentElem.attrs:
|
||||
colspan = int(self.currentElem.attrs[attrSpan])
|
||||
self.getTable().curColIndex += colspan
|
||||
elif elem == self.tags['table-column']:
|
||||
attrs = self.currentElem.attrs
|
||||
if attrs.has_key(self.tags['number-columns-repeated']):
|
||||
if self.tags['number-columns-repeated'] in attrs:
|
||||
self.getTable().nbOfColumns += int(
|
||||
attrs[self.tags['number-columns-repeated']])
|
||||
else:
|
||||
|
@ -254,8 +254,8 @@ class PodParser(OdfParser):
|
|||
e.state = e.READING_EXPRESSION
|
||||
e.exprHasStyle = False
|
||||
elif (elem == e.tags['table-cell']) and \
|
||||
attrs.has_key(e.tags['formula']) and \
|
||||
attrs.has_key(e.tags['value-type']) and \
|
||||
e.tags['formula'] in attrs and \
|
||||
e.tags['value-type'] in attrs and \
|
||||
(attrs[e.tags['value-type']] == 'string') and \
|
||||
attrs[e.tags['formula']].startswith('of:="'):
|
||||
# In an ODS template, any cell containing a formula of type "string"
|
||||
|
|
|
@ -18,7 +18,11 @@
|
|||
|
||||
# ------------------------------------------------------------------------------
|
||||
import zipfile, shutil, xml.sax, os, os.path, re, mimetypes, time
|
||||
from UserDict import UserDict
|
||||
#python3 compat
|
||||
try:
|
||||
from UserDict import UserDict
|
||||
except ImportError:
|
||||
from collections import UserDict
|
||||
import appy.pod
|
||||
from appy.pod import PodError
|
||||
from appy.shared import mimeTypes, mimeTypesExts
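The import hunk above (and a matching one in the styles module further down) guards the UserDict import, since the UserDict class moved into the collections module in Python 3. The pattern in isolation:

# Python 2/3 compatibility: UserDict moved to the collections module.
try:
    from UserDict import UserDict         # Python 2
except ImportError:
    from collections import UserDict      # Python 3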
@ -80,7 +84,7 @@ CONTENT_POD_FONTS = '<@style@:font-face @style@:name="PodStarSymbol" ' \
|
|||
'@svg@:font-family="StarSymbol"/>'
|
||||
|
||||
# Default text styles added by pod in styles.xml
|
||||
f = file('%s/styles.in.styles.xml' % os.path.dirname(appy.pod.__file__))
|
||||
f = open('%s/styles.in.styles.xml' % os.path.dirname(appy.pod.__file__))
|
||||
STYLES_POD_STYLES = f.read()
|
||||
f.close()
|
||||
|
||||
|
@ -263,7 +267,7 @@ class Renderer:
|
|||
|
||||
imageFormats = ('png', 'jpeg', 'jpg', 'gif', 'svg')
|
||||
ooFormats = ('odt',)
|
||||
convertibleFormats = FILE_TYPES.keys()
|
||||
convertibleFormats = list(FILE_TYPES.keys())
|
||||
def importDocument(self, content=None, at=None, format=None,
|
||||
anchor='as-char', wrapInPara=True, size=None,
|
||||
sizeUnit='cm', style=None,
|
||||
|
@ -309,7 +313,7 @@ class Renderer:
|
|||
format = os.path.splitext(at)[1][1:]
|
||||
else:
|
||||
# If format is a mimeType, convert it to an extension
|
||||
if mimeTypesExts.has_key(format):
|
||||
if format in mimeTypesExts:
|
||||
format = mimeTypesExts[format]
|
||||
isImage = False
|
||||
isOdt = False
|
||||
|
@ -370,9 +374,9 @@ class Renderer:
|
|||
f = open(self.result, 'w')
|
||||
f.write('Hello')
|
||||
f.close()
|
||||
except OSError, oe:
|
||||
except OSError as oe:
|
||||
raise PodError(CANT_WRITE_RESULT % (self.result, oe))
|
||||
except IOError, ie:
|
||||
except IOError as ie:
|
||||
raise PodError(CANT_WRITE_RESULT % (self.result, ie))
|
||||
self.result = os.path.abspath(self.result)
|
||||
os.remove(self.result)
|
||||
|
@ -381,7 +385,7 @@ class Renderer:
|
|||
self.tempFolder = '%s.%f' % (absResult, time.time())
|
||||
try:
|
||||
os.mkdir(self.tempFolder)
|
||||
except OSError, oe:
|
||||
except OSError as oe:
|
||||
raise PodError(CANT_WRITE_TEMP_FOLDER % (self.result, oe))
|
||||
|
||||
def patchManifest(self):
|
||||
|
@ -390,7 +394,7 @@ class Renderer:
|
|||
if self.fileNames:
|
||||
j = os.path.join
|
||||
toInsert = ''
|
||||
for fileName in self.fileNames.iterkeys():
|
||||
for fileName in self.fileNames.keys():
|
||||
if fileName.endswith('.svg'):
|
||||
fileName = os.path.splitext(fileName)[0] + '.png'
|
||||
mimeType = mimetypes.guess_type(fileName)[0]
|
||||
|
@ -442,7 +446,7 @@ class Renderer:
|
|||
if 'span[font-style=italic]' not in stylesMapping:
|
||||
stylesMapping['span[font-style=italic]'] = 'podItalic'
|
||||
self.stylesManager.stylesMapping = stylesMapping
|
||||
except PodError, po:
|
||||
except PodError as po:
|
||||
self.contentParser.env.currentBuffer.content.close()
|
||||
self.stylesParser.env.currentBuffer.content.close()
|
||||
if os.path.exists(self.tempFolder):
|
||||
|
@ -454,14 +458,14 @@ class Renderer:
|
|||
loOutput = ''
|
||||
try:
|
||||
if (not isinstance(self.ooPort, int)) and \
|
||||
(not isinstance(self.ooPort, long)):
|
||||
(not isinstance(self.ooPort, int)):
|
||||
raise PodError(BAD_OO_PORT % str(self.ooPort))
|
||||
try:
|
||||
from appy.pod.converter import Converter, ConverterError
|
||||
try:
|
||||
Converter(resultName, resultType, self.ooPort,
|
||||
self.stylesTemplate).run()
|
||||
except ConverterError, ce:
|
||||
except ConverterError as ce:
|
||||
raise PodError(CONVERT_ERROR % str(ce))
|
||||
except ImportError:
|
||||
# I do not have UNO. So try to launch a UNO-enabled Python
|
||||
|
@ -485,13 +489,13 @@ class Renderer:
|
|||
self.ooPort)
|
||||
if self.stylesTemplate: cmd += ' -t%s' % self.stylesTemplate
|
||||
loOutput = executeCommand(cmd)
|
||||
except PodError, pe:
|
||||
except PodError as pe:
|
||||
# When trying to call LO in server mode for producing ODT or ODS
|
||||
# (=forceOoCall=True), if an error occurs we have nevertheless
|
||||
# an ODT or ODS to return to the user. So we produce a warning
|
||||
# instead of raising an error.
|
||||
if (resultType in self.templateTypes) and self.forceOoCall:
|
||||
print(WARNING_INCOMPLETE_OD % str(pe))
|
||||
print((WARNING_INCOMPLETE_OD % str(pe)))
|
||||
else:
|
||||
raise pe
|
||||
return loOutput
|
||||
|
@ -501,7 +505,7 @@ class Renderer:
|
|||
(ods or odt). If self.template is a string, it is a file name and we
|
||||
simply get its extension. Else, it is a binary file in a StringIO
|
||||
instance, and we seek the mime type from the first bytes.'''
|
||||
if isinstance(self.template, basestring):
|
||||
if isinstance(self.template, str):
|
||||
res = os.path.splitext(self.template)[1][1:]
|
||||
else:
|
||||
# A StringIO instance
|
||||
|
@ -534,8 +538,8 @@ class Renderer:
|
|||
if self.finalizeFunction:
|
||||
try:
|
||||
self.finalizeFunction(self.unzipFolder)
|
||||
except Exception, e:
|
||||
print(WARNING_FINALIZE_ERROR % str(e))
|
||||
except Exception as e:
|
||||
print((WARNING_FINALIZE_ERROR % str(e)))
|
||||
# Re-zip the result, first as an OpenDocument file of the same type as
|
||||
# the POD template (odt, ods...)
|
||||
resultExt = self.getTemplateType()
|
||||
|
|
|
@ -18,7 +18,12 @@
|
|||
|
||||
# ------------------------------------------------------------------------------
|
||||
import re, os.path
|
||||
from UserDict import UserDict
|
||||
#python3 compat
|
||||
try:
|
||||
from UserDict import UserDict
|
||||
except ImportError:
|
||||
from collections import UserDict
|
||||
|
||||
import appy.pod
|
||||
from appy.pod import *
|
||||
from appy.pod.odf_parser import OdfEnvironment, OdfParser
|
||||
|
@ -93,7 +98,7 @@ class Styles(UserDict):
|
|||
'''Tries to find a style which has level p_level. Returns None if no
|
||||
such style exists.'''
|
||||
res = None
|
||||
for style in self.itervalues():
|
||||
for style in self.values():
|
||||
if (style.family == 'paragraph') and (style.outlineLevel == level):
|
||||
res = style
|
||||
break
|
||||
|
@ -102,7 +107,7 @@ class Styles(UserDict):
|
|||
'''Gets the style that has this p_displayName. Returns None if not
|
||||
found.'''
|
||||
res = None
|
||||
for style in self.itervalues():
|
||||
for style in self.values():
|
||||
if style.displayName == displayName:
|
||||
res = style
|
||||
break
|
||||
|
@ -111,9 +116,9 @@ class Styles(UserDict):
|
|||
'''Returns a list of all the styles of the given p_stylesType.'''
|
||||
res = []
|
||||
if stylesType == 'all':
|
||||
res = self.values()
|
||||
res = list(self.values())
|
||||
else:
|
||||
for style in self.itervalues():
|
||||
for style in self.values():
|
||||
if (style.family == stylesType) and style.displayName:
|
||||
res.append(style)
|
||||
return res
|
||||
|
@ -145,22 +150,22 @@ class StylesParser(OdfParser):
|
|||
displayNameAttr = '%s:display-name' % e.ns(e.NS_STYLE)
|
||||
# Create the style
|
||||
style = Style(name=attrs[nameAttr], family=attrs[familyAttr])
|
||||
if attrs.has_key(classAttr):
|
||||
if classAttr in attrs:
|
||||
style.styleClass = attrs[classAttr]
|
||||
if attrs.has_key(displayNameAttr):
|
||||
if displayNameAttr in attrs:
|
||||
style.displayName = attrs[displayNameAttr]
|
||||
# Record this style in the environment
|
||||
e.styles[style.name] = style
|
||||
e.currentStyle = style
|
||||
levelKey = '%s:default-outline-level' % e.ns(e.NS_STYLE)
|
||||
if attrs.has_key(levelKey) and attrs[levelKey].strip():
|
||||
if levelKey in attrs and attrs[levelKey].strip():
|
||||
style.outlineLevel = int(attrs[levelKey])
|
||||
else:
|
||||
if e.state == PARSING_STYLE:
|
||||
# I am parsing tags within the style.
|
||||
if elem == ('%s:text-properties' % e.ns(e.NS_STYLE)):
|
||||
fontSizeKey = '%s:font-size' % e.ns(e.NS_FO)
|
||||
if attrs.has_key(fontSizeKey):
|
||||
if fontSizeKey in attrs:
|
||||
e.currentStyle.setFontSize(attrs[fontSizeKey])
|
||||
def endElement(self, elem):
|
||||
e = OdfParser.endElement(self, elem)
|
||||
|
@ -250,14 +255,14 @@ class StylesManager:
|
|||
if not isinstance(stylesMapping, dict) and \
|
||||
not isinstance(stylesMapping, UserDict):
|
||||
raise PodError(MAPPING_NOT_DICT)
|
||||
for xhtmlStyleName, odtStyleName in stylesMapping.iteritems():
|
||||
if not isinstance(xhtmlStyleName, basestring):
|
||||
for xhtmlStyleName, odtStyleName in stylesMapping.items():
|
||||
if not isinstance(xhtmlStyleName, str):
|
||||
raise PodError(MAPPING_ELEM_NOT_STRING)
|
||||
if (xhtmlStyleName == 'h*') and \
|
||||
not isinstance(odtStyleName, int):
|
||||
raise PodError(MAPPING_OUTLINE_DELTA_NOT_INT)
|
||||
if (xhtmlStyleName != 'h*') and \
|
||||
not isinstance(odtStyleName, basestring):
|
||||
not isinstance(odtStyleName, str):
|
||||
raise PodError(MAPPING_ELEM_NOT_STRING)
|
||||
if (xhtmlStyleName != 'h*') and \
|
||||
((not xhtmlStyleName) or (not odtStyleName)):
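The StylesManager hunks above and below replace isinstance checks against basestring and unicode with str, since Python 3 has a single text type. A trivial illustration:

s = 'hello'
# Python 2: isinstance(s, basestring) matched both str and unicode values.
# Python 3: str is the only text type, so the check below is enough.
print(isinstance(s, str))   # True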
@ -278,7 +283,7 @@ class StylesManager:
|
|||
if xhtmlStyleName != 'h*':
|
||||
odtStyle = self.styles.getStyle(odtStyleName)
|
||||
if not odtStyle:
|
||||
if self.podSpecificStyles.has_key(odtStyleName):
|
||||
if odtStyleName in self.podSpecificStyles:
|
||||
odtStyle = self.podSpecificStyles[odtStyleName]
|
||||
else:
|
||||
raise PodError(STYLE_NOT_FOUND % odtStyleName)
|
||||
|
@ -311,7 +316,7 @@ class StylesManager:
|
|||
This method returns True if p_attrs contains the winning (name,value)
|
||||
pairs that match those in p_matchingAttrs. Note that ALL attrs in
|
||||
p_matchingAttrs must be present in p_attrs.'''
|
||||
for name, value in matchingAttrs.iteritems():
|
||||
for name, value in matchingAttrs.items():
|
||||
if name not in attrs: return
|
||||
if value != attrs[name]: return
|
||||
return True
|
||||
|
@ -356,29 +361,29 @@ class StylesManager:
|
|||
'''
|
||||
res = None
|
||||
cssStyleName = None
|
||||
if attrs and attrs.has_key('class'):
|
||||
if attrs and 'class' in attrs:
|
||||
cssStyleName = attrs['class']
|
||||
if classValue:
|
||||
cssStyleName = classValue
|
||||
# (1)
|
||||
if localStylesMapping.has_key(cssStyleName):
|
||||
if cssStyleName in localStylesMapping:
|
||||
res = localStylesMapping[cssStyleName]
|
||||
# (2)
|
||||
if (not res) and localStylesMapping.has_key(elem):
|
||||
if (not res) and elem in localStylesMapping:
|
||||
styles = localStylesMapping[elem]
|
||||
res = self.getStyleFromMapping(elem, attrs, styles)
|
||||
# (3)
|
||||
if (not res) and self.stylesMapping.has_key(cssStyleName):
|
||||
if (not res) and cssStyleName in self.stylesMapping:
|
||||
res = self.stylesMapping[cssStyleName]
|
||||
# (4)
|
||||
if (not res) and self.stylesMapping.has_key(elem):
|
||||
if (not res) and elem in self.stylesMapping:
|
||||
styles = self.stylesMapping[elem]
|
||||
res = self.getStyleFromMapping(elem, attrs, styles)
|
||||
# (5)
|
||||
if (not res) and self.styles.has_key(cssStyleName):
|
||||
if (not res) and cssStyleName in self.styles:
|
||||
res = self.styles[cssStyleName]
|
||||
# (6)
|
||||
if (not res) and self.podSpecificStyles.has_key(cssStyleName):
|
||||
if (not res) and cssStyleName in self.podSpecificStyles:
|
||||
res = self.podSpecificStyles[cssStyleName]
|
||||
# (7)
|
||||
if not res:
|
||||
|
@ -386,9 +391,9 @@ class StylesManager:
|
|||
if elem in XHTML_HEADINGS:
|
||||
# Is there a delta that must be taken into account ?
|
||||
outlineDelta = 0
|
||||
if localStylesMapping.has_key('h*'):
|
||||
if 'h*' in localStylesMapping:
|
||||
outlineDelta += localStylesMapping['h*']
|
||||
elif self.stylesMapping.has_key('h*'):
|
||||
elif 'h*' in self.stylesMapping:
|
||||
outlineDelta += self.stylesMapping['h*']
|
||||
outlineLevel = int(elem[1]) + outlineDelta
|
||||
# Normalize the outline level
|
||||
|
|
|
@ -38,7 +38,7 @@ class AnnotationsRemover(OdfParser):
|
|||
machine-specific info, like absolute paths to the python files, etc.'''
|
||||
def __init__(self, env, caller):
|
||||
OdfParser.__init__(self, env, caller)
|
||||
self.res = u''
|
||||
self.res = ''
|
||||
self.inAnnotation = False # Are we parsing an annotation ?
|
||||
self.textEncountered = False # Within an annotation, have we already
|
||||
# met a text ?
|
||||
|
@ -58,7 +58,7 @@ class AnnotationsRemover(OdfParser):
|
|||
self.ignore = True
|
||||
if not self.ignore:
|
||||
self.res += '<%s' % elem
|
||||
for attrName, attrValue in attrs.items():
|
||||
for attrName, attrValue in list(attrs.items()):
|
||||
self.res += ' %s="%s"' % (attrName, attrValue)
|
||||
self.res += '>'
|
||||
def endElement(self, elem):
|
||||
|
@ -93,12 +93,12 @@ class Test(appy.shared.test.Test):
|
|||
if not os.path.exists(contextPy):
|
||||
raise TesterError(CONTEXT_NOT_FOUND % contextPy)
|
||||
contextPkg = 'appy.pod.test.contexts.%s' % contextName
|
||||
exec 'import %s' % contextPkg
|
||||
exec 'context = dir(%s)' % contextPkg
|
||||
exec('import %s' % contextPkg)
|
||||
exec('context = dir(%s)' % contextPkg)
|
||||
res = {}
|
||||
for elem in context:
|
||||
if not elem.startswith('__'):
|
||||
exec 'res[elem] = %s.%s' % (contextPkg, elem)
|
||||
exec('res[elem] = %s.%s' % (contextPkg, elem))
|
||||
return res
|
||||
|
||||
def do(self):
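The Test hunk above converts the Python 2 exec statement into the exec() function call, the only form Python 3 accepts. A reduced example of the function form (illustrative only):

ns = {}
exec('res = 1 + 1', ns)   # exec is a builtin function in Python 3
print(ns['res'])          # 2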
@ -1,6 +1,6 @@
|
|||
class Student:
|
||||
def __init__(self, **kwargs):
|
||||
for k, v in kwargs.iteritems():
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
students = [
|
||||
|
|
|
@ -47,7 +47,7 @@ class HtmlElement:
|
|||
# but for a strange reason those attrs are back to None (probably for
|
||||
# performance reasons they become inaccessible after a while).
|
||||
self.classAttr = None
|
||||
if attrs.has_key('class'):
|
||||
if 'class' in attrs:
|
||||
self.classAttr = attrs['class']
|
||||
self.tagsToReopen = [] # When the HTML element corresponding to self
|
||||
# is completely dumped, if there was a problem related to tags
|
||||
|
@ -58,7 +58,7 @@ class HtmlElement:
|
|||
# to self, we may need to close other tags (ie closing a paragraph
|
||||
# before closing a cell). This list contains HtmlElement instances.
|
||||
self.elemType = self.elem
|
||||
if self.elemTypes.has_key(self.elem):
|
||||
if self.elem in self.elemTypes:
|
||||
self.elemType = self.elemTypes[self.elem]
|
||||
# If a conflict occurs on this element, we will note it.
|
||||
self.isConflictual = False
|
||||
|
@ -71,7 +71,7 @@ class HtmlElement:
|
|||
def getOdfTag(self, env):
|
||||
'''Gets the raw ODF tag that corresponds to me'''
|
||||
res = ''
|
||||
if HTML_2_ODT.has_key(self.elem):
|
||||
if self.elem in HTML_2_ODT:
|
||||
res += '%s:%s' % (env.textNs, HTML_2_ODT[self.elem])
|
||||
elif self.elem == 'a':
|
||||
res += '%s:a' % env.textNs
|
||||
|
@ -216,8 +216,8 @@ class HtmlTable:
|
|||
elems = str(time.time()).split('.')
|
||||
self.name= 'AppyTable%s%s%d' % (elems[0],elems[1],random.randint(1,100))
|
||||
self.styleNs = env.ns[OdfEnvironment.NS_STYLE]
|
||||
self.res = u'' # The sub-buffer
|
||||
self.tempRes = u'' # The temporary sub-buffer, into which we will
|
||||
self.res = '' # The sub-buffer
|
||||
self.tempRes = '' # The temporary sub-buffer, into which we will
|
||||
# dump all table sub-elements, until we encounter the end of the first
|
||||
# row. Then, we will know how much columns are defined in the table;
|
||||
# we will dump columns declarations into self.res and dump self.tempRes
|
||||
|
@ -294,8 +294,8 @@ class XhtmlEnvironment(XmlEnvironment):
|
|||
XmlEnvironment.__init__(self)
|
||||
self.renderer = renderer
|
||||
self.ns = renderer.currentParser.env.namespaces
|
||||
self.res = u''
|
||||
self.currentContent = u''
|
||||
self.res = ''
|
||||
self.currentContent = ''
|
||||
self.currentElements = [] # Stack of currently walked elements
|
||||
self.currentLists = [] # Stack of currently walked lists (ul or ol)
|
||||
self.currentTables = [] # Stack of currently walked tables
|
||||
|
@ -349,7 +349,7 @@ class XhtmlEnvironment(XmlEnvironment):
|
|||
# Dump and reinitialize the current content
|
||||
contentSize = len(self.currentContent)
|
||||
self.dumpString(escapeXml(self.currentContent))
|
||||
self.currentContent = u''
|
||||
self.currentContent = ''
|
||||
# If we are within a table cell, update the total size of cell content
|
||||
if not contentSize: return
|
||||
if self.currentTables and self.currentTables[-1].inCell:
|
||||
|
@ -363,7 +363,7 @@ class XhtmlEnvironment(XmlEnvironment):
|
|||
styleName = None
|
||||
if odtStyle:
|
||||
styleName = odtStyle.name
|
||||
elif DEFAULT_ODT_STYLES.has_key(htmlElem.elem):
|
||||
elif htmlElem.elem in DEFAULT_ODT_STYLES:
|
||||
styleName = DEFAULT_ODT_STYLES[htmlElem.elem]
|
||||
res = ''
|
||||
if styleName:
|
||||
|
@ -458,7 +458,7 @@ class XhtmlEnvironment(XmlEnvironment):
|
|||
elif elem in TABLE_CELL_TAGS:
|
||||
# Determine colspan
|
||||
colspan = 1
|
||||
if attrs.has_key('colspan'): colspan = int(attrs['colspan'])
|
||||
if 'colspan' in attrs: colspan = int(attrs['colspan'])
|
||||
table = self.currentTables[-1]
|
||||
table.inCell = colspan
|
||||
table.cellIndex += colspan
|
||||
|
@ -503,7 +503,7 @@ class XhtmlEnvironment(XmlEnvironment):
|
|||
table.res+= '<%s:table-column %s:style-name="%s.%d"/>' % \
|
||||
(self.tableNs, self.tableNs, table.name, i)
|
||||
table.res += table.tempRes
|
||||
table.tempRes = u''
|
||||
table.tempRes = ''
|
||||
elif elem in TABLE_CELL_TAGS:
|
||||
# Update attr "columnContentSizes" of the currently parsed table,
|
||||
# excepted if the cell spans several columns.
|
||||
|
@ -535,7 +535,7 @@ class XhtmlParser(XmlParser):
|
|||
resAttrs = attrs
|
||||
if attrs:
|
||||
resAttrs = {}
|
||||
for attrName in attrs.keys():
|
||||
for attrName in list(attrs.keys()):
|
||||
resAttrs[attrName.lower()] = attrs[attrName]
|
||||
if attrs == None:
|
||||
return resElem
|
||||
|
@ -548,11 +548,11 @@ class XhtmlParser(XmlParser):
|
|||
currentElem = e.onElementStart(elem, attrs)
|
||||
odfTag = currentElem.getOdfTag(e)
|
||||
|
||||
if HTML_2_ODT.has_key(elem):
|
||||
if elem in HTML_2_ODT:
|
||||
e.dumpStyledElement(currentElem, odfTag, attrs)
|
||||
elif elem == 'a':
|
||||
e.dumpString('<%s %s:type="simple"' % (odfTag, e.linkNs))
|
||||
if attrs.has_key('href'):
|
||||
if 'href' in attrs:
|
||||
e.dumpString(' %s:href="%s"' % (e.linkNs,
|
||||
escapeXml(attrs['href'])))
|
||||
e.dumpString('>')
|
||||
|
@ -577,13 +577,13 @@ class XhtmlParser(XmlParser):
|
|||
elif elem in TABLE_CELL_TAGS:
|
||||
e.dumpString('<%s %s:style-name="%s"' % \
|
||||
(odfTag, e.tableNs, DEFAULT_ODT_STYLES[elem]))
|
||||
if attrs.has_key('colspan'):
|
||||
if 'colspan' in attrs:
|
||||
e.dumpString(' %s:number-columns-spanned="%s"' % \
|
||||
(e.tableNs, attrs['colspan']))
|
||||
e.dumpString('>')
|
||||
elif elem == 'img':
|
||||
style = None
|
||||
if attrs.has_key('style'): style = attrs['style']
|
||||
if 'style' in attrs: style = attrs['style']
|
||||
imgCode = e.renderer.importDocument(at=attrs['src'],
|
||||
wrapInPara=False, style=style)
|
||||
e.dumpString(imgCode)
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
# ------------------------------------------------------------------------------
|
||||
import xml.sax
|
||||
from px_parser import PxParser, PxEnvironment
|
||||
from .px_parser import PxParser, PxEnvironment
|
||||
from appy.pod.buffers import MemoryBuffer
|
||||
from appy.shared.xml_parser import xmlPrologue, xhtmlPrologue
|
||||
|
||||
|
@ -18,7 +18,7 @@ class Px:
|
|||
xhtmlPrologue = xhtmlPrologue
|
||||
|
||||
def __init__(self, content, isFileName=False, partial=True,
|
||||
template=None, hook=None, prologue=None, unicode=True):
|
||||
template=None, hook=None, prologue=None, str=True):
|
||||
'''p_content is the PX code, as a string, or a file name if p_isFileName
|
||||
is True. If this code represents a complete XML file, p_partial is
|
||||
False. Else, we must surround p_content with a root tag to be able
|
||||
|
@ -49,7 +49,7 @@ class Px:
|
|||
# Is there some (XML, XHTML...) prologue to dump?
|
||||
self.prologue = prologue
|
||||
# Will the result be unicode or str?
|
||||
self.unicode = unicode
|
||||
self.str = str
|
||||
self.parse()
|
||||
|
||||
def parse(self):
|
||||
|
@ -64,7 +64,7 @@ class Px:
|
|||
# produce a tree of memory buffers.
|
||||
try:
|
||||
self.parser.parse(self.content)
|
||||
except xml.sax.SAXParseException, spe:
|
||||
except xml.sax.SAXParseException as spe:
|
||||
self.completeErrorMessage(spe)
|
||||
raise spe
|
||||
|
||||
|
@ -110,7 +110,7 @@ class Px:
|
|||
res = result.content
|
||||
if self.prologue:
|
||||
res = self.prologue + res
|
||||
if not self.unicode:
|
||||
if not self.str:
|
||||
res = res.encode('utf-8')
|
||||
return res
|
||||
|
||||
|
|
|
@ -66,7 +66,7 @@ class PxParser(XmlParser):
|
|||
# See if we have a PX attribute among p_attrs.
|
||||
found = False
|
||||
for name in self.pxAttributes:
|
||||
if attrs.has_key(name):
|
||||
if name in attrs:
|
||||
if not found:
|
||||
# This is the first PX attr we find.
|
||||
# Create a sub-buffer with an action.
|
||||
|
@ -87,7 +87,7 @@ class PxParser(XmlParser):
|
|||
hook = None
|
||||
ignorableAttrs = self.pxAttributes
|
||||
for name in self.noDumpTags:
|
||||
if attrs.has_key(name) and attrs[name].startswith(':'):
|
||||
if name in attrs and attrs[name].startswith(':'):
|
||||
hook = (name, attrs[name][1:])
|
||||
ignorableAttrs += (name,)
|
||||
break
|
||||
|
|
|
@ -50,12 +50,12 @@ class UnicodeBuffer:
|
|||
self.buffer = []
|
||||
def write(self, s):
|
||||
if s == None: return
|
||||
if isinstance(s, unicode):
|
||||
if isinstance(s, str):
|
||||
self.buffer.append(s)
|
||||
elif isinstance(s, str):
|
||||
self.buffer.append(s.decode('utf-8'))
|
||||
else:
|
||||
self.buffer.append(unicode(s))
|
||||
self.buffer.append(str(s))
|
||||
def getValue(self):
|
||||
return u''.join(self.buffer)
|
||||
return ''.join(self.buffer)
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
@ -109,18 +109,18 @@ class CsvParser:
|
|||
def convertValue(self, value, basicType):
|
||||
'''Converts the atomic p_value which is a string into some other atomic
|
||||
Python type specified in p_basicType (int, float, ...).'''
|
||||
if (basicType != str) and (basicType != unicode):
|
||||
if (basicType != str) and (basicType != str):
|
||||
try:
|
||||
exec 'res = %s' % str(value)
|
||||
except SyntaxError, se:
|
||||
exec('res = %s' % str(value))
|
||||
except SyntaxError as se:
|
||||
res = None
|
||||
else:
|
||||
try:
|
||||
exec 'res = """%s"""' % str(value)
|
||||
except SyntaxError, se:
|
||||
exec('res = """%s"""' % str(value))
|
||||
except SyntaxError as se:
|
||||
try:
|
||||
exec "res = '''%s'''" % str(value)
|
||||
except SyntaxError, se:
|
||||
exec("res = '''%s'''" % str(value))
|
||||
except SyntaxError as se:
|
||||
res = None
|
||||
return res
|
||||
|
||||
|
@ -183,7 +183,7 @@ class CsvParser:
|
|||
self.res.append(lineObject)
|
||||
f.close()
|
||||
# The second pass resolves the p_references if any
|
||||
for attrName, refInfo in self.references.iteritems():
|
||||
for attrName, refInfo in self.references.items():
|
||||
if attrName in self.attributes:
|
||||
# Replace ID with real object from p_references
|
||||
for obj in self.res:
|
||||
|
@ -228,7 +228,7 @@ class CsvMarshaller:
|
|||
|
||||
def marshallValue(self, field, value):
|
||||
'''Produces a version of p_value that can be dumped in a CSV file.'''
|
||||
if isinstance(value, basestring):
|
||||
if isinstance(value, str):
|
||||
# Format the string as a one-line CSV-ready value
|
||||
res = self.marshallString(value)
|
||||
elif type(value) in sequenceTypes:
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# ------------------------------------------------------------------------------
|
||||
import os, re, httplib, sys, stat, urlparse, time, socket, xml.sax
|
||||
import urllib
|
||||
import os, re, http.client, sys, stat, urllib.parse, time, socket, xml.sax
|
||||
from urllib.parse import quote
|
||||
from StringIO import StringIO
|
||||
from mimetypes import guess_type
|
||||
from base64 import encodestring
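This import hunk follows the Python 3 renaming of httplib to http.client and the move of quote/urlparse helpers into urllib.parse. A minimal sketch of the renamed module (no request is actually sent):

import http.client
conn = http.client.HTTPConnection('example.com', 80)   # constructing does not open the socket
# conn.request('GET', '/'); resp = conn.getresponse()

Two side notes on the remaining imports: base64.encodestring was deprecated in Python 3 and removed in 3.9 in favour of base64.encodebytes, and the unchanged "from StringIO import StringIO" line still points at the Python 2 module (Python 3 provides io.StringIO instead).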
@ -19,12 +19,12 @@ class FormDataEncoder:
|
|||
|
||||
def marshalValue(self, name, value):
|
||||
if isinstance(value, basestring):
|
||||
return '%s=%s' % (name, urllib.quote(str(value)))
|
||||
return '%s=%s' % (name, quote(str(value)))
|
||||
elif isinstance(value, float):
|
||||
return '%s:float=%s' % (name, value)
|
||||
elif isinstance(value, int):
|
||||
return '%s:int=%s' % (name, value)
|
||||
elif isinstance(value, long):
|
||||
elif isinstance(value, int):
|
||||
res = '%s:long=%s' % (name, value)
|
||||
if res[-1] == 'L':
|
||||
res = res[:-1]
|
||||
|
@ -101,8 +101,8 @@ class HttpResponse:
|
|||
redirect the user to if self.code is 302, or will unmarshall XML
|
||||
data into Python objects.'''
|
||||
if self.code == 302:
|
||||
return urlparse.urlparse(self.headers['location'])[2]
|
||||
elif self.headers.has_key('content-type'):
|
||||
return urllib.parse.urlparse(self.headers['location'])[2]
|
||||
elif 'content-type' in self.headers:
|
||||
contentType = self.extractContentType(self.headers['content-type'])
|
||||
for xmlHeader in self.xmlHeaders:
|
||||
if contentType.startswith(xmlHeader):
|
||||
|
@ -116,7 +116,7 @@ class HttpResponse:
|
|||
raise ResourceError('Distant server exception: ' \
|
||||
'%s' % res)
|
||||
return res
|
||||
except xml.sax.SAXParseException, se:
|
||||
except xml.sax.SAXParseException as se:
|
||||
raise ResourceError('Invalid XML response (%s)'%str(se))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -158,7 +158,7 @@ class Resource:
|
|||
def updateHeaders(self, headers):
|
||||
# Add credentials if present
|
||||
if not (self.username and self.password): return
|
||||
if headers.has_key('Authorization'): return
|
||||
if 'Authorization' in headers: return
|
||||
credentials = '%s:%s' % (self.username, self.password)
|
||||
credentials = credentials.replace('\012', '')
|
||||
headers['Authorization'] = "Basic %s" % encodestring(credentials)
|
||||
|
@ -170,19 +170,19 @@ class Resource:
|
|||
|
||||
def send(self, method, uri, body=None, headers={}, bodyType=None):
|
||||
'''Sends a HTTP request with p_method, for p_uri.'''
|
||||
conn = httplib.HTTPConnection(self.host, self.port)
|
||||
conn = http.client.HTTPConnection(self.host, self.port)
|
||||
try:
|
||||
conn.connect()
|
||||
except socket.gaierror, sge:
|
||||
except socket.gaierror as sge:
|
||||
raise ResourceError('Check your Internet connection (%s)'% str(sge))
|
||||
except socket.error, se:
|
||||
except socket.error as se:
|
||||
raise ResourceError('Connection error (%s)' % str(se))
|
||||
# Tell what kind of HTTP request it will be.
|
||||
conn.putrequest(method, uri, skip_host=True)
|
||||
# Add HTTP headers
|
||||
self.updateHeaders(headers)
|
||||
if self.headers: headers.update(self.headers)
|
||||
for n, v in headers.items(): conn.putheader(n, v)
|
||||
for n, v in list(headers.items()): conn.putheader(n, v)
|
||||
conn.endheaders()
|
||||
# Add HTTP body
|
||||
if body:
|
||||
|
|
|
@ -250,7 +250,7 @@ class HtmlDiff:
|
|||
deleteStyle = 'color: red; text-decoration: line-through; cursor: help'
|
||||
|
||||
def __init__(self, old, new,
|
||||
insertMsg=u'Inserted text', deleteMsg=u'Deleted text',
|
||||
insertMsg='Inserted text', deleteMsg='Deleted text',
|
||||
insertCss=None, deleteCss=None, insertName='insert',
|
||||
deleteName='delete', diffRatio=0.7):
|
||||
# p_old and p_new are strings containing chunks of HTML. If they are not
|
||||
|
@ -306,24 +306,24 @@ class HtmlDiff:
|
|||
else: tag = 'span'
|
||||
# What message will it show in its 'title' attribute?
|
||||
if not msg:
|
||||
exec 'msg = self.%sMsg' % type
|
||||
exec('msg = self.%sMsg' % type)
|
||||
# What CSS class (or, if none, tag-specific style) will be used ?
|
||||
exec 'cssClass = self.%sCss' % type
|
||||
exec('cssClass = self.%sCss' % type)
|
||||
if cssClass:
|
||||
style = 'class="%s"' % cssClass
|
||||
else:
|
||||
exec 'style = self.%sStyle' % type
|
||||
exec('style = self.%sStyle' % type)
|
||||
style = 'style="%s"' % style
|
||||
# The 'name' attribute of the tag indicates the type of the update.
|
||||
exec 'tagName = self.%sName' % type
|
||||
exec('tagName = self.%sName' % type)
|
||||
# The idea is: if there are several lines, every line must be surrounded
|
||||
# by a tag. This way, we know that a surrounding tag can't span several
|
||||
# lines, which is a prerequisite for managing cumulative diffs.
|
||||
if sep == ' ':
|
||||
if not isinstance(seq, basestring):
|
||||
if not isinstance(seq, str):
|
||||
seq = sep.join(seq)
|
||||
sep = ''
|
||||
if isinstance(seq, basestring):
|
||||
if isinstance(seq, str):
|
||||
return '%s<%s name="%s" %s title="%s">%s</%s>%s' % \
|
||||
(sep, tag, tagName, style, msg, seq, tag, sep)
|
||||
else:
|
||||
|
@ -592,7 +592,7 @@ class HtmlDiff:
|
|||
try:
|
||||
merger = Merger(lineA, add, previousDiffsA, self)
|
||||
add = merger.merge()
|
||||
except Merger.MergeError, e:
|
||||
except Merger.MergeError as e:
|
||||
# The merge algorithm has made a burn out. Simplify and
|
||||
# consider lineA has having been completely deleted and
|
||||
# lineB has completely inserted.
|
||||
|
|
|
@ -239,14 +239,14 @@ class LdapConnector:
|
|||
self.server = ldap.initialize(self.serverUri)
|
||||
self.server.simple_bind_s(login, password)
|
||||
return True, None
|
||||
except AttributeError, ae:
|
||||
except AttributeError as ae:
|
||||
# When the ldap module is not there, trying to catch ldap.LDAPError
|
||||
# will raise an error.
|
||||
message = str(ae)
|
||||
self.log('Ldap connect error with login %s (%s).' % \
|
||||
(login, message))
|
||||
return False, message
|
||||
except ldap.LDAPError, le:
|
||||
except ldap.LDAPError as le:
|
||||
message = str(le)
|
||||
self.log('%s: connect error with login %s (%s).' % \
|
||||
(self.serverUri, login, message))
|
||||
|
@ -275,7 +275,7 @@ class LdapConnector:
|
|||
timeout=self.timeout)
|
||||
except ldap.TIMEOUT:
|
||||
pass
|
||||
except ldap.LDAPError, le:
|
||||
except ldap.LDAPError as le:
|
||||
self.log('LDAP query error %s: %s' % \
|
||||
(le.__class__.__name__, str(le)))
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
@ -160,20 +160,20 @@ class Debianizer:
|
|||
name = '%s/%sctl' % (binFolder, self.appNameLower)
|
||||
f = file(name, 'w')
|
||||
f.write(appCtl % self.appNameLower)
|
||||
os.chmod(name, 0744) # Make it executable by owner.
|
||||
os.chmod(name, 0o744) # Make it executable by owner.
|
||||
f.close()
|
||||
# <app>run
|
||||
name = '%s/%srun' % (binFolder, self.appNameLower)
|
||||
f = file(name, 'w')
|
||||
f.write(appRun % self.appNameLower)
|
||||
os.chmod(name, 0744) # Make it executable by owner.
|
||||
os.chmod(name, 0o744) # Make it executable by owner.
|
||||
f.close()
|
||||
# startoo
|
||||
name = '%s/startoo' % binFolder
|
||||
f = file(name, 'w')
|
||||
f.write(ooStart)
|
||||
f.close()
|
||||
os.chmod(name, 0744) # Make it executable by owner.
|
||||
os.chmod(name, 0o744) # Make it executable by owner.
|
||||
# /var/lib/<app> (will store Data.fs, lock files, etc)
|
||||
varLibFolder = j(debFolder, 'var', 'lib', self.appNameLower)
|
||||
os.makedirs(varLibFolder)
|
||||
|
@ -208,7 +208,7 @@ class Debianizer:
|
|||
'application.' % n, '%sctl start' % n,
|
||||
'%sctl restart' % n, '%sctl stop' % n))
|
||||
f.close()
|
||||
os.chmod(name, 0744) # Make it executable by owner.
|
||||
os.chmod(name, 0o744) # Make it executable by owner.
|
||||
# /etc/init.d/oo (start OpenOffice at boot time)
|
||||
name = '%s/oo' % initdFolder
|
||||
f = file(name, 'w')
|
||||
|
@ -216,7 +216,7 @@ class Debianizer:
|
|||
'startoo', 'startoo', "#Can't stop OO."))
|
||||
f.write('\n')
|
||||
f.close()
|
||||
os.chmod(name, 0744) # Make it executable by owner.
|
||||
os.chmod(name, 0o744) # Make it executable by owner.
|
||||
# Get the size of the app, in Kb.
|
||||
os.chdir(tempFolder)
|
||||
cmd = subprocess.Popen(['du', '-b', '-s', 'debian'],
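The Debianizer hunks above rewrite old-style octal literals: Python 3 only accepts the 0o prefix, and the bare leading-zero form is a syntax error there. For example:

mode = 0o744          # valid in Python 2.6+ and in Python 3
print(oct(mode))      # prints 0o744 under Python 3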
@ -25,8 +25,8 @@
|
|||
be strictly greater than 1.'''
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
import re, sys, UserList, UserDict
|
||||
from StringIO import StringIO
|
||||
import re, sys, collections, UserDict
|
||||
from io import StringIO
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
class ParserError(Exception): pass
|
||||
|
@ -69,7 +69,7 @@ LIST_VALUE_ERROR = 'Value "%s" is malformed: within it, %s. You should check ' \
|
|||
|
||||
# -----------------------------------------------------------------------------
|
||||
class Type:
|
||||
basicTypes = {'f': float, 'i':int, 'g':long, 'b':bool}
|
||||
basicTypes = {'f': float, 'i':int, 'g':int, 'b':bool}
|
||||
separators = ['-', ';', ',', ':']
|
||||
def __init__(self, typeDecl):
|
||||
self.basicType = None # The python basic type
|
||||
|
@ -85,12 +85,12 @@ class Type:
|
|||
self.listNumber += 1
|
||||
else:
|
||||
# Get the basic type
|
||||
if not (char in Type.basicTypes.keys()):
|
||||
if not (char in list(Type.basicTypes.keys())):
|
||||
raise TypeError(BASIC_TYPE_ERROR % char)
|
||||
self.basicType = Type.basicTypes[char]
|
||||
break
|
||||
if not self.basicType:
|
||||
self.basicType = unicode
|
||||
self.basicType = str
|
||||
def convertBasicValue(self, value):
|
||||
try:
|
||||
return self.basicType(value.strip())
|
||||
|
@ -136,7 +136,7 @@ class Type:
|
|||
elif not resIsComplete:
|
||||
try:
|
||||
res = self.convertListItem(value, separators)
|
||||
except TypeError, te:
|
||||
except TypeError as te:
|
||||
raise TypeError(LIST_VALUE_ERROR % (value, te, self.name))
|
||||
return res
|
||||
def convertListItem(self, stringItem, remainingSeps):
|
||||
|
@ -161,9 +161,9 @@ class Type:
|
|||
return self.name
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
class Table(UserList.UserList):
|
||||
class Table(collections.UserList):
|
||||
def __init__(self):
|
||||
UserList.UserList.__init__(self)
|
||||
collections.UserList.__init__(self)
|
||||
self.name = None
|
||||
self.parent = None
|
||||
self.parentRow = None
|
||||
|
@ -213,7 +213,7 @@ class TableRow(UserDict.UserDict):
|
|||
via the parent table self.table.'''
|
||||
keyError = False
|
||||
t = self.table
|
||||
if self.has_key(key):
|
||||
if key in self:
|
||||
res = UserDict.UserDict.__getitem__(self, key)
|
||||
else:
|
||||
# Get the parent row
|
||||
|
@ -259,9 +259,9 @@ class TableRow(UserDict.UserDict):
|
|||
# -----------------------------------------------------------------------------
|
||||
class NameResolver:
|
||||
def resolveNames(self, tables):
|
||||
for tableName, table in tables.iteritems():
|
||||
for tableName, table in tables.items():
|
||||
if table.parent:
|
||||
if not tables.has_key(table.parent):
|
||||
if table.parent not in tables:
|
||||
raise ParserError(PARENT_NOT_FOUND %
|
||||
(table.parent, table.name))
|
||||
table.parent = tables[table.parent]
|
||||
|
@ -330,10 +330,10 @@ class TableParser:
|
|||
def manageSpecialChar(self):
|
||||
specialChar = int(self.specialCharBuffer)
|
||||
self.specialCharBuffer = ''
|
||||
if self.specialChars.has_key(specialChar):
|
||||
if specialChar in self.specialChars:
|
||||
self.contentBuffer.write(self.specialChars[specialChar])
|
||||
else:
|
||||
print('Warning: char %d not known.' % specialChar)
|
||||
print(('Warning: char %d not known.' % specialChar))
|
||||
self.state = TableParser.READING_CONTENT
|
||||
def bufferize(self, char):
|
||||
if self.state == TableParser.READING_CONTROL_WORD:
|
||||
|
@ -403,7 +403,7 @@ class TableParser:
|
|||
columnNames.append(name.strip())
|
||||
try:
|
||||
columnTypes.append(Type(typeDecl.strip()))
|
||||
except TypeError, te:
|
||||
except TypeError as te:
|
||||
raise ParserError(TYPE_ERROR %
|
||||
(header, self.currentTableName, te))
|
||||
else:
|
||||
|
@ -449,7 +449,7 @@ class TableParser:
|
|||
if columnType:
|
||||
try:
|
||||
columnValue = columnType.convertValue(columnValue)
|
||||
except TypeError, te:
|
||||
except TypeError as te:
|
||||
raise ParserError(VALUE_ERROR %
|
||||
(columnName, self.currentTableName,
|
||||
te))
|
||||
|
@ -496,7 +496,7 @@ class RtfTablesParser:
|
|||
# -----------------------------------------------------------------------------
|
||||
if __name__ =='__main__':
|
||||
tables = RtfTablesParser("Tests.rtf").parse()
|
||||
for key, item in tables.iteritems():
|
||||
print('Table %s' % key)
|
||||
for key, item in tables.items():
|
||||
print(('Table %s' % key))
|
||||
print(item)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
|
|
@ -74,7 +74,7 @@ class Sap:
|
|||
try:
|
||||
self.sap = pysap.Rfc_connection(conn_string = params)
|
||||
self.sap.open()
|
||||
except pysap.BaseSapRfcError, se:
|
||||
except pysap.BaseSapRfcError as se:
|
||||
# Put in the error message the connection string without the
|
||||
# password.
|
||||
connNoPasswd = params[:params.index('PASSWD')] + 'PASSWD=********'
|
||||
|
@ -84,7 +84,7 @@ class Sap:
|
|||
'''Create a struct corresponding to SAP/C structure definition
|
||||
p_structDef and fills it with dict p_userData.'''
|
||||
res = structDef()
|
||||
for name, value in userData.iteritems():
|
||||
for name, value in userData.items():
|
||||
if name not in structDef._sfield_names_:
|
||||
raise SapError(SAP_STRUCT_ELEM_NOT_FOUND % (paramName, name))
|
||||
sapType = structDef._sfield_sap_types_[name]
|
||||
|
@ -93,7 +93,7 @@ class Sap:
|
|||
sType = '%s%d' % (sapType[0], sapType[1])
|
||||
# "None" value is tolerated.
|
||||
if value == None: value = ''
|
||||
if not isinstance(value, basestring):
|
||||
if not isinstance(value, str):
|
||||
raise SapError(
|
||||
SAP_STRING_REQUIRED % (name, paramName, sType))
|
||||
if len(value) > sapType[1]:
|
||||
|
@ -113,7 +113,7 @@ class Sap:
|
|||
functionName = self.functionName
|
||||
function = self.sap.get_interface(functionName)
|
||||
# Specify the parameters
|
||||
for name, value in params.iteritems():
|
||||
for name, value in params.items():
|
||||
if type(value) == dict:
|
||||
# The param corresponds to a SAP/C "struct"
|
||||
v = self.createStructure(
|
||||
|
@ -140,7 +140,7 @@ class Sap:
|
|||
function[name] = v
|
||||
# Call the function
|
||||
function()
|
||||
except pysap.BaseSapRfcError, se:
|
||||
except pysap.BaseSapRfcError as se:
|
||||
raise SapError(SAP_FUNCTION_ERROR % (functionName, str(se)))
|
||||
return SapResult(function)
|
||||
|
||||
|
@ -198,10 +198,10 @@ class Sap:
|
|||
self.sap.get_structure(typeName)
|
||||
res += '%s\n%s\n\n' % \
|
||||
(typeName, self.getTypeInfo(typeName))
|
||||
except pysap.BaseSapRfcError, ee:
|
||||
except pysap.BaseSapRfcError as ee:
|
||||
pass
|
||||
return res
|
||||
except pysap.BaseSapRfcError, se:
|
||||
except pysap.BaseSapRfcError as se:
|
||||
if se.value == 'FU_NOT_FOUND':
|
||||
raise SapError(SAP_FUNCTION_NOT_FOUND % (functionName))
|
||||
else:
|
||||
|
@ -229,6 +229,6 @@ class Sap:
|
|||
'''Disconnects from SAP.'''
|
||||
try:
|
||||
self.sap.close()
|
||||
except pysap.BaseSapRfcError, se:
|
||||
except pysap.BaseSapRfcError as se:
|
||||
raise SapError(SAP_DISCONNECT_ERROR % str(se))
|
||||
# ------------------------------------------------------------------------------
|
||||
|
|
|
@ -177,7 +177,7 @@ class Tester:
|
|||
(not isinstance(flavours, tuple)):
|
||||
raise TesterError(FLAVOURS_NOT_LIST)
|
||||
for flavour in flavours:
|
||||
if not isinstance(flavour, basestring):
|
||||
if not isinstance(flavour, str):
|
||||
raise TesterError(FLAVOUR_NOT_STRING)
|
||||
self.flavours = flavours
|
||||
self.flavour = None
|
||||
|
@ -198,7 +198,7 @@ class Tester:
|
|||
if self.flavour:
|
||||
ext = '.%s' % self.flavour
|
||||
configTableName = 'Configuration%s' % ext
|
||||
if self.tables.has_key(configTableName):
|
||||
if configTableName in self.tables:
|
||||
self.config = self.tables[configTableName].asDict()
|
||||
self.tempFolder = os.path.join(self.testFolder, 'temp')
|
||||
if os.path.exists(self.tempFolder):
|
||||
|
@ -249,11 +249,11 @@ class Tester:
|
|||
self.report.say('Test successful.\n')
|
||||
self.nbOfSuccesses += 1
|
||||
def run(self):
|
||||
assert self.tables.has_key('TestSuites'), \
|
||||
assert 'TestSuites' in self.tables, \
|
||||
TesterError(MAIN_TABLE_NOT_FOUND % self.testPlan)
|
||||
for testSuite in self.tables['TestSuites']:
|
||||
if (not testSuite.has_key('Name')) or \
|
||||
(not testSuite.has_key('Description')):
|
||||
if ('Name' not in testSuite) or \
|
||||
('Description' not in testSuite):
|
||||
raise TesterError(MAIN_TABLE_MALFORMED)
|
||||
if testSuite['Name'].startswith('_'):
|
||||
tsName = testSuite['Name'][1:]
|
||||
|
@ -261,8 +261,8 @@ class Tester:
|
|||
else:
|
||||
tsName = testSuite['Name']
|
||||
tsIgnored = False
|
||||
assert self.tables.has_key('%s.descriptions' % tsName) \
|
||||
and self.tables.has_key('%s.data' % tsName), \
|
||||
assert '%s.descriptions' % tsName in self.tables \
|
||||
and '%s.data' % tsName in self.tables, \
|
||||
TesterError(TEST_SUITE_NOT_FOUND % (tsName, tsName))
|
||||
assert len(self.tables['%s.descriptions' % tsName]) == \
|
||||
len(self.tables['%s.data' % tsName]), \
|
||||
|
|
|
@ -44,7 +44,7 @@ class FolderDeleter:
|
|||
dirName = os.path.dirname(dirName)
|
||||
else:
|
||||
break
|
||||
except OSError, oe:
|
||||
except OSError:
|
||||
break
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -53,7 +53,7 @@ def cleanFolder(folder, exts=extsToClean, folders=(), verbose=False):
|
|||
'''This function allows to remove, in p_folder and subfolders, any file
|
||||
whose extension is in p_exts, and any folder whose name is in
|
||||
p_folders.'''
|
||||
if verbose: print('Cleaning folder %s...' % folder)
|
||||
if verbose: print(('Cleaning folder %s...' % folder))
|
||||
# Remove files with an extension listed in p_exts
|
||||
if exts:
|
||||
for root, dirs, files in os.walk(folder):
|
||||
|
@ -61,7 +61,7 @@ def cleanFolder(folder, exts=extsToClean, folders=(), verbose=False):
|
|||
ext = os.path.splitext(fileName)[1]
|
||||
if (ext in exts) or ext.endswith('~'):
|
||||
fileToRemove = os.path.join(root, fileName)
|
||||
if verbose: print('Removing file %s...' % fileToRemove)
|
||||
if verbose: print(('Removing file %s...' % fileToRemove))
|
||||
os.remove(fileToRemove)
|
||||
# Remove folders whose names are in p_folders.
|
||||
if folders:
|
||||
|
@ -69,7 +69,7 @@ def cleanFolder(folder, exts=extsToClean, folders=(), verbose=False):
|
|||
for folderName in dirs:
|
||||
if folderName in folders:
|
||||
toDelete = os.path.join(root, folderName)
|
||||
if verbose: print('Removing folder %s...' % toDelete)
|
||||
if verbose: print(('Removing folder %s...' % toDelete))
|
||||
FolderDeleter.delete(toDelete)
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -142,14 +142,14 @@ def copyData(data, target, targetMethod, type='string', encoding=None,
|
|||
dump(encodeData(chunk, encoding))
|
||||
elif type == 'zope':
|
||||
# A OFS.Image.File instance can be split into several chunks
|
||||
if isinstance(data.data, basestring): # One chunk
|
||||
if isinstance(data.data, str): # One chunk
|
||||
dump(encodeData(data.data, encoding))
|
||||
else:
|
||||
# Several chunks
|
||||
data = data.data
|
||||
while data is not None:
|
||||
dump(encodeData(data.data, encoding))
|
||||
data = data.next
|
||||
data = data.__next__
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
def splitList(l, sub):
|
||||
|
@ -222,12 +222,12 @@ def getOsTempFolder():
|
|||
tmp = '/tmp'
|
||||
if os.path.exists(tmp) and os.path.isdir(tmp):
|
||||
res = tmp
|
||||
elif os.environ.has_key('TMP'):
|
||||
elif 'TMP' in os.environ:
|
||||
res = os.environ['TMP']
|
||||
elif os.environ.has_key('TEMP'):
|
||||
elif 'TEMP' in os.environ:
|
||||
res = os.environ['TEMP']
|
||||
else:
|
||||
raise "Sorry, I can't find a temp folder on your machine."
|
||||
raise Exception("Sorry, I can't find a temp folder on your machine.")
|
||||
return res
|
||||
|
||||
def getTempFileName(prefix='', extension=''):
|
||||
|
@ -273,10 +273,10 @@ def normalizeString(s, usage='fileName'):
|
|||
except UnicodeDecodeError:
|
||||
# Another encoding may be in use
|
||||
s = s.decode('latin-1')
|
||||
elif not isinstance(s, unicode): s = unicode(s)
|
||||
elif not isinstance(s, str): s = str(s)
|
||||
# For extracted text, replace any unwanted char with a blank
|
||||
if usage == 'extractedText':
|
||||
res = u''
|
||||
res = ''
|
||||
for char in s:
|
||||
if char not in extractIgnore: res += char
|
||||
else: res += ' '
|
||||
|
@ -291,7 +291,7 @@ def normalizeString(s, usage='fileName'):
|
|||
for char in s:
|
||||
if char not in fileNameIgnore: res += char
|
||||
elif usage.startswith('alpha'):
|
||||
exec 'rex = %sRex' % usage
|
||||
exec('rex = %sRex' % usage)
|
||||
res = ''
|
||||
for char in s:
|
||||
if rex.match(char): res += char
|
||||
|
@ -319,7 +319,7 @@ def keepDigits(s):
|
|||
def getStringDict(d):
|
||||
'''Gets the string literal corresponding to dict p_d.'''
|
||||
res = []
|
||||
for k, v in d.iteritems():
|
||||
for k, v in d.items():
|
||||
if type(v) not in sequenceTypes:
|
||||
if not isinstance(k, basestring): k = str(k)
|
||||
if not isinstance(v, basestring): v = str(v)
|
||||
|
@ -386,7 +386,7 @@ def formatNumber(n, sep=',', precision=2, tsep=' '):
|
|||
# ------------------------------------------------------------------------------
|
||||
def lower(s):
|
||||
'''French-accents-aware variant of string.lower.'''
|
||||
isUnicode = isinstance(s, unicode)
|
||||
isUnicode = isinstance(s, str)
|
||||
if not isUnicode: s = s.decode('utf-8')
|
||||
res = s.lower()
|
||||
if not isUnicode: res = res.encode('utf-8')
|
||||
|
@ -394,14 +394,14 @@ def lower(s):
|
|||
|
||||
def upper(s):
|
||||
'''French-accents-aware variant of string.upper.'''
|
||||
isUnicode = isinstance(s, unicode)
|
||||
isUnicode = isinstance(s, str)
|
||||
if not isUnicode: s = s.decode('utf-8')
|
||||
res = s.upper()
|
||||
if not isUnicode: res = res.encode('utf-8')
|
||||
return res
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
typeLetters = {'b': bool, 'i': int, 'j': long, 'f':float, 's':str, 'u':unicode,
|
||||
typeLetters = {'b': bool, 'i': int, 'j': int, 'f':float, 's':str, 'u':str,
|
||||
'l': list, 'd': dict}
|
||||
caExts = {'py': ('.py', '.vpy', '.cpy'), 'xml': ('.pt', '.cpt', '.xml')}
|
||||
|
||||
|
@ -506,8 +506,8 @@ class CodeAnalysis:
        if not lines: return
        commentRate = (self.commentLines / float(lines)) * 100.0
        blankRate = (self.emptyLines / float(lines)) * 100.0
        print('%s: %d files, %d lines (%.0f%% comments, %.0f%% blank)' % \
              (self.name, self.numberOfFiles, lines, commentRate, blankRate))
        print(('%s: %d files, %d lines (%.0f%% comments, %.0f%% blank)' % \
              (self.name, self.numberOfFiles, lines, commentRate, blankRate)))

# ------------------------------------------------------------------------------
class LinesCounter:

@ -516,7 +516,7 @@ class LinesCounter:
                       '%stemp' % os.sep)

    def __init__(self, folderOrModule, excludes=None):
        if isinstance(folderOrModule, basestring):
        if isinstance(folderOrModule, str):
            # It is the path of some folder
            self.folder = folderOrModule
        else:

@ -624,10 +624,10 @@ class FileWrapper:
        if self.content.__class__.__name__ == 'Pdata':
            # The file content is splitted in several chunks.
            f.write(self.content.data)
            nextPart = self.content.next
            nextPart = self.content.__next__
            while nextPart:
                f.write(nextPart.data)
                nextPart = nextPart.next
                nextPart = nextPart.__next__
        else:
            # Only one chunk
            f.write(self.content)
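For context on the FileWrapper hunk, a sketch of walking a chain of content chunks, with a hypothetical Chunk class standing in for ZODB's Pdata and the attribute name taken from the converted code:

import io

class Chunk:
    # Hypothetical stand-in for a Pdata-like chunk: some bytes plus a link.
    def __init__(self, data, nextChunk=None):
        self.data = data
        self.__next__ = nextChunk

def dumpChunks(first, f):
    # Write the first chunk, then follow the chain until it ends.
    f.write(first.data)
    part = first.__next__
    while part:
        f.write(part.data)
        part = part.__next__

buf = io.BytesIO()
dumpChunks(Chunk(b'Hello, ', Chunk(b'world!')), buf)
print(buf.getvalue())   # b'Hello, world!'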
@ -66,9 +66,9 @@ HTML_ENTITIES = {
    'euro':'€', 'nbsp':' ', "rsquo":"'", "lsquo":"'", "ldquo":"'",
    "rdquo":"'", 'ndash': '—', 'mdash': '—', 'oelig':'oe', 'quot': "'",
    'mu': 'µ'}
import htmlentitydefs
for k, v in htmlentitydefs.entitydefs.iteritems():
    if not HTML_ENTITIES.has_key(k) and not XML_ENTITIES.has_key(k):
import html.entities
for k, v in html.entities.entitydefs.items():
    if k not in HTML_ENTITIES and k not in XML_ENTITIES:
        HTML_ENTITIES[k] = ''

def escapeXml(s, format='xml', nsText='text'):
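A short illustration of the Python 3 module rename used above: htmlentitydefs becomes html.entities, and has_key() tests become membership tests (the KNOWN dict is illustrative):

import html.entities

KNOWN = {'amp': '&', 'lt': '<', 'gt': '>'}
for name, char in html.entities.entitydefs.items():
    if name not in KNOWN:          # Python 3: "in", not has_key()
        KNOWN[name] = char
print(KNOWN['eacute'])             # é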
@ -77,13 +77,13 @@ def escapeXml(s, format='xml', nsText='text'):
       their ODF counterparts. In this case, it is needed to give the name of
       the "text" namespace (p_nsText) as defined in the ODF document where the
       line breaks and tabs must be inserted.'''
    if isinstance(s, unicode):
        res = u''
    if isinstance(s, str):
        res = ''
    else:
        res = ''
    odf = format == 'odf'
    for c in s:
        if XML_SPECIAL_CHARS_NO_APOS.has_key(c):
        if c in XML_SPECIAL_CHARS_NO_APOS:
            # We do not escape 'apos': there is no particular need for that.
            res += XML_SPECIAL_CHARS_NO_APOS[c]
        elif odf and (c == '\n'):

@ -99,12 +99,12 @@ def escapeXml(s, format='xml', nsText='text'):
def escapeXhtml(s):
    '''Return p_s, whose XHTML special chars and carriage return chars have
       been replaced with corresponding XHTML entities.'''
    if isinstance(s, unicode):
        res = u''
    if isinstance(s, str):
        res = ''
    else:
        res = ''
    for c in s:
        if XML_SPECIAL_CHARS_NO_APOS.has_key(c):
        if c in XML_SPECIAL_CHARS_NO_APOS:
            res += XML_SPECIAL_CHARS_NO_APOS[c]
        elif c == '\n':
            res += '<br/>'
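A minimal sketch of the character-escaping loop used by escapeXml()/escapeXhtml() above, with an illustrative entity table standing in for XML_SPECIAL_CHARS_NO_APOS:

SPECIAL = {'<': '&lt;', '>': '&gt;', '&': '&amp;', '"': '&quot;'}

def escape(s):
    res = ''                       # Python 3: a plain str, no u'' prefix needed
    for c in s:
        if c in SPECIAL:           # membership test instead of has_key()
            res += SPECIAL[c]
        elif c == '\n':
            res += '<br/>'
        else:
            res += c
    return res

print(escape('a < b & "c"\nnext line'))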
@ -144,7 +144,7 @@ class XmlElement:
        res = self.elem
        if self.attrs:
            res += '('
            for attrName, attrValue in self.attrs.items():
            for attrName, attrValue in list(self.attrs.items()):
                res += '%s="%s"' % (attrName, attrValue)
            res += ')'
        return res

@ -167,7 +167,7 @@ class XmlEnvironment:
        self.parser = None
    def manageNamespaces(self, attrs):
        '''Manages namespaces definitions encountered in p_attrs.'''
        for attrName, attrValue in attrs.items():
        for attrName, attrValue in list(attrs.items()):
            if attrName.startswith('xmlns:'):
                self.namespaces[attrValue] = attrName[6:]
    def ns(self, nsUri):

@ -221,7 +221,7 @@ class XmlParser(ContentHandler, ErrorHandler):
    def skippedEntity(self, name):
        '''This method is called every time expat does not recognize an entity.
           We provide here support for HTML entities.'''
        if HTML_ENTITIES.has_key(name):
        if name in HTML_ENTITIES:
            self.characters(HTML_ENTITIES[name].decode('utf-8'))
        else:
            # Put a question mark instead of raising an exception.

@ -230,10 +230,10 @@ class XmlParser(ContentHandler, ErrorHandler):
    # ErrorHandler methods ---------------------------------------------------
    def error(self, error):
        if self.raiseOnError: raise error
        else: print('SAX error %s' % str(error))
        else: print(('SAX error %s' % str(error)))
    def fatalError(self, error):
        if self.raiseOnError: raise error
        else: print('SAX fatal error %s' % str(error))
        else: print(('SAX fatal error %s' % str(error)))
    def warning(self, error): pass

    def parse(self, xml, source='string'):

@ -246,9 +246,9 @@ class XmlParser(ContentHandler, ErrorHandler):
           method will close it.
        '''
        try:
            from cStringIO import StringIO
            from io import StringIO
        except ImportError:
            from StringIO import StringIO
            from io import StringIO
        self._xml = xml
        self.parser.setContentHandler(self)
        self.parser.setErrorHandler(self)
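The parse() hunk above replaces both the cStringIO and StringIO imports with io.StringIO, the single Python 3 home for in-memory text streams; a small usage sketch:

from io import StringIO

# An in-memory, file-like object holding XML text, as parse() expects
# when its "source" is a string.
xml = StringIO('<root><item>1</item></root>')
print(xml.read())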
@ -333,7 +333,7 @@ class XmlUnmarshaller(XmlParser):
    def convertAttrs(self, attrs):
        '''Converts XML attrs to a dict.'''
        res = {}
        for k, v in attrs.items():
        for k, v in list(attrs.items()):
            if ':' in k: # An attr prefixed with a namespace. Remove this.
                k = k.split(':')[-1]
            res[str(k)] = self.encode(v)

@ -362,9 +362,9 @@ class XmlUnmarshaller(XmlParser):
        e = XmlParser.startElement(self, elem, attrs)
        # Determine the type of the element.
        elemType = 'unicode' # Default value
        if attrs.has_key('type'):
        if 'type' in attrs:
            elemType = attrs['type']
        elif self.tagTypes.has_key(elem):
        elif elem in self.tagTypes:
            elemType = self.tagTypes[elem]
        if elemType in self.containerTags:
            # I must create a new container object.

@ -375,9 +375,9 @@ class XmlUnmarshaller(XmlParser):
            elif elemType == 'dict': newObject = {}
            elif elemType == 'file':
                newObject = UnmarshalledFile()
                if attrs.has_key('name'):
                if 'name' in attrs:
                    newObject.name = self.encode(attrs['name'])
                if attrs.has_key('mimeType'):
                if 'mimeType' in attrs:
                    newObject.mimeType = self.encode(attrs['mimeType'])
            else: newObject = Object(**self.convertAttrs(attrs))
            # Store the value on the last container, or on the root object.

@ -454,17 +454,17 @@ class XmlUnmarshaller(XmlParser):
            if not value: value = None
        else:
            # If we have a custom converter for values of this type, use it.
            if self.conversionFunctions.has_key(e.currentBasicType):
            if e.currentBasicType in self.conversionFunctions:
                try:
                    value = self.conversionFunctions[e.currentBasicType](
                        value)
                except Exception, err:
                except Exception as err:
                    raise AppyError(CUSTOM_CONVERSION_ERROR % (
                        e.currentBasicType, value, str(err)))
            # If not, try a standard conversion
            elif e.currentBasicType in self.numericTypes:
                try:
                    exec 'value = %s' % value
                    exec('value = %s' % value)
                except SyntaxError:
                    raise AppyError(CONVERSION_ERROR % (
                        e.currentBasicType, value))
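A compact sketch of the two Python 3 syntax changes in this hunk, the "except ... as ..." form and exec() as a function (the converter table and names are illustrative):

conversionFunctions = {'date': str}   # illustrative converter table
basicType, raw = 'float', '3.14'

if basicType in conversionFunctions:  # membership instead of has_key()
    try:
        value = conversionFunctions[basicType](raw)
    except Exception as err:          # Python 3: "as", not a comma
        raise Exception('Custom conversion failed: %s' % err)
else:
    try:
        exec('value = %s' % raw)      # Python 3: exec is a function
    except SyntaxError:
        raise Exception('Could not convert %s' % raw)
print(value)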
@ -578,7 +578,7 @@ class XmlMarshaller:
        tagName = self.getTagName(self.rootElementName)
        res.write('<'); res.write(tagName)
        # Dumps namespace definitions if any
        for prefix, url in self.namespaces.iteritems():
        for prefix, url in self.namespaces.items():
            if not prefix:
                pre = 'xmlns' # The default namespace
            else:

@ -597,7 +597,7 @@ class XmlMarshaller:
            s = s.decode('utf-8')
        # Replace special chars by XML entities
        for c in s:
            if self.xmlEntities.has_key(c):
            if c in self.xmlEntities:
                res.write(self.xmlEntities[c])
            else:
                res.write(c)

@ -617,13 +617,13 @@ class XmlMarshaller:
                # There will be several parts.
                w(v.data.data.encode('base64'))
                # Write subsequent parts
                nextPart = v.data.next
                nextPart = v.data.__next__
                nextPartNb = 2
                while nextPart:
                    w('</%s>' % partTag) # Close the previous part
                    w('<%s type="base64" number="%d">' % (partTag, nextPartNb))
                    w(nextPart.data.encode('base64'))
                    nextPart = nextPart.next
                    nextPart = nextPart.__next__
                    nextPartNb += 1
            else:
                w(v.data.encode('base64'))
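As a side note on the base64 dumping above, a Python 3 sketch using the base64 module (the 'base64' string codec reached through str.encode() is a Python 2 idiom); the names here are illustrative:

import base64

def dumpBase64(write, chunks):
    # Write each binary chunk as its own base64-encoded part.
    for number, data in enumerate(chunks, start=1):
        write('<part type="base64" number="%d">' % number)
        write(base64.b64encode(data).decode('ascii'))
        write('</part>')

parts = []
dumpBase64(parts.append, [b'first chunk', b'second chunk'])
print(''.join(parts))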
@ -654,7 +654,7 @@ class XmlMarshaller:

    def dumpDict(self, res, v):
        '''Dumps the XML version of dict p_v.'''
        for key, value in v.iteritems():
        for key, value in v.items():
            res.write('<entry type="object">')
            self.dumpField(res, 'k', key)
            self.dumpField(res, 'v', value)

@ -681,7 +681,7 @@ class XmlMarshaller:
            # The previous condition must be checked before this one because
            # referred objects may be stored in lists or tuples, too.
            for elem in value: self.dumpField(res, 'e', elem)
        elif isinstance(value, basestring): self.dumpString(res, value)
        elif isinstance(value, str): self.dumpString(res, value)
        elif isinstance(value, bool): res.write(self.trueFalse[value])
        elif fieldType == 'object':
            if hasattr(value, 'absolute_url'):

@ -689,7 +689,7 @@ class XmlMarshaller:
                res.write(value.absolute_url())
            else:
                # Dump the entire object content
                for k, v in value.__dict__.iteritems():
                for k, v in value.__dict__.items():
                    if not k.startswith('__'):
                        self.dumpField(res, k, v)
                # Maybe we could add a parameter to the marshaller to know how

@ -771,7 +771,7 @@ class XmlMarshaller:
        rootTagName = self.dumpRootTag(res, instance)
        # Dump the fields of this root object
        if objectType == 'popo':
            for fieldName, fieldValue in instance.__dict__.iteritems():
            for fieldName, fieldValue in instance.__dict__.items():
                mustDump = False
                if fieldName in self.fieldsToExclude:
                    mustDump = False

@ -823,11 +823,11 @@ class XmlMarshaller:
            histTag = self.getTagName('history')
            eventTag = self.getTagName('event')
            res.write('<%s type="list">' % histTag)
            key = instance.workflow_history.keys()[0]
            key = list(instance.workflow_history.keys())[0]
            history = instance.workflow_history[key]
            for event in history:
                res.write('<%s type="object">' % eventTag)
                for k, v in event.iteritems():
                for k, v in event.items():
                    self.dumpField(res, k, v)
                res.write('</%s>' % eventTag)
            res.write('</%s>' % histTag)
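A short illustration of why the rewrite wraps keys() in list() above: in Python 3, dict.keys() returns a view that cannot be indexed (the history data below is made up):

workflow_history = {'appy_workflow': ({'action': 'create'}, {'action': 'publish'})}

# keys() is a view in Python 3; indexing requires materializing it first.
key = list(workflow_history.keys())[0]
for event in workflow_history[key]:
    for k, v in event.items():      # items() replaces Python 2's iteritems()
        print('%s = %s' % (k, v))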
@ -856,7 +856,7 @@ class XmlHandler(ContentHandler):
       (like dates) from a file that need to be compared to another file.'''
    def __init__(self, xmlTagsToIgnore, xmlAttrsToIgnore):
        ContentHandler.__init__(self)
        self.res = unicode(xmlPrologue)
        self.res = str(xmlPrologue)
        self.namespaces = {} # ~{s_namespaceUri:s_namespaceName}~
        self.indentLevel = -1
        self.tabWidth = 3

@ -891,7 +891,7 @@ class XmlHandler(ContentHandler):
        self.res += '\n' + (' ' * self.indentLevel * self.tabWidth)
    def manageNamespaces(self, attrs):
        '''Manage namespaces definitions encountered in attrs'''
        for attrName, attrValue in attrs.items():
        for attrName, attrValue in list(attrs.items()):
            if attrName.startswith('xmlns:'):
                self.namespaces[attrValue] = attrName[6:]
    def ns(self, nsUri):

@ -906,7 +906,7 @@ class XmlHandler(ContentHandler):
        self.indentLevel += 1
        self.dumpSpaces()
        self.res += '<%s' % elem
        attrsNames = attrs.keys()
        attrsNames = list(attrs.keys())
        attrsNames.sort()
        for attrToIgnore in self.attrsToIgnore:
            if attrToIgnore in attrsNames:

@ -986,7 +986,7 @@ class XmlComparator:
                    else: print(line)
                else:
                    if report: report.say(line[:-1], encoding=encoding)
                    else: print(line[:-1])
                    else: print((line[:-1]))
                lastLinePrinted = True
            else:
                lastLinePrinted = False

@ -1061,7 +1061,7 @@ class XhtmlCleaner(XmlParser):
        self.env.ignoreContent = False
        try:
            res = self.parse('<x>%s</x>' % s).encode('utf-8')
        except SAXParseException, e:
        except SAXParseException as e:
            raise self.Error(str(e))
        return res

@ -1106,7 +1106,7 @@ class XhtmlCleaner(XmlParser):
            prefix = ''
        res = '%s<%s' % (prefix, elem)
        # Include the found attributes, excepted those that must be ignored.
        for name, value in attrs.items():
        for name, value in list(attrs.items()):
            if not self.keepStyles:
                if name in self.attrsToIgnore: continue
                elif name == 'style':

@ -1115,7 +1115,7 @@ class XhtmlCleaner(XmlParser):
                res += ' %s="%s"' % (name, value)
        # Include additional attributes if required.
        if elem in self.attrsToAdd:
            for name, value in self.attrsToAdd[elem].iteritems():
            for name, value in self.attrsToAdd[elem].items():
                res += ' %s="%s"' % (name, value)
        # Close the tag if it is a no-end tag
        if elem in self.noEndTags:

@ -1163,7 +1163,7 @@ class XhtmlCleaner(XmlParser):
            # I give here to lstrip an explicit list of what is to be considered
            # as blank chars, because I do not want unicode NBSP chars to be in
            # this list.
            toAdd = content.lstrip(u' \n\r\t')
            toAdd = content.lstrip(' \n\r\t')
        else:
            toAdd = content
        # Re-transform XML special chars to entities.