tools v2.1
combined kindle/mobi plugin
@@ -0,0 +1,21 @@
eReader PDB2PML - eReaderPDB2PML_vXX_plugin.zip

All credit given to The Dark Reverser for the original standalone script. I had the much easier job of converting it to a Calibre plugin.

This plugin is meant to convert secure eReader files (PDB) to unsecured PMLZ files. Calibre can then convert them to whatever format you desire. It is meant to function without having to install any dependencies... other than having Calibre installed, of course. I've included the psyco libraries (compiled for each platform) for speed. If your system can use them, great! Otherwise, they won't be used and things will just work slower.

Installation:

Go to Calibre's Preferences page and click on the Plugins button. Use the file dialog button to select the plugin's zip file (eReaderPDB2PML_vXX_plugin.zip) and click the 'Add' button. You're done.

Configuration:

Highlight the plugin (eReader PDB 2 PML under the "File type plugins" category) and click the "Customize Plugin" button on Calibre's Preferences->Plugins page. Enter your name and the last 8 digits of the credit card number, separated by a comma: Your Name,12341234

If you've purchased books with more than one credit card, separate the entries with a colon: Your Name,12341234:Other Name,23452345 (NOTE: Do NOT put quotes around your name like you do with the original script!)
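
For the curious, here is a minimal sketch (illustrative only, not lifted verbatim from the plugin code below) of how a customization string in this format gets split into name/credit-card pairs; the sample values are placeholders:

    keydata = 'Your Name,12341234:Other Name,23452345'
    pairs = []
    for entry in keydata.split(':'):
        name, cc = entry.split(',')   # a missing comma raises ValueError
        pairs.append((name, cc))
    print pairs   # [('Your Name', '12341234'), ('Other Name', '23452345')]
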
Troubleshooting:

If you find that it's not working for you (imported pdb files are not converted to pmlz format), you can save a lot of time and trouble by first trying to add the pdb to Calibre with the command-line tools. This prints out a lot of helpful debugging info that can be copied into any online help requests. I'm going to ask you to do it first anyway, so you might as well get used to it. ;)

Open a command prompt (terminal) and change to the directory where the ebook you're trying to import resides. Then type the command "calibredb add your_ebook.pdb". Don't type the quotes, and obviously change 'your_ebook.pdb' to whatever the filename of your book is. Copy the resulting output and paste it into any online help request you make.

** Note: the Mac version of Calibre doesn't install the command-line tools by default. If you go to the 'Preferences' page and click on the miscellaneous button, you'll see the option to install the command-line tools.
148  Calibre_Plugins/eReaderPDB2PML_plugin/eReaderPDB2PML_plugin.py  Normal file
@@ -0,0 +1,148 @@
#!/usr/bin/env python

# eReaderPDB2PML_v01_plugin.py
# Released under the terms of the GNU General Public Licence, version 3 or
# later. <http://www.gnu.org/licenses/>
#
# All credit given to The Dark Reverser for the original standalone script.
# I had the much easier job of converting it to a Calibre plugin.
#
# This plugin is meant to convert secure eReader files (PDB) to unsecured PMLZ files.
# Calibre can then convert them to whatever format you desire.
# It is meant to function without having to install any dependencies...
# other than having Calibre installed, of course. I've included the psyco libraries
# (compiled for each platform) for speed. If your system can use them, great!
# Otherwise, they won't be used and things will just work slower.
#
# Installation:
# Go to Calibre's Preferences page... click on the Plugins button. Use the file
# dialog button to select the plugin's zip file (eReaderPDB2PML_vXX_plugin.zip) and
# click the 'Add' button. You're done.
#
# Configuration:
# Highlight the plugin (eReader PDB 2 PML) and click the
# "Customize Plugin" button on Calibre's Preferences->Plugins page.
# Enter your name and the last 8 digits of the credit card number separated by
# a comma: Your Name,12341234
#
# If you've purchased books with more than one credit card, separate the info with
# a colon: Your Name,12341234:Other Name,23452345
# NOTE: Do NOT put quotes around your name like you do with the original script!!
#
# Revision history:
#   0.1 - Initial release

import sys, os

from calibre.customize import FileTypePlugin

class eRdrDeDRM(FileTypePlugin):
    name                = 'eReader PDB 2 PML'          # Name of the plugin
    description         = 'Removes DRM from secure pdb files. \
                           Credit given to The Dark Reverser for the original standalone script.'
    supported_platforms = ['linux', 'osx', 'windows']  # Platforms this plugin will run on
    author              = 'DiapDealer'                 # The author of this plugin
    version             = (0, 0, 1)                    # The version number of this plugin
    file_types          = set(['pdb'])                 # The file types that this plugin will be applied to
    on_import           = True                         # Run this plugin during the import

    def run(self, path_to_ebook):
        from calibre.ptempfile import PersistentTemporaryDirectory
        from calibre.constants import iswindows, isosx
        pdir = 'windows' if iswindows else 'osx' if isosx else 'linux'
        ppath = os.path.join(self.sys_insertion_path, pdir)
        sys.path.insert(0, ppath)
        #sys.path.append(ppath)

        global bookname, erdr2pml
        import erdr2pml

        if 'psyco' in sys.modules:
            print 'Using psyco acceleration for %s.' % pdir
        else:
            print 'NOT using psyco acceleration for %s. Conversion may be slow.' % pdir

        infile = path_to_ebook
        bookname = os.path.splitext(os.path.basename(infile))[0]
        outdir = PersistentTemporaryDirectory()
        pmlzfile = self.temporary_file(bookname + '.pmlz')

        if self.site_customization:
            keydata = self.site_customization
            ar = keydata.split(':')
            for i in ar:
                try:
                    name, cc = i.split(',')
                except ValueError:
                    sys.path.remove(ppath)
                    print ' Error parsing user supplied data.'
                    return path_to_ebook

                try:
                    print "Processing..."
                    import time
                    start_time = time.time()
                    pmlfilepath = self.convertEreaderToPml(infile, name, cc, outdir)

                    if pmlfilepath and pmlfilepath != 1:
                        import zipfile
                        import shutil
                        print " Creating PMLZ file"
                        myZipFile = zipfile.ZipFile(pmlzfile.name, 'w', zipfile.ZIP_STORED, False)
                        list = os.listdir(outdir)
                        for file in list:
                            localname = file
                            filePath = os.path.join(outdir, file)
                            if os.path.isfile(filePath):
                                myZipFile.write(filePath, localname)
                            elif os.path.isdir(filePath):
                                imageList = os.listdir(filePath)
                                localimgdir = os.path.basename(filePath)
                                for image in imageList:
                                    localname = os.path.join(localimgdir, image)
                                    imagePath = os.path.join(filePath, image)
                                    if os.path.isfile(imagePath):
                                        myZipFile.write(imagePath, localname)
                        myZipFile.close()
                        end_time = time.time()
                        search_time = end_time - start_time
                        print 'elapsed time: %.2f seconds' % (search_time, )
                        print "done"
                        return pmlzfile.name
                    else:
                        raise ValueError('Error Creating PML file.')
                except ValueError, e:
                    print "Error: %s" % e
                    pass
            raise Exception('Couldn\'t decrypt pdb file.')
        else:
            raise Exception('No name and CC# provided.')

    def convertEreaderToPml(self, infile, name, cc, outdir):
        print " Decoding File"
        sect = erdr2pml.Sectionizer(infile, 'PNRdPPrs')
        er = erdr2pml.EreaderProcessor(sect.loadSection, name, cc)

        if er.getNumImages() > 0:
            print " Extracting images"
            #imagedir = bookname + '_img/'
            imagedir = 'images/'
            imagedirpath = os.path.join(outdir, imagedir)
            if not os.path.exists(imagedirpath):
                os.makedirs(imagedirpath)
            for i in xrange(er.getNumImages()):
                name, contents = er.getImage(i)
                file(os.path.join(imagedirpath, name), 'wb').write(contents)

        print " Extracting pml"
        pml_string = er.getText()
        pmlfilename = bookname + ".pml"
        try:
            file(os.path.join(outdir, pmlfilename), 'wb').write(erdr2pml.cleanPML(pml_string))
            return os.path.join(outdir, pmlfilename)
        except:
            return 1

    def customization_help(self, gui=False):
        return 'Enter Account Name & Last 8 digits of Credit Card number (separate with a comma)'
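
For testing outside of Calibre, the same conversion can be driven directly from the erdr2pml module listed below. This is a minimal, hypothetical sketch: the file names and the name/credit-card values are placeholders, and it skips the image extraction that the plugin performs.

    import os
    import erdr2pml

    infile = 'your_ebook.pdb'            # placeholder input file
    name, cc = 'Your Name', '12341234'   # placeholder account details
    outdir = 'your_ebook_Source'
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    sect = erdr2pml.Sectionizer(infile, 'PNRdPPrs')
    er = erdr2pml.EreaderProcessor(sect.loadSection, name, cc)
    pml = erdr2pml.cleanPML(er.getText())
    open(os.path.join(outdir, 'your_ebook.pml'), 'wb').write(pml)
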
692  Calibre_Plugins/eReaderPDB2PML_plugin/erdr2pml.py  Normal file
@@ -0,0 +1,692 @@
#!/usr/bin/env python
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# erdr2pml.py
#
# This is a python script. You need a Python interpreter to run it.
# For example, ActiveState Python, which exists for windows.
#
# Changelog
#
# Based on ereader2html version 0.08 plus some later small fixes
#
#  0.01 - Initial version
#  0.02 - Support more eReader files. Support bold text and links. Fix PML decoder parsing bug.
#  0.03 - Fix incorrect variable usage at one place.
#  0.03b - enhancement by DeBockle (version 259 support)
# Custom version 0.03 - no change to eReader support, only usability changes
#   - start of pep-8 indentation (spaces not tab), fix trailing blanks
#   - version variable, only one place to change
#   - added main routine, now callable as a library/module,
#     means tools can add optional support for ereader2html
#   - outdir is no longer a mandatory parameter (defaults based on input name if missing)
#   - time taken output to stdout
#   - Psyco support - reduces runtime by a factor of (over) 3!
#     E.g. (~600Kb file) 90 secs down to 24 secs
#   - newstyle classes
#   - changed map call to list comprehension
#     may not work with python 2.3
#     without Psyco this reduces runtime to 90%
#     E.g. 90 secs down to 77 secs
#     Psyco with map calls takes longer, do not run with map in Psyco JIT!
#   - izip calls used instead of zip (if available), further reduction
#     in run time (factor of 4.5).
#     E.g. (~600Kb file) 90 secs down to 20 secs
#   - Python 2.6+ support, avoid DeprecationWarning with sha/sha1
#  0.04 - Footnote support, PML output, correct charset in html, support more PML tags
#   - Feature change, dump out PML file
#   - Added support for footnote tags. NOTE footnote ids appear to be bad (not usable)
#     in some pdb files :-( due to the same id being used multiple times
#   - Added correct charset encoding (pml is based on cp1252)
#   - Added logging support.
#  0.05 - Improved type 272 support for sidebars, links, chapters, metainfo, etc
#  0.06 - Merge of 0.04 and 0.05. Improved HTML output
#         Placed images in subfolder, so that it's possible to just
#         drop the book.pml file onto DropBook to make an unencrypted
#         copy of the eReader file.
#         Using that with Calibre works a lot better than the HTML
#         conversion in this code.
#  0.07 - Further improved type 272 support for sidebars with all earlier fixes
#  0.08 - fixed typos, removed extraneous things
#  0.09 - fixed typos in first_pages to first_page to again support older formats
#  0.10 - minor cleanups
#  0.11 - fixups for using correct xml for footnotes and sidebars for use with Dropbook
#  0.12 - Fix added to prevent lowercasing of image names when the pml code itself uses a different case in the link name.
#  0.13 - change to unbuffered stdout for use with gui front ends
#  0.14 - contributed enhancement to support --make-pmlz switch
#  0.15 - enabled high-ascii to pml character encoding. DropBook now works on Mac.

__version__='0.15'
|
||||
|
||||
# Import Psyco if available
|
||||
try:
|
||||
# Dumb speed hack 1
|
||||
# http://psyco.sourceforge.net
|
||||
import psyco
|
||||
psyco.full()
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
# Dumb speed hack 2
|
||||
# All map() calls converted to list comprehension (some use zip)
|
||||
# override zip with izip - saves memory and in rough testing
|
||||
# appears to be faster zip() is only used in the converted map() calls
|
||||
from itertools import izip as zip
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
class Unbuffered:
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
def write(self, data):
|
||||
self.stream.write(data)
|
||||
self.stream.flush()
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.stream, attr)
|
||||
|
||||
import sys
|
||||
sys.stdout=Unbuffered(sys.stdout)
|
||||
|
||||
import struct, binascii, getopt, zlib, os, os.path, urllib, tempfile
|
||||
|
||||
try:
|
||||
from hashlib import sha1
|
||||
except ImportError:
|
||||
# older Python release
|
||||
import sha
|
||||
sha1 = lambda s: sha.new(s)
|
||||
import cgi
|
||||
import logging
|
||||
|
||||
logging.basicConfig()
|
||||
#logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
ECB = 0
|
||||
CBC = 1
|
||||
class Des(object):
|
||||
__pc1 = [56, 48, 40, 32, 24, 16, 8, 0, 57, 49, 41, 33, 25, 17,
|
||||
9, 1, 58, 50, 42, 34, 26, 18, 10, 2, 59, 51, 43, 35,
|
||||
62, 54, 46, 38, 30, 22, 14, 6, 61, 53, 45, 37, 29, 21,
|
||||
13, 5, 60, 52, 44, 36, 28, 20, 12, 4, 27, 19, 11, 3]
|
||||
__left_rotations = [1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]
|
||||
__pc2 = [13, 16, 10, 23, 0, 4,2, 27, 14, 5, 20, 9,
|
||||
22, 18, 11, 3, 25, 7, 15, 6, 26, 19, 12, 1,
|
||||
40, 51, 30, 36, 46, 54, 29, 39, 50, 44, 32, 47,
|
||||
43, 48, 38, 55, 33, 52, 45, 41, 49, 35, 28, 31]
|
||||
__ip = [57, 49, 41, 33, 25, 17, 9, 1, 59, 51, 43, 35, 27, 19, 11, 3,
|
||||
61, 53, 45, 37, 29, 21, 13, 5, 63, 55, 47, 39, 31, 23, 15, 7,
|
||||
56, 48, 40, 32, 24, 16, 8, 0, 58, 50, 42, 34, 26, 18, 10, 2,
|
||||
60, 52, 44, 36, 28, 20, 12, 4, 62, 54, 46, 38, 30, 22, 14, 6]
|
||||
__expansion_table = [31, 0, 1, 2, 3, 4, 3, 4, 5, 6, 7, 8,
|
||||
7, 8, 9, 10, 11, 12,11, 12, 13, 14, 15, 16,
|
||||
15, 16, 17, 18, 19, 20,19, 20, 21, 22, 23, 24,
|
||||
23, 24, 25, 26, 27, 28,27, 28, 29, 30, 31, 0]
|
||||
__sbox = [[14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7,
|
||||
0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8,
|
||||
4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0,
|
||||
15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13],
|
||||
[15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10,
|
||||
3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5,
|
||||
0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15,
|
||||
13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9],
|
||||
[10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8,
|
||||
13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1,
|
||||
13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7,
|
||||
1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12],
|
||||
[7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15,
|
||||
13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9,
|
||||
10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4,
|
||||
3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14],
|
||||
[2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9,
|
||||
14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6,
|
||||
4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14,
|
||||
11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3],
|
||||
[12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11,
|
||||
10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8,
|
||||
9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6,
|
||||
4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13],
|
||||
[4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1,
|
||||
13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6,
|
||||
1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2,
|
||||
6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12],
|
||||
[13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7,
|
||||
1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2,
|
||||
7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8,
|
||||
2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11],]
|
||||
__p = [15, 6, 19, 20, 28, 11,27, 16, 0, 14, 22, 25,
|
||||
4, 17, 30, 9, 1, 7,23,13, 31, 26, 2, 8,18, 12, 29, 5, 21, 10,3, 24]
|
||||
__fp = [39, 7, 47, 15, 55, 23, 63, 31,38, 6, 46, 14, 54, 22, 62, 30,
|
||||
37, 5, 45, 13, 53, 21, 61, 29,36, 4, 44, 12, 52, 20, 60, 28,
|
||||
35, 3, 43, 11, 51, 19, 59, 27,34, 2, 42, 10, 50, 18, 58, 26,
|
||||
33, 1, 41, 9, 49, 17, 57, 25,32, 0, 40, 8, 48, 16, 56, 24]
|
||||
# Type of crypting being done
|
||||
ENCRYPT = 0x00
|
||||
DECRYPT = 0x01
|
||||
def __init__(self, key, mode=ECB, IV=None):
|
||||
if len(key) != 8:
|
||||
raise ValueError("Invalid DES key size. Key must be exactly 8 bytes long.")
|
||||
self.block_size = 8
|
||||
self.key_size = 8
|
||||
self.__padding = ''
|
||||
self.setMode(mode)
|
||||
if IV:
|
||||
self.setIV(IV)
|
||||
self.L = []
|
||||
self.R = []
|
||||
self.Kn = [ [0] * 48 ] * 16 # 16 48-bit keys (K1 - K16)
|
||||
self.final = []
|
||||
self.setKey(key)
|
||||
def getKey(self):
|
||||
return self.__key
|
||||
def setKey(self, key):
|
||||
self.__key = key
|
||||
self.__create_sub_keys()
|
||||
def getMode(self):
|
||||
return self.__mode
|
||||
def setMode(self, mode):
|
||||
self.__mode = mode
|
||||
def getIV(self):
|
||||
return self.__iv
|
||||
def setIV(self, IV):
|
||||
if not IV or len(IV) != self.block_size:
|
||||
raise ValueError("Invalid Initial Value (IV), must be a multiple of " + str(self.block_size) + " bytes")
|
||||
self.__iv = IV
|
||||
def getPadding(self):
|
||||
return self.__padding
|
||||
def __String_to_BitList(self, data):
|
||||
l = len(data) * 8
|
||||
result = [0] * l
|
||||
pos = 0
|
||||
for c in data:
|
||||
i = 7
|
||||
ch = ord(c)
|
||||
while i >= 0:
|
||||
if ch & (1 << i) != 0:
|
||||
result[pos] = 1
|
||||
else:
|
||||
result[pos] = 0
|
||||
pos += 1
|
||||
i -= 1
|
||||
return result
|
||||
def __BitList_to_String(self, data):
|
||||
result = ''
|
||||
pos = 0
|
||||
c = 0
|
||||
while pos < len(data):
|
||||
c += data[pos] << (7 - (pos % 8))
|
||||
if (pos % 8) == 7:
|
||||
result += chr(c)
|
||||
c = 0
|
||||
pos += 1
|
||||
return result
|
||||
def __permutate(self, table, block):
|
||||
return [block[x] for x in table]
|
||||
def __create_sub_keys(self):
|
||||
key = self.__permutate(Des.__pc1, self.__String_to_BitList(self.getKey()))
|
||||
i = 0
|
||||
self.L = key[:28]
|
||||
self.R = key[28:]
|
||||
while i < 16:
|
||||
j = 0
|
||||
while j < Des.__left_rotations[i]:
|
||||
self.L.append(self.L[0])
|
||||
del self.L[0]
|
||||
self.R.append(self.R[0])
|
||||
del self.R[0]
|
||||
j += 1
|
||||
self.Kn[i] = self.__permutate(Des.__pc2, self.L + self.R)
|
||||
i += 1
|
||||
def __des_crypt(self, block, crypt_type):
|
||||
block = self.__permutate(Des.__ip, block)
|
||||
self.L = block[:32]
|
||||
self.R = block[32:]
|
||||
if crypt_type == Des.ENCRYPT:
|
||||
iteration = 0
|
||||
iteration_adjustment = 1
|
||||
else:
|
||||
iteration = 15
|
||||
iteration_adjustment = -1
|
||||
i = 0
|
||||
while i < 16:
|
||||
tempR = self.R[:]
|
||||
self.R = self.__permutate(Des.__expansion_table, self.R)
|
||||
self.R = [x ^ y for x,y in zip(self.R, self.Kn[iteration])]
|
||||
B = [self.R[:6], self.R[6:12], self.R[12:18], self.R[18:24], self.R[24:30], self.R[30:36], self.R[36:42], self.R[42:]]
|
||||
j = 0
|
||||
Bn = [0] * 32
|
||||
pos = 0
|
||||
while j < 8:
|
||||
m = (B[j][0] << 1) + B[j][5]
|
||||
n = (B[j][1] << 3) + (B[j][2] << 2) + (B[j][3] << 1) + B[j][4]
|
||||
v = Des.__sbox[j][(m << 4) + n]
|
||||
Bn[pos] = (v & 8) >> 3
|
||||
Bn[pos + 1] = (v & 4) >> 2
|
||||
Bn[pos + 2] = (v & 2) >> 1
|
||||
Bn[pos + 3] = v & 1
|
||||
pos += 4
|
||||
j += 1
|
||||
self.R = self.__permutate(Des.__p, Bn)
|
||||
self.R = [x ^ y for x, y in zip(self.R, self.L)]
|
||||
self.L = tempR
|
||||
i += 1
|
||||
iteration += iteration_adjustment
|
||||
self.final = self.__permutate(Des.__fp, self.R + self.L)
|
||||
return self.final
|
||||
def crypt(self, data, crypt_type):
|
||||
if not data:
|
||||
return ''
|
||||
if len(data) % self.block_size != 0:
|
||||
if crypt_type == Des.DECRYPT: # Decryption must work on 8 byte blocks
|
||||
raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n.")
|
||||
if not self.getPadding():
|
||||
raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n. Try setting the optional padding character")
|
||||
else:
|
||||
data += (self.block_size - (len(data) % self.block_size)) * self.getPadding()
|
||||
if self.getMode() == CBC:
|
||||
if self.getIV():
|
||||
iv = self.__String_to_BitList(self.getIV())
|
||||
else:
|
||||
raise ValueError("For CBC mode, you must supply the Initial Value (IV) for ciphering")
|
||||
i = 0
|
||||
dict = {}
|
||||
result = []
|
||||
while i < len(data):
|
||||
block = self.__String_to_BitList(data[i:i+8])
|
||||
if self.getMode() == CBC:
|
||||
if crypt_type == Des.ENCRYPT:
|
||||
block = [x ^ y for x, y in zip(block, iv)]
|
||||
processed_block = self.__des_crypt(block, crypt_type)
|
||||
if crypt_type == Des.DECRYPT:
|
||||
processed_block = [x ^ y for x, y in zip(processed_block, iv)]
|
||||
iv = block
|
||||
else:
|
||||
iv = processed_block
|
||||
else:
|
||||
processed_block = self.__des_crypt(block, crypt_type)
|
||||
result.append(self.__BitList_to_String(processed_block))
|
||||
i += 8
|
||||
if crypt_type == Des.DECRYPT and self.getPadding():
|
||||
s = result[-1]
|
||||
while s[-1] == self.getPadding():
|
||||
s = s[:-1]
|
||||
result[-1] = s
|
||||
return ''.join(result)
|
||||
def encrypt(self, data, pad=''):
|
||||
self.__padding = pad
|
||||
return self.crypt(data, Des.ENCRYPT)
|
||||
def decrypt(self, data, pad=''):
|
||||
self.__padding = pad
|
||||
return self.crypt(data, Des.DECRYPT)
|
||||
|
||||
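# --- Illustrative sketch (not part of the original script): the Des class above
# --- round-trips a single 8-byte block; the key and plaintext here are made-up values.
#   k = Des('01234567')             # DES keys must be exactly 8 bytes
#   block = k.encrypt('AAAABBBB')   # data length must be a multiple of 8
#   assert k.decrypt(block) == 'AAAABBBB'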
class Sectionizer(object):
|
||||
def __init__(self, filename, ident):
|
||||
self.contents = file(filename, 'rb').read()
|
||||
self.header = self.contents[0:72]
|
||||
self.num_sections, = struct.unpack('>H', self.contents[76:78])
|
||||
if self.header[0x3C:0x3C+8] != ident:
|
||||
raise ValueError('Invalid file format')
|
||||
self.sections = []
|
||||
for i in xrange(self.num_sections):
|
||||
offset, a1,a2,a3,a4 = struct.unpack('>LBBBB', self.contents[78+i*8:78+i*8+8])
|
||||
flags, val = a1, a2<<16|a3<<8|a4
|
||||
self.sections.append( (offset, flags, val) )
|
||||
def loadSection(self, section):
|
||||
if section + 1 == self.num_sections:
|
||||
end_off = len(self.contents)
|
||||
else:
|
||||
end_off = self.sections[section + 1][0]
|
||||
off = self.sections[section][0]
|
||||
return self.contents[off:end_off]
|
||||
|
||||
def sanitizeFileName(s):
|
||||
r = ''
|
||||
for c in s:
|
||||
if c in "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_.-":
|
||||
r += c
|
||||
return r
|
||||
|
||||
def fixKey(key):
|
||||
def fixByte(b):
|
||||
return b ^ ((b ^ (b<<1) ^ (b<<2) ^ (b<<3) ^ (b<<4) ^ (b<<5) ^ (b<<6) ^ (b<<7) ^ 0x80) & 0x80)
|
||||
return "".join([chr(fixByte(ord(a))) for a in key])
|
||||
|
||||
def deXOR(text, sp, table):
|
||||
r=''
|
||||
j = sp
|
||||
for i in xrange(len(text)):
|
||||
r += chr(ord(table[j]) ^ ord(text[i]))
|
||||
j = j + 1
|
||||
if j == len(table):
|
||||
j = 0
|
||||
return r
|
||||
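# --- Illustrative sketch (not part of the original script): deXOR applied twice
# --- with the same table and start position returns the original text.
#   masked = deXOR('secret', 0, 'key')
#   assert deXOR(masked, 0, 'key') == 'secret'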
|
||||
class EreaderProcessor(object):
|
||||
def __init__(self, section_reader, username, creditcard):
|
||||
self.section_reader = section_reader
|
||||
data = section_reader(0)
|
||||
version, = struct.unpack('>H', data[0:2])
|
||||
self.version = version
|
||||
logging.info('eReader file format version %s', version)
|
||||
if version != 272 and version != 260 and version != 259:
|
||||
raise ValueError('incorrect eReader version %d (error 1)' % version)
|
||||
data = section_reader(1)
|
||||
self.data = data
|
||||
des = Des(fixKey(data[0:8]))
|
||||
cookie_shuf, cookie_size = struct.unpack('>LL', des.decrypt(data[-8:]))
|
||||
if cookie_shuf < 3 or cookie_shuf > 0x14 or cookie_size < 0xf0 or cookie_size > 0x200:
|
||||
raise ValueError('incorrect eReader version (error 2)')
|
||||
input = des.decrypt(data[-cookie_size:])
|
||||
def unshuff(data, shuf):
|
||||
r = [''] * len(data)
|
||||
j = 0
|
||||
for i in xrange(len(data)):
|
||||
j = (j + shuf) % len(data)
|
||||
r[j] = data[i]
|
||||
assert len("".join(r)) == len(data)
|
||||
return "".join(r)
|
||||
r = unshuff(input[0:-8], cookie_shuf)
|
||||
|
||||
def fixUsername(s):
|
||||
r = ''
|
||||
for c in s.lower():
|
||||
if (c >= 'a' and c <= 'z' or c >= '0' and c <= '9'):
|
||||
r += c
|
||||
return r
|
||||
|
||||
user_key = struct.pack('>LL', binascii.crc32(fixUsername(username)) & 0xffffffff, binascii.crc32(creditcard[-8:])& 0xffffffff)
|
||||
drm_sub_version = struct.unpack('>H', r[0:2])[0]
|
||||
self.num_text_pages = struct.unpack('>H', r[2:4])[0] - 1
|
||||
self.num_image_pages = struct.unpack('>H', r[26:26+2])[0]
|
||||
self.first_image_page = struct.unpack('>H', r[24:24+2])[0]
|
||||
if self.version == 272:
|
||||
self.num_footnote_pages = struct.unpack('>H', r[46:46+2])[0]
|
||||
self.first_footnote_page = struct.unpack('>H', r[44:44+2])[0]
|
||||
self.num_sidebar_pages = struct.unpack('>H', r[38:38+2])[0]
|
||||
self.first_sidebar_page = struct.unpack('>H', r[36:36+2])[0]
|
||||
# self.num_bookinfo_pages = struct.unpack('>H', r[34:34+2])[0]
|
||||
# self.first_bookinfo_page = struct.unpack('>H', r[32:32+2])[0]
|
||||
# self.num_chapter_pages = struct.unpack('>H', r[22:22+2])[0]
|
||||
# self.first_chapter_page = struct.unpack('>H', r[20:20+2])[0]
|
||||
# self.num_link_pages = struct.unpack('>H', r[30:30+2])[0]
|
||||
# self.first_link_page = struct.unpack('>H', r[28:28+2])[0]
|
||||
# self.num_xtextsize_pages = struct.unpack('>H', r[54:54+2])[0]
|
||||
# self.first_xtextsize_page = struct.unpack('>H', r[52:52+2])[0]
|
||||
|
||||
# **before** data record 1 was decrypted and unshuffled, it contained data
# used to create an XOR table, which is then used to fix footnote record 0, link records, chapter records, etc.
|
||||
self.xortable_offset = struct.unpack('>H', r[40:40+2])[0]
|
||||
self.xortable_size = struct.unpack('>H', r[42:42+2])[0]
|
||||
self.xortable = self.data[self.xortable_offset:self.xortable_offset + self.xortable_size]
|
||||
else:
|
||||
self.num_footnote_pages = 0
|
||||
self.num_sidebar_pages = 0
|
||||
self.first_footnote_page = -1
|
||||
self.first_sidebar_page = -1
|
||||
# self.num_bookinfo_pages = 0
|
||||
# self.num_chapter_pages = 0
|
||||
# self.num_link_pages = 0
|
||||
# self.num_xtextsize_pages = 0
|
||||
# self.first_bookinfo_page = -1
|
||||
# self.first_chapter_page = -1
|
||||
# self.first_link_page = -1
|
||||
# self.first_xtextsize_page = -1
|
||||
|
||||
logging.debug('self.num_text_pages %d', self.num_text_pages)
|
||||
logging.debug('self.num_footnote_pages %d, self.first_footnote_page %d', self.num_footnote_pages , self.first_footnote_page)
|
||||
logging.debug('self.num_sidebar_pages %d, self.first_sidebar_page %d', self.num_sidebar_pages , self.first_sidebar_page)
|
||||
self.flags = struct.unpack('>L', r[4:8])[0]
|
||||
reqd_flags = (1<<9) | (1<<7) | (1<<10)
|
||||
if (self.flags & reqd_flags) != reqd_flags:
|
||||
print "Flags: 0x%X" % self.flags
|
||||
raise ValueError('incompatible eReader file')
|
||||
des = Des(fixKey(user_key))
|
||||
if version == 259:
|
||||
if drm_sub_version != 7:
|
||||
raise ValueError('incorrect eReader version %d (error 3)' % drm_sub_version)
|
||||
encrypted_key_sha = r[44:44+20]
|
||||
encrypted_key = r[64:64+8]
|
||||
elif version == 260:
|
||||
if drm_sub_version != 13:
|
||||
raise ValueError('incorrect eReader version %d (error 3)' % drm_sub_version)
|
||||
encrypted_key = r[44:44+8]
|
||||
encrypted_key_sha = r[52:52+20]
|
||||
elif version == 272:
|
||||
encrypted_key = r[172:172+8]
|
||||
encrypted_key_sha = r[56:56+20]
|
||||
self.content_key = des.decrypt(encrypted_key)
|
||||
if sha1(self.content_key).digest() != encrypted_key_sha:
|
||||
raise ValueError('Incorrect Name and/or Credit Card')
|
||||
|
||||
def getNumImages(self):
|
||||
return self.num_image_pages
|
||||
|
||||
def getImage(self, i):
|
||||
sect = self.section_reader(self.first_image_page + i)
|
||||
name = sect[4:4+32].strip('\0')
|
||||
data = sect[62:]
|
||||
return sanitizeFileName(name), data
|
||||
|
||||
|
||||
# def getChapterNamePMLOffsetData(self):
|
||||
# cv = ''
|
||||
# if self.num_chapter_pages > 0:
|
||||
# for i in xrange(self.num_chapter_pages):
|
||||
# chaps = self.section_reader(self.first_chapter_page + i)
|
||||
# j = i % self.xortable_size
|
||||
# offname = deXOR(chaps, j, self.xortable)
|
||||
# offset = struct.unpack('>L', offname[0:4])[0]
|
||||
# name = offname[4:].strip('\0')
|
||||
# cv += '%d|%s\n' % (offset, name)
|
||||
# return cv
|
||||
|
||||
# def getLinkNamePMLOffsetData(self):
|
||||
# lv = ''
|
||||
# if self.num_link_pages > 0:
|
||||
# for i in xrange(self.num_link_pages):
|
||||
# links = self.section_reader(self.first_link_page + i)
|
||||
# j = i % self.xortable_size
|
||||
# offname = deXOR(links, j, self.xortable)
|
||||
# offset = struct.unpack('>L', offname[0:4])[0]
|
||||
# name = offname[4:].strip('\0')
|
||||
# lv += '%d|%s\n' % (offset, name)
|
||||
# return lv
|
||||
|
||||
# def getExpandedTextSizesData(self):
|
||||
# ts = ''
|
||||
# if self.num_xtextsize_pages > 0:
|
||||
# tsize = deXOR(self.section_reader(self.first_xtextsize_page), 0, self.xortable)
|
||||
# for i in xrange(self.num_text_pages):
|
||||
# xsize = struct.unpack('>H', tsize[0:2])[0]
|
||||
# ts += "%d\n" % xsize
|
||||
# tsize = tsize[2:]
|
||||
# return ts
|
||||
|
||||
# def getBookInfo(self):
|
||||
# bkinfo = ''
|
||||
# if self.num_bookinfo_pages > 0:
|
||||
# info = self.section_reader(self.first_bookinfo_page)
|
||||
# bkinfo = deXOR(info, 0, self.xortable)
|
||||
# bkinfo = bkinfo.replace('\0','|')
|
||||
# bkinfo += '\n'
|
||||
# return bkinfo
|
||||
|
||||
def getText(self):
|
||||
des = Des(fixKey(self.content_key))
|
||||
r = ''
|
||||
for i in xrange(self.num_text_pages):
|
||||
logging.debug('get page %d', i)
|
||||
r += zlib.decompress(des.decrypt(self.section_reader(1 + i)))
|
||||
|
||||
# now handle footnotes pages
|
||||
if self.num_footnote_pages > 0:
|
||||
r += '\n'
|
||||
# the record 0 of the footnote section must pass through the Xor Table to make it useful
|
||||
sect = self.section_reader(self.first_footnote_page)
|
||||
fnote_ids = deXOR(sect, 0, self.xortable)
|
||||
# the remaining records of the footnote sections need to be decoded with the content_key and zlib inflated
|
||||
des = Des(fixKey(self.content_key))
|
||||
for i in xrange(1,self.num_footnote_pages):
|
||||
logging.debug('get footnotepage %d', i)
|
||||
id_len = ord(fnote_ids[2])
|
||||
id = fnote_ids[3:3+id_len]
|
||||
fmarker = '<footnote id="%s">\n' % id
|
||||
fmarker += zlib.decompress(des.decrypt(self.section_reader(self.first_footnote_page + i)))
|
||||
fmarker += '\n</footnote>\n'
|
||||
r += fmarker
|
||||
fnote_ids = fnote_ids[id_len+4:]
|
||||
|
||||
# now handle sidebar pages
|
||||
if self.num_sidebar_pages > 0:
|
||||
r += '\n'
|
||||
# the record 0 of the sidebar section must pass through the Xor Table to make it useful
|
||||
sect = self.section_reader(self.first_sidebar_page)
|
||||
sbar_ids = deXOR(sect, 0, self.xortable)
|
||||
# the remaining records of the sidebar sections need to be decoded with the content_key and zlib inflated
|
||||
des = Des(fixKey(self.content_key))
|
||||
for i in xrange(1,self.num_sidebar_pages):
|
||||
id_len = ord(sbar_ids[2])
|
||||
id = sbar_ids[3:3+id_len]
|
||||
smarker = '<sidebar id="%s">\n' % id
|
||||
smarker += zlib.decompress(des.decrypt(self.section_reader(self.first_sidebar_page + i)))
|
||||
smarker += '\n</sidebar>\n'
|
||||
r += smarker
|
||||
sbar_ids = sbar_ids[id_len+4:]
|
||||
|
||||
return r
|
||||
|
||||
def cleanPML(pml):
|
||||
# Convert special characters to proper PML code. High ASCII starts at (\x80, \a128) and goes up to (\xff, \a255)
|
||||
pml2 = pml
|
||||
for k in xrange(128,256):
|
||||
badChar = chr(k)
|
||||
pml2 = pml2.replace(badChar, '\\a%03d' % k)
|
||||
return pml2
|
||||
|
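# --- Illustrative sketch (not part of the original script): cleanPML rewrites each
# --- high-ASCII byte as a PML \aNNN escape and leaves plain ASCII untouched.
#   assert cleanPML('caf' + chr(0xE9)) == 'caf\\a233'
#   assert cleanPML('plain text') == 'plain text'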
||||
def convertEreaderToPml(infile, name, cc, outdir):
|
||||
if not os.path.exists(outdir):
|
||||
os.makedirs(outdir)
|
||||
|
||||
print " Decoding File"
|
||||
sect = Sectionizer(infile, 'PNRdPPrs')
|
||||
er = EreaderProcessor(sect.loadSection, name, cc)
|
||||
|
||||
if er.getNumImages() > 0:
|
||||
print " Extracting images"
|
||||
imagedir = bookname + '_img/'
|
||||
imagedirpath = os.path.join(outdir,imagedir)
|
||||
if not os.path.exists(imagedirpath):
|
||||
os.makedirs(imagedirpath)
|
||||
for i in xrange(er.getNumImages()):
|
||||
name, contents = er.getImage(i)
|
||||
file(os.path.join(imagedirpath, name), 'wb').write(contents)
|
||||
|
||||
print " Extracting pml"
|
||||
pml_string = er.getText()
|
||||
pmlfilename = bookname + ".pml"
|
||||
file(os.path.join(outdir, pmlfilename),'wb').write(cleanPML(pml_string))
|
||||
|
||||
# bkinfo = er.getBookInfo()
|
||||
# if bkinfo != '':
|
||||
# print " Extracting book meta information"
|
||||
# file(os.path.join(outdir, 'bookinfo.txt'),'wb').write(bkinfo)
|
||||
|
||||
|
||||
def usage():
|
||||
print "Converts DRMed eReader books to PML Source"
|
||||
print "Usage:"
|
||||
print " erdr2pml [options] infile.pdb [outdir] \"your name\" credit_card_number "
|
||||
print " "
|
||||
print "Options: "
|
||||
print " -h prints this message"
|
||||
print " --make-pmlz create PMLZ instead of using output directory"
|
||||
print " "
|
||||
print "Note:"
|
||||
print " if omitted, outdir defaults based on 'infile.pdb'"
|
||||
print " It's enough to enter the last 8 digits of the credit card number"
|
||||
return
|
||||
|
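# --- Illustrative sketch (not part of the original script): typical invocations of
# --- this script, with a placeholder file name and account details.
#   python erdr2pml.py mybook.pdb "Your Name" 12341234
#   python erdr2pml.py --make-pmlz mybook.pdb "Your Name" 12341234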
||||
def main(argv=None):
|
||||
global bookname
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "h", ["make-pmlz"])
|
||||
except getopt.GetoptError, err:
|
||||
print str(err)
|
||||
usage()
|
||||
return 1
|
||||
make_pmlz = False
|
||||
zipname = None
|
||||
for o, a in opts:
|
||||
if o == "-h":
|
||||
usage()
|
||||
return 0
|
||||
elif o == "--make-pmlz":
|
||||
make_pmlz = True
|
||||
zipname = ''
|
||||
|
||||
print "eRdr2Pml v%s. Copyright (c) 2009 The Dark Reverser" % __version__
|
||||
|
||||
if len(args)!=3 and len(args)!=4:
|
||||
usage()
|
||||
return 1
|
||||
else:
|
||||
if len(args)==3:
|
||||
infile, name, cc = args[0], args[1], args[2]
|
||||
outdir = infile[:-4] + '_Source'
|
||||
elif len(args)==4:
|
||||
infile, outdir, name, cc = args[0], args[1], args[2], args[3]
|
||||
|
||||
if make_pmlz :
|
||||
# ignore specified outdir, use tempdir instead
|
||||
outdir = tempfile.mkdtemp()
|
||||
|
||||
bookname = os.path.splitext(os.path.basename(infile))[0]
|
||||
|
||||
try:
|
||||
print "Processing..."
|
||||
import time
|
||||
start_time = time.time()
|
||||
convertEreaderToPml(infile, name, cc, outdir)
|
||||
|
||||
if make_pmlz :
|
||||
import zipfile
|
||||
import shutil
|
||||
print " Creating PMLZ file"
|
||||
zipname = infile[:-4] + '.pmlz'
|
||||
myZipFile = zipfile.ZipFile(zipname,'w',zipfile.ZIP_STORED, False)
|
||||
list = os.listdir(outdir)
|
||||
for file in list:
|
||||
localname = file
|
||||
filePath = os.path.join(outdir,file)
|
||||
if os.path.isfile(filePath):
|
||||
myZipFile.write(filePath, localname)
|
||||
elif os.path.isdir(filePath):
|
||||
imageList = os.listdir(filePath)
|
||||
localimgdir = os.path.basename(filePath)
|
||||
for image in imageList:
|
||||
localname = os.path.join(localimgdir,image)
|
||||
imagePath = os.path.join(filePath,image)
|
||||
if os.path.isfile(imagePath):
|
||||
myZipFile.write(imagePath, localname)
|
||||
myZipFile.close()
|
||||
# remove temporary directory
|
||||
shutil.rmtree(outdir)
|
||||
|
||||
end_time = time.time()
|
||||
search_time = end_time - start_time
|
||||
print 'elapsed time: %.2f seconds' % (search_time, )
|
||||
if make_pmlz :
|
||||
print 'output is %s' % zipname
|
||||
else :
|
||||
print 'output in %s' % outdir
|
||||
print "done"
|
||||
except ValueError, e:
|
||||
print "Error: %s" % e
|
||||
return 1
|
||||
return 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
#import cProfile
|
||||
#command = """sys.exit(main())"""
|
||||
#cProfile.runctx( command, globals(), locals(), filename="cprofile.profile" )
|
||||
|
||||
sys.exit(main())
@@ -0,0 +1,54 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco top-level file of the Psyco package.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco -- the Python Specializing Compiler.
|
||||
|
||||
Typical usage: add the following lines to your application's main module,
|
||||
preferably after the other imports:
|
||||
|
||||
try:
|
||||
import psyco
|
||||
psyco.full()
|
||||
except ImportError:
|
||||
print 'Psyco not installed, the program will just run slower'
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
|
||||
#
|
||||
# This module is present to make 'psyco' a package and to
|
||||
# publish the main functions and variables.
|
||||
#
|
||||
# More documentation can be found in core.py.
|
||||
#
|
||||
|
||||
|
||||
# Try to import the dynamic-loading _psyco and report errors
|
||||
try:
|
||||
import _psyco
|
||||
except ImportError, e:
|
||||
extramsg = ''
|
||||
import sys, imp
|
||||
try:
|
||||
file, filename, (suffix, mode, type) = imp.find_module('_psyco', __path__)
|
||||
except ImportError:
|
||||
ext = [suffix for suffix, mode, type in imp.get_suffixes()
|
||||
if type == imp.C_EXTENSION]
|
||||
if ext:
|
||||
extramsg = (" (cannot locate the compiled extension '_psyco%s' "
|
||||
"in the package path '%s')" % (ext[0], '; '.join(__path__)))
|
||||
else:
|
||||
extramsg = (" (check that the compiled extension '%s' is for "
|
||||
"the correct Python version; this is Python %s)" %
|
||||
(filename, sys.version.split()[0]))
|
||||
raise ImportError, str(e) + extramsg
|
||||
|
||||
# Publish important data by importing them in the package
|
||||
from support import __version__, error, warning, _getrealframe, _getemulframe
|
||||
from support import version_info, __version__ as hexversion
|
||||
from core import full, profile, background, runonly, stop, cannotcompile
|
||||
from core import log, bind, unbind, proxy, unproxy, dumpcodebuf
|
||||
from _psyco import setfilter
|
||||
from _psyco import compact, compacttype
|
||||
@@ -0,0 +1,42 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco class support module.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco class support module.
|
||||
|
||||
'psyco.classes.psyobj' is an alternate Psyco-optimized root for classes.
|
||||
Any class inheriting from it or using the metaclass '__metaclass__' might
|
||||
get optimized specifically for Psyco. It is equivalent to call
|
||||
psyco.bind() on the class object after its creation.
|
||||
|
||||
Importing everything from psyco.classes in a module will import the
|
||||
'__metaclass__' name, so all classes defined after a
|
||||
|
||||
from psyco.classes import *
|
||||
|
||||
will automatically use the Psyco-optimized metaclass.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
__all__ = ['psyobj', 'psymetaclass', '__metaclass__']
|
||||
|
||||
|
||||
from _psyco import compacttype
|
||||
import core
|
||||
from types import FunctionType
|
||||
|
||||
class psymetaclass(compacttype):
|
||||
"Psyco-optimized meta-class. Turns all methods into Psyco proxies."
|
||||
|
||||
def __new__(cls, name, bases, dict):
|
||||
bindlist = dict.get('__psyco__bind__')
|
||||
if bindlist is None:
|
||||
bindlist = [key for key, value in dict.items()
|
||||
if isinstance(value, FunctionType)]
|
||||
for attr in bindlist:
|
||||
dict[attr] = core.proxy(dict[attr])
|
||||
return super(psymetaclass, cls).__new__(cls, name, bases, dict)
|
||||
|
||||
psyobj = psymetaclass("psyobj", (), {})
|
||||
__metaclass__ = psymetaclass
|
||||
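# --- Illustrative sketch (not part of this module): opting a class into Psyco
# --- optimization by inheriting from psyobj, as described in the docstring above.
#   from psyco.classes import psyobj
#   class Point(psyobj):
#       def dot(self, other):
#           return self.x * other.x + self.y * other.y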
@@ -0,0 +1,231 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco main functions.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco main functions.
|
||||
|
||||
Here are the routines that you can use from your applications.
|
||||
These are mostly interfaces to the C core, but they depend on
|
||||
the Python version.
|
||||
|
||||
You can use these functions from the 'psyco' module instead of
|
||||
'psyco.core', e.g.
|
||||
|
||||
import psyco
|
||||
psyco.log('/tmp/psyco.log')
|
||||
psyco.profile()
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import _psyco
|
||||
import types
|
||||
from support import *
|
||||
|
||||
newfunction = types.FunctionType
|
||||
newinstancemethod = types.MethodType
|
||||
|
||||
|
||||
# Default charge profiler values
|
||||
default_watermark = 0.09 # between 0.0 (0%) and 1.0 (100%)
|
||||
default_halflife = 0.5 # seconds
|
||||
default_pollfreq_profile = 20 # Hz
|
||||
default_pollfreq_background = 100 # Hz -- a maximum for sleep's resolution
|
||||
default_parentframe = 0.25 # should not be more than 0.5 (50%)
|
||||
|
||||
|
||||
def full(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Compile as much as possible.
|
||||
|
||||
Typical use is for small scripts performing intensive computations
|
||||
or string handling."""
|
||||
import profiler
|
||||
p = profiler.FullCompiler()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def profile(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_profile,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on profiling.
|
||||
|
||||
The 'watermark' parameter controls how easily running functions will
|
||||
be compiled. The smaller the value, the more functions are compiled."""
|
||||
import profiler
|
||||
p = profiler.ActivePassiveProfiler(watermark, halflife,
|
||||
pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def background(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_background,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on passive profiling.
|
||||
|
||||
This is a very lightweight mode in which only intensively computing
|
||||
functions can be detected. The smaller the 'watermark', the more functions
|
||||
are compiled."""
|
||||
import profiler
|
||||
p = profiler.PassiveProfiler(watermark, halflife, pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def runonly(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Nonprofiler.
|
||||
|
||||
XXX check if this is useful and document."""
|
||||
import profiler
|
||||
p = profiler.RunOnly()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def stop():
|
||||
"""Turn off all automatic compilation. bind() calls remain in effect."""
|
||||
import profiler
|
||||
profiler.go([])
|
||||
|
||||
|
||||
def log(logfile='', mode='w', top=10):
|
||||
"""Enable logging to the given file.
|
||||
|
||||
If the file name is unspecified, a default name is built by appending
|
||||
a 'log-psyco' extension to the main script name.
|
||||
|
||||
Mode is 'a' to append to a possibly existing file or 'w' to overwrite
|
||||
an existing file. Note that the log file may grow quickly in 'a' mode."""
|
||||
import profiler, logger
|
||||
if not logfile:
|
||||
import os
|
||||
logfile, dummy = os.path.splitext(sys.argv[0])
|
||||
if os.path.basename(logfile):
|
||||
logfile += '.'
|
||||
logfile += 'log-psyco'
|
||||
if hasattr(_psyco, 'VERBOSE_LEVEL'):
|
||||
print >> sys.stderr, 'psyco: logging to', logfile
|
||||
# logger.current should be a real file object; subtle problems
|
||||
# will show up if its write() and flush() methods are written
|
||||
# in Python, as Psyco will invoke them while compiling.
|
||||
logger.current = open(logfile, mode)
|
||||
logger.print_charges = top
|
||||
profiler.logger = logger
|
||||
logger.writedate('Logging started')
|
||||
cannotcompile(logger.psycowrite)
|
||||
_psyco.statwrite(logger=logger.psycowrite)
|
||||
|
||||
|
||||
def bind(x, rec=None):
|
||||
"""Enable compilation of the given function, method, or class object.
|
||||
|
||||
If C is a class (or anything with a '__dict__' attribute), bind(C) will
|
||||
rebind all functions and methods found in C.__dict__ (which means, for
|
||||
classes, all methods defined in the class but not in its parents).
|
||||
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
x.func_code = _psyco.proxycode(x)
|
||||
else:
|
||||
x.func_code = _psyco.proxycode(x, rec)
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
funcs = [o for o in x.__dict__.values()
|
||||
if isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)]
|
||||
if not funcs:
|
||||
raise error, ("nothing bindable found in %s object" %
|
||||
type(x).__name__)
|
||||
for o in funcs:
|
||||
bind(o, rec)
|
||||
return
|
||||
raise TypeError, "cannot bind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unbind(x):
|
||||
"""Reverse of bind()."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
try:
|
||||
f = _psyco.unproxycode(x.func_code)
|
||||
except error:
|
||||
pass
|
||||
else:
|
||||
x.func_code = f.func_code
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
for o in x.__dict__.values():
|
||||
if (isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)):
|
||||
unbind(o)
|
||||
return
|
||||
raise TypeError, "cannot unbind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def proxy(x, rec=None):
|
||||
"""Return a Psyco-enabled copy of the function.
|
||||
|
||||
The original function is still available for non-compiled calls.
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
code = _psyco.proxycode(x)
|
||||
else:
|
||||
code = _psyco.proxycode(x, rec)
|
||||
return newfunction(code, x.func_globals, x.func_name)
|
||||
if isinstance(x, types.MethodType):
|
||||
p = proxy(x.im_func, rec)
|
||||
return newinstancemethod(p, x.im_self, x.im_class)
|
||||
raise TypeError, "cannot proxy %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unproxy(proxy):
|
||||
"""Return a new copy of the original function of method behind a proxy.
|
||||
The result behaves like the original function in that calling it
|
||||
does not trigger compilation nor execution of any compiled code."""
|
||||
if isinstance(proxy, types.FunctionType):
|
||||
return _psyco.unproxycode(proxy.func_code)
|
||||
if isinstance(proxy, types.MethodType):
|
||||
f = unproxy(proxy.im_func)
|
||||
return newinstancemethod(f, proxy.im_self, proxy.im_class)
|
||||
raise TypeError, "%s objects cannot be proxies" % type(proxy).__name__
|
||||
|
||||
|
||||
def cannotcompile(x):
|
||||
"""Instruct Psyco never to compile the given function, method
|
||||
or code object."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
x = x.func_code
|
||||
if isinstance(x, types.CodeType):
|
||||
_psyco.cannotcompile(x)
|
||||
else:
|
||||
raise TypeError, "unexpected %s object" % type(x).__name__
|
||||
|
||||
|
||||
def dumpcodebuf():
|
||||
"""Write in file psyco.dump a copy of the emitted machine code,
|
||||
provided Psyco was compiled with a non-zero CODE_DUMP.
|
||||
See py-utils/httpxam.py to examine psyco.dump."""
|
||||
if hasattr(_psyco, 'dumpcodebuf'):
|
||||
_psyco.dumpcodebuf()
|
||||
|
||||
|
||||
###########################################################################
|
||||
# Psyco variables
|
||||
# error * the error raised by Psyco
|
||||
# warning * the warning raised by Psyco
|
||||
# __in_psyco__ * a new built-in variable which is always zero, but which
|
||||
# Psyco special-cases by returning 1 instead. So
|
||||
# __in_psyco__ can be used in a function to know if
|
||||
# that function is being executed by Psyco or not.
|
||||
@@ -0,0 +1,133 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Support code for the 'psyco.compact' type.
|
||||
|
||||
from __future__ import generators
|
||||
|
||||
try:
|
||||
from UserDict import DictMixin
|
||||
except ImportError:
|
||||
|
||||
# backported from Python 2.3 to Python 2.2
|
||||
class DictMixin:
|
||||
# Mixin defining all dictionary methods for classes that already have
|
||||
# a minimum dictionary interface including getitem, setitem, delitem,
|
||||
# and keys. Without knowledge of the subclass constructor, the mixin
|
||||
# does not define __init__() or copy(). In addition to the four base
|
||||
# methods, progressively more efficiency comes with defining
|
||||
# __contains__(), __iter__(), and iteritems().
|
||||
|
||||
# second level definitions support higher levels
|
||||
def __iter__(self):
|
||||
for k in self.keys():
|
||||
yield k
|
||||
def has_key(self, key):
|
||||
try:
|
||||
value = self[key]
|
||||
except KeyError:
|
||||
return False
|
||||
return True
|
||||
def __contains__(self, key):
|
||||
return self.has_key(key)
|
||||
|
||||
# third level takes advantage of second level definitions
|
||||
def iteritems(self):
|
||||
for k in self:
|
||||
yield (k, self[k])
|
||||
def iterkeys(self):
|
||||
return self.__iter__()
|
||||
|
||||
# fourth level uses definitions from lower levels
|
||||
def itervalues(self):
|
||||
for _, v in self.iteritems():
|
||||
yield v
|
||||
def values(self):
|
||||
return [v for _, v in self.iteritems()]
|
||||
def items(self):
|
||||
return list(self.iteritems())
|
||||
def clear(self):
|
||||
for key in self.keys():
|
||||
del self[key]
|
||||
def setdefault(self, key, default):
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
self[key] = default
|
||||
return default
|
||||
def pop(self, key, *args):
|
||||
if len(args) > 1:
|
||||
raise TypeError, "pop expected at most 2 arguments, got "\
|
||||
+ repr(1 + len(args))
|
||||
try:
|
||||
value = self[key]
|
||||
except KeyError:
|
||||
if args:
|
||||
return args[0]
|
||||
raise
|
||||
del self[key]
|
||||
return value
|
||||
def popitem(self):
|
||||
try:
|
||||
k, v = self.iteritems().next()
|
||||
except StopIteration:
|
||||
raise KeyError, 'container is empty'
|
||||
del self[k]
|
||||
return (k, v)
|
||||
def update(self, other):
|
||||
# Make progressively weaker assumptions about "other"
|
||||
if hasattr(other, 'iteritems'): # iteritems saves memory and lookups
|
||||
for k, v in other.iteritems():
|
||||
self[k] = v
|
||||
elif hasattr(other, '__iter__'): # iter saves memory
|
||||
for k in other:
|
||||
self[k] = other[k]
|
||||
else:
|
||||
for k in other.keys():
|
||||
self[k] = other[k]
|
||||
def get(self, key, default=None):
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
return default
|
||||
def __repr__(self):
|
||||
return repr(dict(self.iteritems()))
|
||||
def __cmp__(self, other):
|
||||
if other is None:
|
||||
return 1
|
||||
if isinstance(other, DictMixin):
|
||||
other = dict(other.iteritems())
|
||||
return cmp(dict(self.iteritems()), other)
|
||||
def __len__(self):
|
||||
return len(self.keys())
|
||||
|
||||
###########################################################################
|
||||
|
||||
from _psyco import compact
|
||||
|
||||
|
||||
class compactdictproxy(DictMixin):
|
||||
|
||||
def __init__(self, ko):
|
||||
self._ko = ko # compact object of which 'self' is the dict
|
||||
|
||||
def __getitem__(self, key):
|
||||
return compact.__getslot__(self._ko, key)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
compact.__setslot__(self._ko, key, value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
compact.__delslot__(self._ko, key)
|
||||
|
||||
def keys(self):
|
||||
return compact.__members__.__get__(self._ko)
|
||||
|
||||
def clear(self):
|
||||
keys = self.keys()
|
||||
keys.reverse()
|
||||
for key in keys:
|
||||
del self[key]
|
||||
|
||||
def __repr__(self):
|
||||
keys = ', '.join(self.keys())
|
||||
return '<compactdictproxy object {%s}>' % (keys,)
|
||||
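# Example: as the comment at the top of DictMixin says, a subclass only has to
# supply __getitem__, __setitem__, __delitem__ and keys(), and the mixin derives
# the rest of the mapping API -- which is exactly what compactdictproxy above
# does over a compact object's slots. Illustrative sketch only (SlotStore is a
# made-up toy class, not part of the plugin):
from UserDict import DictMixin

class SlotStore(DictMixin):
    # toy mapping: only the four base methods are written by hand
    def __init__(self):
        self._keys, self._vals = [], []
    def __getitem__(self, key):
        try:
            return self._vals[self._keys.index(key)]
        except ValueError:
            raise KeyError(key)
    def __setitem__(self, key, value):
        if key in self._keys:
            self._vals[self._keys.index(key)] = value
        else:
            self._keys.append(key)
            self._vals.append(value)
    def __delitem__(self, key):
        if key not in self._keys:
            raise KeyError(key)
        i = self._keys.index(key)
        del self._keys[i]
        del self._vals[i]
    def keys(self):
        return list(self._keys)

s = SlotStore()
s['a'] = 1
s.update({'b': 2})                    # update() comes from DictMixin
print s.items(), s.get('c', 'none')   # [('a', 1), ('b', 2)] none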
@@ -0,0 +1,96 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco logger.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco logger.
|
||||
|
||||
See log() in core.py.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
|
||||
import _psyco
|
||||
from time import time, localtime, strftime
|
||||
|
||||
|
||||
current = None
|
||||
print_charges = 10
|
||||
dump_delay = 0.2
|
||||
dump_last = 0.0
|
||||
|
||||
def write(s, level):
|
||||
t = time()
|
||||
f = t-int(t)
|
||||
try:
|
||||
current.write("%s.%02d %-*s%s\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0), 63-level, s,
|
||||
"%"*level))
|
||||
current.flush()
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
def psycowrite(s):
|
||||
t = time()
|
||||
f = t-int(t)
|
||||
try:
|
||||
current.write("%s.%02d %-*s%s\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0), 60, s.strip(),
|
||||
"% %"))
|
||||
current.flush()
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
##def writelines(lines, level=0):
|
||||
## if lines:
|
||||
## t = time()
|
||||
## f = t-int(t)
|
||||
## timedesc = strftime("%x %X", localtime(int(t)))
|
||||
## print >> current, "%s.%03d %-*s %s" % (
|
||||
## timedesc, int(f*1000),
|
||||
## 50-level, lines[0],
|
||||
## "+"*level)
|
||||
## timedesc = " " * (len(timedesc)+5)
|
||||
## for line in lines[1:]:
|
||||
## print >> current, timedesc, line
|
||||
|
||||
def writememory():
|
||||
write("memory usage: %d+ kb" % _psyco.memory(), 1)
|
||||
|
||||
def dumpcharges():
|
||||
global dump_last
|
||||
if print_charges:
|
||||
t = time()
|
||||
if not (dump_last <= t < dump_last+dump_delay):
|
||||
if t <= dump_last+1.5*dump_delay:
|
||||
dump_last += dump_delay
|
||||
else:
|
||||
dump_last = t
|
||||
#write("%s: charges:" % who, 0)
|
||||
lst = _psyco.stattop(print_charges)
|
||||
if lst:
|
||||
f = t-int(t)
|
||||
lines = ["%s.%02d ______\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0))]
|
||||
i = 1
|
||||
for co, charge in lst:
|
||||
detail = co.co_filename
|
||||
if len(detail) > 19:
|
||||
detail = '...' + detail[-17:]
|
||||
lines.append(" #%-3d |%4.1f %%| %-26s%20s:%d\n" %
|
||||
(i, charge*100.0, co.co_name, detail,
|
||||
co.co_firstlineno))
|
||||
i += 1
|
||||
current.writelines(lines)
|
||||
current.flush()
|
||||
|
||||
def writefinalstats():
|
||||
dumpcharges()
|
||||
writememory()
|
||||
writedate("program exit")
|
||||
|
||||
def writedate(msg):
|
||||
write('%s, %s' % (msg, strftime("%x")), 20)
|
||||
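# Example: write() above emits one line per event -- wall-clock time, a two-digit
# fraction of a second, the message left-justified in a field that shrinks as the
# level grows, then '%' repeated 'level' times. Driving it by hand (normally
# core.log() opens the real log file and sets logger.current; assumes the
# compiled _psyco extension loads; the message text here is just illustrative):
import sys
import psyco.logger as plog

plog.current = sys.stdout
plog.write('bind function: mymod.f', 1)
# prints something like "21:14:05.37 bind function: mymod.f ... %"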
@@ -0,0 +1,379 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco profiler (Python part).
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco profiler (Python part).
|
||||
|
||||
The implementation of the non-time-critical parts of the profiler.
|
||||
See profile() and full() in core.py for the easy interface.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import _psyco
|
||||
from support import *
|
||||
import math, time, types, atexit
|
||||
now = time.time
|
||||
try:
|
||||
import thread
|
||||
except ImportError:
|
||||
import dummy_thread as thread
|
||||
|
||||
|
||||
# current profiler instance
|
||||
current = None
|
||||
|
||||
# enabled profilers, in order of priority
|
||||
profilers = []
|
||||
|
||||
# logger module (when enabled by core.log())
|
||||
logger = None
|
||||
|
||||
# a lock for a thread-safe go()
|
||||
go_lock = thread.allocate_lock()
|
||||
|
||||
def go(stop=0):
|
||||
# run the highest-priority profiler in 'profilers'
|
||||
global current
|
||||
go_lock.acquire()
|
||||
try:
|
||||
prev = current
|
||||
if stop:
|
||||
del profilers[:]
|
||||
if prev:
|
||||
if profilers and profilers[0] is prev:
|
||||
return # best profiler already running
|
||||
prev.stop()
|
||||
current = None
|
||||
for p in profilers[:]:
|
||||
if p.start():
|
||||
current = p
|
||||
if logger: # and p is not prev:
|
||||
logger.write("%s: starting" % p.__class__.__name__, 5)
|
||||
return
|
||||
finally:
|
||||
go_lock.release()
|
||||
# no profiler is running now
|
||||
if stop:
|
||||
if logger:
|
||||
logger.writefinalstats()
|
||||
else:
|
||||
tag2bind()
|
||||
|
||||
atexit.register(go, 1)
|
||||
|
||||
|
||||
def buildfncache(globals, cache):
|
||||
if hasattr(types.IntType, '__dict__'):
|
||||
clstypes = (types.ClassType, types.TypeType)
|
||||
else:
|
||||
clstypes = types.ClassType
|
||||
for x in globals.values():
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
cache[x.func_code] = x, ''
|
||||
elif isinstance(x, clstypes):
|
||||
for y in x.__dict__.values():
|
||||
if isinstance(y, types.MethodType):
|
||||
y = y.im_func
|
||||
if isinstance(y, types.FunctionType):
|
||||
cache[y.func_code] = y, x.__name__
|
||||
|
||||
# code-to-function mapping (cache)
|
||||
function_cache = {}
|
||||
|
||||
def trytobind(co, globals, log=1):
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
buildfncache(globals, function_cache)
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
if logger:
|
||||
logger.write('warning: cannot find function %s in %s' %
|
||||
(co.co_name, globals.get('__name__', '?')), 3)
|
||||
return # give up
|
||||
if logger and log:
|
||||
modulename = globals.get('__name__', '?')
|
||||
if clsname:
|
||||
modulename += '.' + clsname
|
||||
logger.write('bind function: %s.%s' % (modulename, co.co_name), 1)
|
||||
f.func_code = _psyco.proxycode(f)
|
||||
|
||||
|
||||
# the list of code objects that have been tagged
|
||||
tagged_codes = []
|
||||
|
||||
def tag(co, globals):
|
||||
if logger:
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
buildfncache(globals, function_cache)
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
clsname = '' # give up
|
||||
modulename = globals.get('__name__', '?')
|
||||
if clsname:
|
||||
modulename += '.' + clsname
|
||||
logger.write('tag function: %s.%s' % (modulename, co.co_name), 1)
|
||||
tagged_codes.append((co, globals))
|
||||
_psyco.turbo_frame(co)
|
||||
_psyco.turbo_code(co)
|
||||
|
||||
def tag2bind():
|
||||
if tagged_codes:
|
||||
if logger:
|
||||
logger.write('profiling stopped, binding %d functions' %
|
||||
len(tagged_codes), 2)
|
||||
for co, globals in tagged_codes:
|
||||
trytobind(co, globals, 0)
|
||||
function_cache.clear()
|
||||
del tagged_codes[:]
|
||||
|
||||
|
||||
class Profiler:
|
||||
MemoryTimerResolution = 0.103
|
||||
|
||||
def run(self, memory, time, memorymax, timemax):
|
||||
self.memory = memory
|
||||
self.memorymax = memorymax
|
||||
self.time = time
|
||||
if timemax is None:
|
||||
self.endtime = None
|
||||
else:
|
||||
self.endtime = now() + timemax
|
||||
self.alarms = []
|
||||
profilers.append(self)
|
||||
go()
|
||||
|
||||
def start(self):
|
||||
curmem = _psyco.memory()
|
||||
memlimits = []
|
||||
if self.memorymax is not None:
|
||||
if curmem >= self.memorymax:
|
||||
if logger:
|
||||
logger.writememory()
|
||||
return self.limitreached('memorymax')
|
||||
memlimits.append(self.memorymax)
|
||||
if self.memory is not None:
|
||||
if self.memory <= 0:
|
||||
if logger:
|
||||
logger.writememory()
|
||||
return self.limitreached('memory')
|
||||
memlimits.append(curmem + self.memory)
|
||||
self.memory_at_start = curmem
|
||||
|
||||
curtime = now()
|
||||
timelimits = []
|
||||
if self.endtime is not None:
|
||||
if curtime >= self.endtime:
|
||||
return self.limitreached('timemax')
|
||||
timelimits.append(self.endtime - curtime)
|
||||
if self.time is not None:
|
||||
if self.time <= 0.0:
|
||||
return self.limitreached('time')
|
||||
timelimits.append(self.time)
|
||||
self.time_at_start = curtime
|
||||
|
||||
try:
|
||||
self.do_start()
|
||||
except error, e:
|
||||
if logger:
|
||||
logger.write('%s: disabled by psyco.error:' % (
|
||||
self.__class__.__name__), 4)
|
||||
logger.write(' %s' % str(e), 3)
|
||||
return 0
|
||||
|
||||
if memlimits:
|
||||
self.memlimits_args = (time.sleep, (self.MemoryTimerResolution,),
|
||||
self.check_memory, (min(memlimits),))
|
||||
self.alarms.append(_psyco.alarm(*self.memlimits_args))
|
||||
if timelimits:
|
||||
self.alarms.append(_psyco.alarm(time.sleep, (min(timelimits),),
|
||||
self.time_out))
|
||||
return 1
|
||||
|
||||
def stop(self):
|
||||
for alarm in self.alarms:
|
||||
alarm.stop(0)
|
||||
for alarm in self.alarms:
|
||||
alarm.stop(1) # wait for parallel threads to stop
|
||||
del self.alarms[:]
|
||||
if self.time is not None:
|
||||
self.time -= now() - self.time_at_start
|
||||
if self.memory is not None:
|
||||
self.memory -= _psyco.memory() - self.memory_at_start
|
||||
|
||||
try:
|
||||
self.do_stop()
|
||||
except error:
|
||||
return 0
|
||||
return 1
|
||||
|
||||
def check_memory(self, limit):
|
||||
if _psyco.memory() < limit:
|
||||
return self.memlimits_args
|
||||
go()
|
||||
|
||||
def time_out(self):
|
||||
self.time = 0.0
|
||||
go()
|
||||
|
||||
def limitreached(self, limitname):
|
||||
try:
|
||||
profilers.remove(self)
|
||||
except ValueError:
|
||||
pass
|
||||
if logger:
|
||||
logger.write('%s: disabled (%s limit reached)' % (
|
||||
self.__class__.__name__, limitname), 4)
|
||||
return 0
|
||||
|
||||
|
||||
class FullCompiler(Profiler):
|
||||
|
||||
def do_start(self):
|
||||
_psyco.profiling('f')
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
|
||||
|
||||
class RunOnly(Profiler):
|
||||
|
||||
def do_start(self):
|
||||
_psyco.profiling('n')
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
|
||||
|
||||
class ChargeProfiler(Profiler):
|
||||
|
||||
def __init__(self, watermark, parentframe):
|
||||
self.watermark = watermark
|
||||
self.parent2 = parentframe * 2.0
|
||||
self.lock = thread.allocate_lock()
|
||||
|
||||
def init_charges(self):
|
||||
_psyco.statwrite(watermark = self.watermark,
|
||||
parent2 = self.parent2)
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
_psyco.statwrite(callback = None)
|
||||
|
||||
|
||||
class ActiveProfiler(ChargeProfiler):
|
||||
|
||||
def active_start(self):
|
||||
_psyco.profiling('p')
|
||||
|
||||
def do_start(self):
|
||||
self.init_charges()
|
||||
self.active_start()
|
||||
_psyco.statwrite(callback = self.charge_callback)
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
tag(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
class PassiveProfiler(ChargeProfiler):
|
||||
|
||||
initial_charge_unit = _psyco.statread('unit')
|
||||
reset_stats_after = 120 # half-lives (maximum 200!)
|
||||
reset_limit = initial_charge_unit * (2.0 ** reset_stats_after)
|
||||
|
||||
def __init__(self, watermark, halflife, pollfreq, parentframe):
|
||||
ChargeProfiler.__init__(self, watermark, parentframe)
|
||||
self.pollfreq = pollfreq
|
||||
# self.progress is slightly more than 1.0, and computed so that
|
||||
# do_profile() will double the charge_unit every 'halflife' seconds.
|
||||
self.progress = 2.0 ** (1.0 / (halflife * pollfreq))
|
||||
|
||||
def reset(self):
|
||||
_psyco.statwrite(unit = self.initial_charge_unit, callback = None)
|
||||
_psyco.statreset()
|
||||
if logger:
|
||||
logger.write("%s: resetting stats" % self.__class__.__name__, 1)
|
||||
|
||||
def passive_start(self):
|
||||
self.passivealarm_args = (time.sleep, (1.0 / self.pollfreq,),
|
||||
self.do_profile)
|
||||
self.alarms.append(_psyco.alarm(*self.passivealarm_args))
|
||||
|
||||
def do_start(self):
|
||||
tag2bind()
|
||||
self.init_charges()
|
||||
self.passive_start()
|
||||
|
||||
def do_profile(self):
|
||||
_psyco.statcollect()
|
||||
if logger:
|
||||
logger.dumpcharges()
|
||||
nunit = _psyco.statread('unit') * self.progress
|
||||
if nunit > self.reset_limit:
|
||||
self.reset()
|
||||
else:
|
||||
_psyco.statwrite(unit = nunit, callback = self.charge_callback)
|
||||
return self.passivealarm_args
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
trytobind(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
class ActivePassiveProfiler(PassiveProfiler, ActiveProfiler):
|
||||
|
||||
def do_start(self):
|
||||
self.init_charges()
|
||||
self.active_start()
|
||||
self.passive_start()
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
tag(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
|
||||
#
|
||||
# we register our own version of sys.settrace(), sys.setprofile()
|
||||
# and thread.start_new_thread().
|
||||
#
|
||||
|
||||
def psyco_settrace(*args, **kw):
|
||||
"This is the Psyco-aware version of sys.settrace()."
|
||||
result = original_settrace(*args, **kw)
|
||||
go()
|
||||
return result
|
||||
|
||||
def psyco_setprofile(*args, **kw):
|
||||
"This is the Psyco-aware version of sys.setprofile()."
|
||||
result = original_setprofile(*args, **kw)
|
||||
go()
|
||||
return result
|
||||
|
||||
def psyco_thread_stub(callable, args, kw):
|
||||
_psyco.statcollect()
|
||||
if kw is None:
|
||||
return callable(*args)
|
||||
else:
|
||||
return callable(*args, **kw)
|
||||
|
||||
def psyco_start_new_thread(callable, args, kw=None):
|
||||
"This is the Psyco-aware version of thread.start_new_thread()."
|
||||
return original_start_new_thread(psyco_thread_stub, (callable, args, kw))
|
||||
|
||||
original_settrace = sys.settrace
|
||||
original_setprofile = sys.setprofile
|
||||
original_start_new_thread = thread.start_new_thread
|
||||
sys.settrace = psyco_settrace
|
||||
sys.setprofile = psyco_setprofile
|
||||
thread.start_new_thread = psyco_start_new_thread
|
||||
# hack to patch threading._start_new_thread if the module is
|
||||
# already loaded
|
||||
if ('threading' in sys.modules and
|
||||
hasattr(sys.modules['threading'], '_start_new_thread')):
|
||||
sys.modules['threading']._start_new_thread = psyco_start_new_thread
|
||||
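# Quick check of the PassiveProfiler arithmetic above: self.progress is chosen as
# 2**(1/(halflife*pollfreq)) so that multiplying the charge unit by it once per
# poll doubles the unit every 'halflife' seconds. Using the default values
# defined in core.py (0.5 s half-life, 20 Hz polling):
halflife = 0.5     # default_halflife in core.py
pollfreq = 20      # default_pollfreq_profile in core.py
progress = 2.0 ** (1.0 / (halflife * pollfreq))

unit = 1.0
for _ in xrange(int(halflife * pollfreq)):   # polls made during one half-life
    unit *= progress
print unit                                   # ~2.0 -- the unit has doubled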
@@ -0,0 +1,191 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco general support module.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco general support module.
|
||||
|
||||
For internal use.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import sys, _psyco, __builtin__
|
||||
|
||||
error = _psyco.error
|
||||
class warning(Warning):
|
||||
pass
|
||||
|
||||
_psyco.NoLocalsWarning = warning
|
||||
|
||||
def warn(msg):
|
||||
from warnings import warn
|
||||
warn(msg, warning, stacklevel=2)
|
||||
|
||||
#
|
||||
# Version checks
|
||||
#
|
||||
__version__ = 0x010600f0
|
||||
if _psyco.PSYVER != __version__:
|
||||
raise error, "version mismatch between Psyco parts, reinstall it"
|
||||
|
||||
version_info = (__version__ >> 24,
|
||||
(__version__ >> 16) & 0xff,
|
||||
(__version__ >> 8) & 0xff,
|
||||
{0xa0: 'alpha',
|
||||
0xb0: 'beta',
|
||||
0xc0: 'candidate',
|
||||
0xf0: 'final'}[__version__ & 0xf0],
|
||||
__version__ & 0xf)
|
||||
|
||||
|
||||
VERSION_LIMITS = [0x02020200, # 2.2.2
|
||||
0x02030000, # 2.3
|
||||
0x02040000] # 2.4
|
||||
|
||||
if ([v for v in VERSION_LIMITS if v <= sys.hexversion] !=
|
||||
[v for v in VERSION_LIMITS if v <= _psyco.PYVER ]):
|
||||
if sys.hexversion < VERSION_LIMITS[0]:
|
||||
warn("Psyco requires Python version 2.2.2 or later")
|
||||
else:
|
||||
warn("Psyco version does not match Python version. "
|
||||
"Psyco must be updated or recompiled")
|
||||
|
||||
|
||||
if hasattr(_psyco, 'ALL_CHECKS') and hasattr(_psyco, 'VERBOSE_LEVEL'):
|
||||
print >> sys.stderr, ('psyco: running in debugging mode on %s' %
|
||||
_psyco.PROCESSOR)
|
||||
|
||||
|
||||
###########################################################################
|
||||
# sys._getframe() gives strange results on a mixed Psyco- and Python-style
|
||||
# stack frame. Psyco provides a replacement that partially emulates Python
|
||||
# frames from Psyco frames. The new sys._getframe() may return objects of
|
||||
# a custom "Psyco frame" type, which is a subtype of the normal frame type.
|
||||
#
|
||||
# The same problems require some other built-in functions to be replaced
|
||||
# as well. Note that the local variables are not available in any
|
||||
# dictionary with Psyco.
|
||||
|
||||
|
||||
class Frame:
|
||||
pass
|
||||
|
||||
|
||||
class PythonFrame(Frame):
|
||||
|
||||
def __init__(self, frame):
|
||||
self.__dict__.update({
|
||||
'_frame': frame,
|
||||
})
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'f_back':
|
||||
try:
|
||||
result = embedframe(_psyco.getframe(self._frame))
|
||||
except ValueError:
|
||||
result = None
|
||||
except error:
|
||||
warn("f_back is skipping dead Psyco frames")
|
||||
result = self._frame.f_back
|
||||
self.__dict__['f_back'] = result
|
||||
return result
|
||||
else:
|
||||
return getattr(self._frame, attr)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
setattr(self._frame, attr, value)
|
||||
|
||||
def __delattr__(self, attr):
|
||||
delattr(self._frame, attr)
|
||||
|
||||
|
||||
class PsycoFrame(Frame):
|
||||
|
||||
def __init__(self, tag):
|
||||
self.__dict__.update({
|
||||
'_tag' : tag,
|
||||
'f_code' : tag[0],
|
||||
'f_globals': tag[1],
|
||||
})
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'f_back':
|
||||
try:
|
||||
result = embedframe(_psyco.getframe(self._tag))
|
||||
except ValueError:
|
||||
result = None
|
||||
elif attr == 'f_lineno':
|
||||
result = self.f_code.co_firstlineno # better than nothing
|
||||
elif attr == 'f_builtins':
|
||||
result = self.f_globals['__builtins__']
|
||||
elif attr == 'f_restricted':
|
||||
result = self.f_builtins is not __builtins__
|
||||
elif attr == 'f_locals':
|
||||
raise AttributeError, ("local variables of functions run by Psyco "
|
||||
"cannot be accessed in any way, sorry")
|
||||
else:
|
||||
raise AttributeError, ("emulated Psyco frames have "
|
||||
"no '%s' attribute" % attr)
|
||||
self.__dict__[attr] = result
|
||||
return result
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
raise AttributeError, "Psyco frame objects are read-only"
|
||||
|
||||
def __delattr__(self, attr):
|
||||
if attr == 'f_trace':
|
||||
# for bdb which relies on CPython frames exhibiting a slightly
|
||||
# buggy behavior: you can 'del f.f_trace' as often as you like
|
||||
# even without having set it previously.
|
||||
return
|
||||
raise AttributeError, "Psyco frame objects are read-only"
|
||||
|
||||
|
||||
def embedframe(result):
|
||||
if type(result) is type(()):
|
||||
return PsycoFrame(result)
|
||||
else:
|
||||
return PythonFrame(result)
|
||||
|
||||
def _getframe(depth=0):
|
||||
"""Return a frame object from the call stack. This is a replacement for
|
||||
sys._getframe() which is aware of Psyco frames.
|
||||
|
||||
The returned objects are instances of either PythonFrame or PsycoFrame
|
||||
instead of being real Python-level frame object, so that they can emulate
|
||||
the common attributes of frame objects.
|
||||
|
||||
The original sys._getframe() ignoring Psyco frames altogether is stored in
|
||||
psyco._getrealframe(). See also psyco._getemulframe()."""
|
||||
# 'depth+1' to account for this _getframe() Python function
|
||||
return embedframe(_psyco.getframe(depth+1))
|
||||
|
||||
def _getemulframe(depth=0):
|
||||
"""As _getframe(), but the returned objects are real Python frame objects
|
||||
emulating Psyco frames. Some of their attributes can be wrong or missing,
|
||||
however."""
|
||||
# 'depth+1' to account for this _getemulframe() Python function
|
||||
return _psyco.getframe(depth+1, 1)
|
||||
|
||||
def patch(name, module=__builtin__):
|
||||
f = getattr(_psyco, name)
|
||||
org = getattr(module, name)
|
||||
if org is not f:
|
||||
setattr(module, name, f)
|
||||
setattr(_psyco, 'original_' + name, org)
|
||||
|
||||
_getrealframe = sys._getframe
|
||||
sys._getframe = _getframe
|
||||
patch('globals')
|
||||
patch('eval')
|
||||
patch('execfile')
|
||||
patch('locals')
|
||||
patch('vars')
|
||||
patch('dir')
|
||||
patch('input')
|
||||
_psyco.original_raw_input = raw_input
|
||||
__builtin__.__in_psyco__ = 0==1 # False
|
||||
|
||||
if hasattr(_psyco, 'compact'):
|
||||
import kdictproxy
|
||||
_psyco.compactdictproxy = kdictproxy.compactdictproxy
|
||||
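# Example of the frame replacement support.py installs: sys._getframe() now
# returns Psyco-aware wrapper objects, while the untouched CPython version is
# kept as psyco._getrealframe(). Sketch only; assumes the bundled package
# imports on this platform:
import sys, psyco

def where():
    f = sys._getframe()   # may be a PythonFrame/PsycoFrame wrapper, not a raw frame
    return f.f_code.co_name

print where()                                 # 'where'
print sys._getframe is psyco._getrealframe    # False: the original was saved aside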
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/__init__.py (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco top-level file of the Psyco package.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco -- the Python Specializing Compiler.
|
||||
|
||||
Typical usage: add the following lines to your application's main module,
|
||||
preferably after the other imports:
|
||||
|
||||
try:
|
||||
import psyco
|
||||
psyco.full()
|
||||
except ImportError:
|
||||
print 'Psyco not installed, the program will just run slower'
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
|
||||
#
|
||||
# This module is present to make 'psyco' a package and to
|
||||
# publish the main functions and variables.
|
||||
#
|
||||
# More documentation can be found in core.py.
|
||||
#
|
||||
|
||||
|
||||
# Try to import the dynamic-loading _psyco and report errors
|
||||
try:
|
||||
import _psyco
|
||||
except ImportError, e:
|
||||
extramsg = ''
|
||||
import sys, imp
|
||||
try:
|
||||
file, filename, (suffix, mode, type) = imp.find_module('_psyco', __path__)
|
||||
except ImportError:
|
||||
ext = [suffix for suffix, mode, type in imp.get_suffixes()
|
||||
if type == imp.C_EXTENSION]
|
||||
if ext:
|
||||
extramsg = (" (cannot locate the compiled extension '_psyco%s' "
|
||||
"in the package path '%s')" % (ext[0], '; '.join(__path__)))
|
||||
else:
|
||||
extramsg = (" (check that the compiled extension '%s' is for "
|
||||
"the correct Python version; this is Python %s)" %
|
||||
(filename, sys.version.split()[0]))
|
||||
raise ImportError, str(e) + extramsg
|
||||
|
||||
# Publish important data by importing them in the package
|
||||
from support import __version__, error, warning, _getrealframe, _getemulframe
|
||||
from support import version_info, __version__ as hexversion
|
||||
from core import full, profile, background, runonly, stop, cannotcompile
|
||||
from core import log, bind, unbind, proxy, unproxy, dumpcodebuf
|
||||
from _psyco import setfilter
|
||||
from _psyco import compact, compacttype
|
||||
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/classes.py (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco class support module.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco class support module.
|
||||
|
||||
'psyco.classes.psyobj' is an alternate Psyco-optimized root for classes.
|
||||
Any class inheriting from it or using the metaclass '__metaclass__' might
|
||||
get optimized specifically for Psyco. It is equivalent to call
|
||||
psyco.bind() on the class object after its creation.
|
||||
|
||||
Importing everything from psyco.classes in a module will import the
|
||||
'__metaclass__' name, so all classes defined after a
|
||||
|
||||
from psyco.classes import *
|
||||
|
||||
will automatically use the Psyco-optimized metaclass.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
__all__ = ['psyobj', 'psymetaclass', '__metaclass__']
|
||||
|
||||
|
||||
from _psyco import compacttype
|
||||
import core
|
||||
from types import FunctionType
|
||||
|
||||
class psymetaclass(compacttype):
|
||||
"Psyco-optimized meta-class. Turns all methods into Psyco proxies."
|
||||
|
||||
def __new__(cls, name, bases, dict):
|
||||
bindlist = dict.get('__psyco__bind__')
|
||||
if bindlist is None:
|
||||
bindlist = [key for key, value in dict.items()
|
||||
if isinstance(value, FunctionType)]
|
||||
for attr in bindlist:
|
||||
dict[attr] = core.proxy(dict[attr])
|
||||
return super(psymetaclass, cls).__new__(cls, name, bases, dict)
|
||||
|
||||
psyobj = psymetaclass("psyobj", (), {})
|
||||
__metaclass__ = psymetaclass
|
||||
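# Example of what the psyco.classes docstring above describes: deriving from
# psyobj (or using the exported __metaclass__) is equivalent to calling
# psyco.bind() on the class right after it is created. Sketch only; Vector is a
# made-up example class and the bundled package is assumed to import on this
# platform:
from psyco.classes import psyobj

class Vector(psyobj):
    def __init__(self, x, y):
        self.x, self.y = x, y
    def dot(self, other):
        return self.x * other.x + self.y * other.y

print Vector(1, 2).dot(Vector(3, 4))   # 11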
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/core.py (new file, 231 lines)
@@ -0,0 +1,231 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco main functions.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco main functions.
|
||||
|
||||
Here are the routines that you can use from your applications.
|
||||
These are mostly interfaces to the C core, but they depend on
|
||||
the Python version.
|
||||
|
||||
You can use these functions from the 'psyco' module instead of
|
||||
'psyco.core', e.g.
|
||||
|
||||
import psyco
|
||||
psyco.log('/tmp/psyco.log')
|
||||
psyco.profile()
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import _psyco
|
||||
import types
|
||||
from support import *
|
||||
|
||||
newfunction = types.FunctionType
|
||||
newinstancemethod = types.MethodType
|
||||
|
||||
|
||||
# Default charge profiler values
|
||||
default_watermark = 0.09 # between 0.0 (0%) and 1.0 (100%)
|
||||
default_halflife = 0.5 # seconds
|
||||
default_pollfreq_profile = 20 # Hz
|
||||
default_pollfreq_background = 100 # Hz -- a maximum for sleep's resolution
|
||||
default_parentframe = 0.25 # should not be more than 0.5 (50%)
|
||||
|
||||
|
||||
def full(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Compile as much as possible.
|
||||
|
||||
Typical use is for small scripts performing intensive computations
|
||||
or string handling."""
|
||||
import profiler
|
||||
p = profiler.FullCompiler()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def profile(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_profile,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on profiling.
|
||||
|
||||
The 'watermark' parameter controls how easily running functions will
|
||||
be compiled. The smaller the value, the more functions are compiled."""
|
||||
import profiler
|
||||
p = profiler.ActivePassiveProfiler(watermark, halflife,
|
||||
pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def background(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_background,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on passive profiling.
|
||||
|
||||
This is a very lightweight mode in which only intensively computing
|
||||
functions can be detected. The smaller the 'watermark', the more functions
|
||||
are compiled."""
|
||||
import profiler
|
||||
p = profiler.PassiveProfiler(watermark, halflife, pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def runonly(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Nonprofiler.
|
||||
|
||||
XXX check if this is useful and document."""
|
||||
import profiler
|
||||
p = profiler.RunOnly()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def stop():
|
||||
"""Turn off all automatic compilation. bind() calls remain in effect."""
|
||||
import profiler
|
||||
profiler.go([])
|
||||
|
||||
|
||||
def log(logfile='', mode='w', top=10):
|
||||
"""Enable logging to the given file.
|
||||
|
||||
If the file name is unspecified, a default name is built by appending
|
||||
a 'log-psyco' extension to the main script name.
|
||||
|
||||
Mode is 'a' to append to a possibly existing file or 'w' to overwrite
|
||||
an existing file. Note that the log file may grow quickly in 'a' mode."""
|
||||
import profiler, logger
|
||||
if not logfile:
|
||||
import os
|
||||
logfile, dummy = os.path.splitext(sys.argv[0])
|
||||
if os.path.basename(logfile):
|
||||
logfile += '.'
|
||||
logfile += 'log-psyco'
|
||||
if hasattr(_psyco, 'VERBOSE_LEVEL'):
|
||||
print >> sys.stderr, 'psyco: logging to', logfile
|
||||
# logger.current should be a real file object; subtle problems
|
||||
# will show up if its write() and flush() methods are written
|
||||
# in Python, as Psyco will invoke them while compiling.
|
||||
logger.current = open(logfile, mode)
|
||||
logger.print_charges = top
|
||||
profiler.logger = logger
|
||||
logger.writedate('Logging started')
|
||||
cannotcompile(logger.psycowrite)
|
||||
_psyco.statwrite(logger=logger.psycowrite)
|
||||
|
||||
|
||||
def bind(x, rec=None):
|
||||
"""Enable compilation of the given function, method, or class object.
|
||||
|
||||
If C is a class (or anything with a '__dict__' attribute), bind(C) will
|
||||
rebind all functions and methods found in C.__dict__ (which means, for
|
||||
classes, all methods defined in the class but not in its parents).
|
||||
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
x.func_code = _psyco.proxycode(x)
|
||||
else:
|
||||
x.func_code = _psyco.proxycode(x, rec)
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
funcs = [o for o in x.__dict__.values()
|
||||
if isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)]
|
||||
if not funcs:
|
||||
raise error, ("nothing bindable found in %s object" %
|
||||
type(x).__name__)
|
||||
for o in funcs:
|
||||
bind(o, rec)
|
||||
return
|
||||
raise TypeError, "cannot bind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unbind(x):
|
||||
"""Reverse of bind()."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
try:
|
||||
f = _psyco.unproxycode(x.func_code)
|
||||
except error:
|
||||
pass
|
||||
else:
|
||||
x.func_code = f.func_code
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
for o in x.__dict__.values():
|
||||
if (isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)):
|
||||
unbind(o)
|
||||
return
|
||||
raise TypeError, "cannot unbind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def proxy(x, rec=None):
|
||||
"""Return a Psyco-enabled copy of the function.
|
||||
|
||||
The original function is still available for non-compiled calls.
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
code = _psyco.proxycode(x)
|
||||
else:
|
||||
code = _psyco.proxycode(x, rec)
|
||||
return newfunction(code, x.func_globals, x.func_name)
|
||||
if isinstance(x, types.MethodType):
|
||||
p = proxy(x.im_func, rec)
|
||||
return newinstancemethod(p, x.im_self, x.im_class)
|
||||
raise TypeError, "cannot proxy %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unproxy(proxy):
|
||||
"""Return a new copy of the original function of method behind a proxy.
|
||||
The result behaves like the original function in that calling it
|
||||
does not trigger compilation nor execution of any compiled code."""
|
||||
if isinstance(proxy, types.FunctionType):
|
||||
return _psyco.unproxycode(proxy.func_code)
|
||||
if isinstance(proxy, types.MethodType):
|
||||
f = unproxy(proxy.im_func)
|
||||
return newinstancemethod(f, proxy.im_self, proxy.im_class)
|
||||
raise TypeError, "%s objects cannot be proxies" % type(proxy).__name__
|
||||
|
||||
|
||||
def cannotcompile(x):
|
||||
"""Instruct Psyco never to compile the given function, method
|
||||
or code object."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
x = x.func_code
|
||||
if isinstance(x, types.CodeType):
|
||||
_psyco.cannotcompile(x)
|
||||
else:
|
||||
raise TypeError, "unexpected %s object" % type(x).__name__
|
||||
|
||||
|
||||
def dumpcodebuf():
|
||||
"""Write in file psyco.dump a copy of the emitted machine code,
|
||||
provided Psyco was compiled with a non-zero CODE_DUMP.
|
||||
See py-utils/httpxam.py to examine psyco.dump."""
|
||||
if hasattr(_psyco, 'dumpcodebuf'):
|
||||
_psyco.dumpcodebuf()
|
||||
|
||||
|
||||
###########################################################################
|
||||
# Psyco variables
|
||||
# error * the error raised by Psyco
|
||||
# warning * the warning raised by Psyco
|
||||
# __in_psyco__ * a new built-in variable which is always zero, but which
|
||||
# Psyco special-cases by returning 1 instead. So
|
||||
# __in_psyco__ can be used in a function to know if
|
||||
# that function is being executed by Psyco or not.
|
||||
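# Usage sketch for the two entry points documented above: bind() compiles a
# function in place, while proxy() hands back a compiled copy and leaves the
# original alone. checksum() is a made-up example function; the bundled package
# is assumed to import on this platform:
import psyco

def checksum(data):
    total = 0
    for ch in data:
        total = (total + ord(ch)) & 0xffff
    return total

fast_checksum = psyco.proxy(checksum)   # compiled copy; checksum itself unchanged
psyco.bind(checksum)                    # now the original is compiled in place too
assert fast_checksum('x' * 10000) == checksum('x' * 10000)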
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/kdictproxy.py (new file, 133 lines)
(contents identical to the kdictproxy.py listed earlier in this commit)
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/logger.py (new file, 96 lines)
(contents identical to the logger.py listed earlier in this commit)
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/profiler.py (new file, 379 lines)
(contents identical to the profiler.py listed earlier in this commit)
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/support.py (new file, 191 lines)
@@ -0,0 +1,191 @@
###########################################################################
#
#  Psyco general support module.
#   Copyright (C) 2001-2002  Armin Rigo et.al.

"""Psyco general support module.

For internal use.
"""
###########################################################################

import sys, _psyco, __builtin__

error = _psyco.error
class warning(Warning):
    pass

_psyco.NoLocalsWarning = warning

def warn(msg):
    from warnings import warn
    warn(msg, warning, stacklevel=2)

#
# Version checks
#
__version__ = 0x010600f0
if _psyco.PSYVER != __version__:
    raise error, "version mismatch between Psyco parts, reinstall it"

version_info = (__version__ >> 24,
                (__version__ >> 16) & 0xff,
                (__version__ >> 8) & 0xff,
                {0xa0: 'alpha',
                 0xb0: 'beta',
                 0xc0: 'candidate',
                 0xf0: 'final'}[__version__ & 0xf0],
                __version__ & 0xf)


VERSION_LIMITS = [0x02020200,   # 2.2.2
                  0x02030000,   # 2.3
                  0x02040000]   # 2.4

if ([v for v in VERSION_LIMITS if v <= sys.hexversion] !=
    [v for v in VERSION_LIMITS if v <= _psyco.PYVER  ]):
    if sys.hexversion < VERSION_LIMITS[0]:
        warn("Psyco requires Python version 2.2.2 or later")
    else:
        warn("Psyco version does not match Python version. "
             "Psyco must be updated or recompiled")


if hasattr(_psyco, 'ALL_CHECKS') and hasattr(_psyco, 'VERBOSE_LEVEL'):
    print >> sys.stderr, ('psyco: running in debugging mode on %s' %
                          _psyco.PROCESSOR)


###########################################################################
# sys._getframe() gives strange results on a mixed Psyco- and Python-style
# stack frame.  Psyco provides a replacement that partially emulates Python
# frames from Psyco frames.  The new sys._getframe() may return objects of
# a custom "Psyco frame" type, which is a subtype of the normal frame type.
#
# The same problems require some other built-in functions to be replaced
# as well.  Note that the local variables are not available in any
# dictionary with Psyco.


class Frame:
    pass


class PythonFrame(Frame):

    def __init__(self, frame):
        self.__dict__.update({
            '_frame': frame,
            })

    def __getattr__(self, attr):
        if attr == 'f_back':
            try:
                result = embedframe(_psyco.getframe(self._frame))
            except ValueError:
                result = None
            except error:
                warn("f_back is skipping dead Psyco frames")
                result = self._frame.f_back
            self.__dict__['f_back'] = result
            return result
        else:
            return getattr(self._frame, attr)

    def __setattr__(self, attr, value):
        setattr(self._frame, attr, value)

    def __delattr__(self, attr):
        delattr(self._frame, attr)


class PsycoFrame(Frame):

    def __init__(self, tag):
        self.__dict__.update({
            '_tag'     : tag,
            'f_code'   : tag[0],
            'f_globals': tag[1],
            })

    def __getattr__(self, attr):
        if attr == 'f_back':
            try:
                result = embedframe(_psyco.getframe(self._tag))
            except ValueError:
                result = None
        elif attr == 'f_lineno':
            result = self.f_code.co_firstlineno  # better than nothing
        elif attr == 'f_builtins':
            result = self.f_globals['__builtins__']
        elif attr == 'f_restricted':
            result = self.f_builtins is not __builtins__
        elif attr == 'f_locals':
            raise AttributeError, ("local variables of functions run by Psyco "
                                   "cannot be accessed in any way, sorry")
        else:
            raise AttributeError, ("emulated Psyco frames have "
                                   "no '%s' attribute" % attr)
        self.__dict__[attr] = result
        return result

    def __setattr__(self, attr, value):
        raise AttributeError, "Psyco frame objects are read-only"

    def __delattr__(self, attr):
        if attr == 'f_trace':
            # for bdb which relies on CPython frames exhibiting a slightly
            # buggy behavior: you can 'del f.f_trace' as often as you like
            # even without having set it previously.
            return
        raise AttributeError, "Psyco frame objects are read-only"


def embedframe(result):
    if type(result) is type(()):
        return PsycoFrame(result)
    else:
        return PythonFrame(result)

def _getframe(depth=0):
    """Return a frame object from the call stack.  This is a replacement for
    sys._getframe() which is aware of Psyco frames.

    The returned objects are instances of either PythonFrame or PsycoFrame
    instead of being real Python-level frame object, so that they can emulate
    the common attributes of frame objects.

    The original sys._getframe() ignoring Psyco frames altogether is stored in
    psyco._getrealframe().  See also psyco._getemulframe()."""
    # 'depth+1' to account for this _getframe() Python function
    return embedframe(_psyco.getframe(depth+1))

def _getemulframe(depth=0):
    """As _getframe(), but the returned objects are real Python frame objects
    emulating Psyco frames.  Some of their attributes can be wrong or missing,
    however."""
    # 'depth+1' to account for this _getemulframe() Python function
    return _psyco.getframe(depth+1, 1)

def patch(name, module=__builtin__):
    f = getattr(_psyco, name)
    org = getattr(module, name)
    if org is not f:
        setattr(module, name, f)
        setattr(_psyco, 'original_' + name, org)

_getrealframe = sys._getframe
sys._getframe = _getframe
patch('globals')
patch('eval')
patch('execfile')
patch('locals')
patch('vars')
patch('dir')
patch('input')
_psyco.original_raw_input = raw_input
__builtin__.__in_psyco__ = 0==1   # False

if hasattr(_psyco, 'compact'):
    import kdictproxy
    _psyco.compactdictproxy = kdictproxy.compactdictproxy
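
Once this module has been executed (it runs as a side effect of importing the psyco package), sys._getframe() returns PythonFrame/PsycoFrame wrappers and the patched builtins (globals, eval, execfile, locals, vars, dir, input) are swapped for Psyco-aware versions, with each original stashed on _psyco as original_<name>. A rough caller-side sketch, assuming a working Psyco install on Python 2 (where_am_i is an invented example function):

import sys
import psyco                      # importing the package triggers the patching above

def where_am_i():
    frame = sys._getframe()       # now a PythonFrame/PsycoFrame wrapper
    return frame.f_code.co_name   # the common frame attributes still work

print where_am_i()                # prints 'where_am_i'

# the untouched CPython builtin is kept around, as the _getframe() docstring notes
real_frame = psyco._getrealframe()
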