tools v2.1
combined kindle/mobi plugin
Binary file not shown.
@@ -1,682 +0,0 @@
#!/usr/bin/env python
#
# This is a WINDOWS python script. You need a Python interpreter to run it.
# For example, ActiveState Python, which exists for windows.
#
# It can run standalone to convert K4PC files, or it can be installed as a
# plugin for Calibre (http://calibre-ebook.com/about) so that importing
# K4PC files with DRM is no longer a multi-step process.
#
# ***NOTE*** Calibre and K4PC must be installed on the same windows machine
# for the plugin version to function properly.
#
# To create a Calibre plugin, rename this file so that the filename
# ends in '_plugin.py', put it into a ZIP file and import that ZIP into Calibre
# using its plugin configuration GUI.
#
# Thanks to The Dark Reverser for MobiDeDrm and CMBDTC for cmbdtc_dump from
# which this script steals most unashamedly.
#
# Changelog
#  0.01 - Initial version - Utilizes skindle and CMBDTC method of obtaining
#         book specific pids from K4PC books. If Calibre and K4PC are installed
#         on the same windows machine, Calibre plugin functionality is once
#         again restored.


"""

Comprehensive Mazama Book DRM with Topaz Cryptography V2.0

-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdBHJ4CNc6DNFCw4MRCw4SWAK6
M8hYfnNEI0yQmn5Ti+W8biT7EatpauE/5jgQMPBmdNrDr1hbHyHBSP7xeC2qlRWC
B62UCxeu/fpfnvNHDN/wPWWH4jynZ2M6cdcnE5LQ+FfeKqZn7gnG2No1U9h7oOHx
y2/pHuYme7U1TsgSjwIDAQAB
-----END PUBLIC KEY-----

"""

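# The plugin-packaging step described in the header comment can be scripted.
# The helper below is only an illustrative sketch (it is not part of the
# original tool); it assumes this file has already been renamed so that its
# name ends in '_plugin.py', and the zip name is just an example.
def _make_plugin_zip(plugin_py, zip_name='K4PCDeDRM_plugin.zip'):
    import zipfile, os
    # Store the renamed script at the root of the ZIP, which is the layout
    # Calibre's "Add plugin" dialog expects.
    zf = zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED)
    zf.write(plugin_py, os.path.basename(plugin_py))
    zf.close()
    return zip_name
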
from __future__ import with_statement
|
||||
|
||||
import csv
|
||||
import sys
|
||||
import os
|
||||
import getopt
|
||||
import zlib
|
||||
import binascii
|
||||
from struct import pack
|
||||
from struct import unpack
|
||||
from ctypes import windll, c_char_p, c_wchar_p, c_uint, POINTER, byref, \
|
||||
create_unicode_buffer, create_string_buffer, CFUNCTYPE, addressof, \
|
||||
string_at, Structure, c_void_p, cast
|
||||
import _winreg as winreg
|
||||
import traceback
|
||||
import hashlib
|
||||
|
||||
__version__ = '0.01'
|
||||
|
||||
global kindleDatabase
|
||||
MAX_PATH = 255
|
||||
kernel32 = windll.kernel32
|
||||
advapi32 = windll.advapi32
|
||||
crypt32 = windll.crypt32
|
||||
|
||||
|
||||
#
|
||||
# Various character maps used to decrypt books. Probably supposed to act as obfuscation
|
||||
#
|
||||
charMap1 = "n5Pr6St7Uv8Wx9YzAb0Cd1Ef2Gh3Jk4M"
|
||||
charMap2 = "AaZzB0bYyCc1XxDdW2wEeVv3FfUuG4g-TtHh5SsIiR6rJjQq7KkPpL8lOoMm9Nn_"
|
||||
charMap3 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
|
||||
charMap4 = "ABCDEFGHIJKLMNPQRSTUVWXYZ123456789"
|
||||
|
||||
|
||||
#
|
||||
# Exceptions for all the problems that might happen during the script
|
||||
#
|
||||
class DrmException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class DataBlob(Structure):
|
||||
_fields_ = [('cbData', c_uint),
|
||||
('pbData', c_void_p)]
|
||||
DataBlob_p = POINTER(DataBlob)
|
||||
|
||||
|
||||
def GetSystemDirectory():
|
||||
GetSystemDirectoryW = kernel32.GetSystemDirectoryW
|
||||
GetSystemDirectoryW.argtypes = [c_wchar_p, c_uint]
|
||||
GetSystemDirectoryW.restype = c_uint
|
||||
def GetSystemDirectory():
|
||||
buffer = create_unicode_buffer(MAX_PATH + 1)
|
||||
GetSystemDirectoryW(buffer, len(buffer))
|
||||
return buffer.value
|
||||
return GetSystemDirectory
|
||||
GetSystemDirectory = GetSystemDirectory()
|
||||
|
||||
|
||||
def GetVolumeSerialNumber():
|
||||
GetVolumeInformationW = kernel32.GetVolumeInformationW
|
||||
GetVolumeInformationW.argtypes = [c_wchar_p, c_wchar_p, c_uint,
|
||||
POINTER(c_uint), POINTER(c_uint),
|
||||
POINTER(c_uint), c_wchar_p, c_uint]
|
||||
GetVolumeInformationW.restype = c_uint
|
||||
def GetVolumeSerialNumber(path):
|
||||
vsn = c_uint(0)
|
||||
GetVolumeInformationW(path, None, 0, byref(vsn), None, None, None, 0)
|
||||
return vsn.value
|
||||
return GetVolumeSerialNumber
|
||||
GetVolumeSerialNumber = GetVolumeSerialNumber()
|
||||
|
||||
|
||||
def GetUserName():
|
||||
GetUserNameW = advapi32.GetUserNameW
|
||||
GetUserNameW.argtypes = [c_wchar_p, POINTER(c_uint)]
|
||||
GetUserNameW.restype = c_uint
|
||||
def GetUserName():
|
||||
buffer = create_unicode_buffer(32)
|
||||
size = c_uint(len(buffer))
|
||||
while not GetUserNameW(buffer, byref(size)):
|
||||
buffer = create_unicode_buffer(len(buffer) * 2)
|
||||
size.value = len(buffer)
|
||||
return buffer.value.encode('utf-16-le')[::2]
|
||||
return GetUserName
|
||||
GetUserName = GetUserName()
|
||||
|
||||
|
||||
def CryptUnprotectData():
|
||||
_CryptUnprotectData = crypt32.CryptUnprotectData
|
||||
_CryptUnprotectData.argtypes = [DataBlob_p, c_wchar_p, DataBlob_p,
|
||||
c_void_p, c_void_p, c_uint, DataBlob_p]
|
||||
_CryptUnprotectData.restype = c_uint
|
||||
def CryptUnprotectData(indata, entropy):
|
||||
indatab = create_string_buffer(indata)
|
||||
indata = DataBlob(len(indata), cast(indatab, c_void_p))
|
||||
entropyb = create_string_buffer(entropy)
|
||||
entropy = DataBlob(len(entropy), cast(entropyb, c_void_p))
|
||||
outdata = DataBlob()
|
||||
if not _CryptUnprotectData(byref(indata), None, byref(entropy),
|
||||
None, None, 0, byref(outdata)):
|
||||
raise DrmException("Failed to Unprotect Data")
|
||||
return string_at(outdata.pbData, outdata.cbData)
|
||||
return CryptUnprotectData
|
||||
CryptUnprotectData = CryptUnprotectData()
|
||||
|
||||
|
||||
#
|
||||
# Returns the MD5 digest of "message"
|
||||
#
|
||||
def MD5(message):
|
||||
ctx = hashlib.md5()
|
||||
ctx.update(message)
|
||||
return ctx.digest()
|
||||
|
||||
|
||||
#
|
||||
# Returns the SHA1 digest of "message"
|
||||
#
|
||||
def SHA1(message):
|
||||
ctx = hashlib.sha1()
|
||||
ctx.update(message)
|
||||
return ctx.digest()
|
||||
|
||||
|
||||
#
|
||||
# Locate and open the Kindle.info file.
|
||||
#
|
||||
def openKindleInfo():
|
||||
regkey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders\\")
|
||||
path = winreg.QueryValueEx(regkey, 'Local AppData')[0]
|
||||
return open(path+'\\Amazon\\Kindle For PC\\{AMAwzsaPaaZAzmZzZQzgZCAkZ3AjA_AY}\\kindle.info','r')
|
||||
|
||||
|
||||
#
|
||||
# Parse the Kindle.info file and return the records as a list of key-values
|
||||
#
|
||||
def parseKindleInfo():
|
||||
DB = {}
|
||||
infoReader = openKindleInfo()
|
||||
infoReader.read(1)
|
||||
data = infoReader.read()
|
||||
items = data.split('{')
|
||||
|
||||
for item in items:
|
||||
splito = item.split(':')
|
||||
DB[splito[0]] =splito[1]
|
||||
return DB
|
||||
|
||||
|
||||
#
|
||||
# Find if the original string for a hashed/encoded string is known. If so, return the original string; otherwise return an empty string. (Totally not optimal)
|
||||
#
|
||||
def findNameForHash(hash):
|
||||
names = ["kindle.account.tokens","kindle.cookie.item","eulaVersionAccepted","login_date","kindle.token.item","login","kindle.key.item","kindle.name.info","kindle.device.info", "MazamaRandomNumber"]
|
||||
result = ""
|
||||
for name in names:
|
||||
if hash == encodeHash(name, charMap2):
|
||||
result = name
|
||||
break
|
||||
return result
|
||||
|
||||
|
||||
#
|
||||
# Print all the records from the kindle.info file.
|
||||
#
|
||||
def printKindleInfo():
|
||||
for record in kindleDatabase:
|
||||
name = findNameForHash(record)
|
||||
if name != "" :
|
||||
print (name)
|
||||
print ("--------------------------\n")
|
||||
else :
|
||||
print ("Unknown Record")
|
||||
print getKindleInfoValueForHash(record)
|
||||
print "\n"
|
||||
|
||||
|
||||
#
|
||||
# Get a record from the Kindle.info file for the key "hashedKey" (already hashed and encoded). Return the decoded and decrypted record
|
||||
#
|
||||
def getKindleInfoValueForHash(hashedKey):
|
||||
global kindleDatabase
|
||||
encryptedValue = decode(kindleDatabase[hashedKey],charMap2)
|
||||
return CryptUnprotectData(encryptedValue,"")
|
||||
|
||||
|
||||
#
|
||||
# Get a record from the Kindle.info file for the string in "key" (plaintext). Return the decoded and decrypted record
|
||||
#
|
||||
def getKindleInfoValueForKey(key):
|
||||
return getKindleInfoValueForHash(encodeHash(key,charMap2))
|
||||
|
||||
|
||||
#
|
||||
# 8 bits to six bits encoding from hash to generate PID string
|
||||
#
|
||||
def encodePID(hash):
|
||||
global charMap3
|
||||
PID = ""
|
||||
for position in range (0,8):
|
||||
PID += charMap3[getSixBitsFromBitField(hash,position)]
|
||||
return PID
|
||||
|
||||
|
||||
#
|
||||
# Hash the bytes in data and then encode the digest with the characters in map
|
||||
#
|
||||
def encodeHash(data,map):
|
||||
return encode(MD5(data),map)
|
||||
|
||||
|
||||
#
|
||||
# Encode the bytes in data with the characters in map
|
||||
#
|
||||
def encode(data, map):
|
||||
result = ""
|
||||
for char in data:
|
||||
value = ord(char)
|
||||
Q = (value ^ 0x80) // len(map)
|
||||
R = value % len(map)
|
||||
result += map[Q]
|
||||
result += map[R]
|
||||
return result
|
||||
|
||||
|
||||
#
|
||||
# Decode the string in data with the characters in map. Returns the decoded bytes
|
||||
#
|
||||
def decode(data,map):
|
||||
result = ""
|
||||
for i in range (0,len(data),2):
|
||||
high = map.find(data[i])
|
||||
low = map.find(data[i+1])
|
||||
value = (((high * 0x40) ^ 0x80) & 0xFF) + low
|
||||
result += pack("B",value)
|
||||
return result
|
||||
|
||||
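# Illustrative sketch only (not part of the original script): for the
# 64-character charMap2, decode() inverts encode(), which is how the hashed
# kindle.info keys and values are unpacked elsewhere in this file.
def _encode_decode_roundtrip_example():
    digest = MD5("MazamaRandomNumber")
    encoded = encode(digest, charMap2)
    # decode(encode(x, charMap2), charMap2) should give back the raw digest
    return decode(encoded, charMap2) == digest
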
|
||||
#
|
||||
# Encryption table used to generate the device PID
|
||||
#
|
||||
def generatePidEncryptionTable() :
|
||||
table = []
|
||||
for counter1 in range (0,0x100):
|
||||
value = counter1
|
||||
for counter2 in range (0,8):
|
||||
if (value & 1 == 0) :
|
||||
value = value >> 1
|
||||
else :
|
||||
value = value >> 1
|
||||
value = value ^ 0xEDB88320
|
||||
table.append(value)
|
||||
return table
|
||||
|
||||
|
||||
#
|
||||
# Seed value used to generate the device PID
|
||||
#
|
||||
def generatePidSeed(table,dsn) :
|
||||
value = 0
|
||||
for counter in range (0,4) :
|
||||
index = (ord(dsn[counter]) ^ value) &0xFF
|
||||
value = (value >> 8) ^ table[index]
|
||||
return value
|
||||
|
||||
|
||||
#
|
||||
# Generate the device PID
|
||||
#
|
||||
def generateDevicePID(table,dsn,nbRoll):
|
||||
seed = generatePidSeed(table,dsn)
|
||||
pidAscii = ""
|
||||
pid = [(seed >>24) &0xFF,(seed >> 16) &0xff,(seed >> 8) &0xFF,(seed) & 0xFF,(seed>>24) & 0xFF,(seed >> 16) &0xff,(seed >> 8) &0xFF,(seed) & 0xFF]
|
||||
index = 0
|
||||
|
||||
for counter in range (0,nbRoll):
|
||||
pid[index] = pid[index] ^ ord(dsn[counter])
|
||||
index = (index+1) %8
|
||||
|
||||
for counter in range (0,8):
|
||||
index = ((((pid[counter] >>5) & 3) ^ pid[counter]) & 0x1f) + (pid[counter] >> 7)
|
||||
pidAscii += charMap4[index]
|
||||
return pidAscii
|
||||
|
||||
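# Illustrative sketch only (not part of the original script). The DSN value
# below is a made-up placeholder; the real DSN is derived in getK4PCPids()
# from the kindle.info MazamaRandomNumber, the volume serial number and the
# user name.
def _device_pid_example():
    table = generatePidEncryptionTable()
    fake_dsn = "0123456789abcdef"
    # 4 is the roll count used by getK4PCPids() when it prints the device PID
    return generateDevicePID(table, fake_dsn, 4)
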
|
||||
#
|
||||
# Returns the two bits at offset from a bit field
|
||||
#
|
||||
def getTwoBitsFromBitField(bitField,offset):
|
||||
byteNumber = offset // 4
|
||||
bitPosition = 6 - 2*(offset % 4)
|
||||
|
||||
return ord(bitField[byteNumber]) >> bitPosition & 3
|
||||
|
||||
|
||||
#
|
||||
# Returns the six bits at offset from a bit field
|
||||
#
|
||||
def getSixBitsFromBitField(bitField,offset):
|
||||
offset *= 3
|
||||
value = (getTwoBitsFromBitField(bitField,offset) <<4) + (getTwoBitsFromBitField(bitField,offset+1) << 2) +getTwoBitsFromBitField(bitField,offset+2)
|
||||
return value
|
||||
|
||||
|
||||
#
|
||||
# MobiDeDrm-0.16 Stuff
|
||||
#
|
||||
class Unbuffered:
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
def write(self, data):
|
||||
self.stream.write(data)
|
||||
self.stream.flush()
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.stream, attr)
|
||||
|
||||
|
||||
# Implementation of Pukall Cipher 1
|
||||
def PC1(key, src, decryption=True):
|
||||
sum1 = 0;
|
||||
sum2 = 0;
|
||||
keyXorVal = 0;
|
||||
if len(key)!=16:
|
||||
print "Bad key length!"
|
||||
return None
|
||||
wkey = []
|
||||
for i in xrange(8):
|
||||
wkey.append(ord(key[i*2])<<8 | ord(key[i*2+1]))
|
||||
|
||||
dst = ""
|
||||
for i in xrange(len(src)):
|
||||
temp1 = 0;
|
||||
byteXorVal = 0;
|
||||
for j in xrange(8):
|
||||
temp1 ^= wkey[j]
|
||||
sum2 = (sum2+j)*20021 + sum1
|
||||
sum1 = (temp1*346)&0xFFFF
|
||||
sum2 = (sum2+sum1)&0xFFFF
|
||||
temp1 = (temp1*20021+1)&0xFFFF
|
||||
byteXorVal ^= temp1 ^ sum2
|
||||
curByte = ord(src[i])
|
||||
if not decryption:
|
||||
keyXorVal = curByte * 257;
|
||||
curByte = ((curByte ^ (byteXorVal >> 8)) ^ byteXorVal) & 0xFF
|
||||
if decryption:
|
||||
keyXorVal = curByte * 257;
|
||||
for j in xrange(8):
|
||||
wkey[j] ^= keyXorVal;
|
||||
dst+=chr(curByte)
|
||||
return dst
|
||||
|
||||
|
||||
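# Illustrative sketch only (not part of the original script): PC1 decryption
# undoes PC1 encryption under the same 16-byte key, which is why parseDRM()
# below can both derive temp keys (decryption=False) and decrypt the DRM
# cookie blocks with the same routine. The key here is a placeholder.
def _pc1_roundtrip_example():
    key = "0123456789ABCDEF"   # any 16-byte value; real keys come from parseDRM
    plaintext = "Pukall Cipher 1"
    ciphertext = PC1(key, plaintext, False)
    return PC1(key, ciphertext, True) == plaintext
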
def getSizeOfTrailingDataEntries(ptr, size, flags):
|
||||
def getSizeOfTrailingDataEntry(ptr, size):
|
||||
bitpos, result = 0, 0
|
||||
if size <= 0:
|
||||
return result
|
||||
while True:
|
||||
v = ord(ptr[size-1])
|
||||
result |= (v & 0x7F) << bitpos
|
||||
bitpos += 7
|
||||
size -= 1
|
||||
if (v & 0x80) != 0 or (bitpos >= 28) or (size == 0):
|
||||
return result
|
||||
num = 0
|
||||
testflags = flags >> 1
|
||||
while testflags:
|
||||
if testflags & 1:
|
||||
num += getSizeOfTrailingDataEntry(ptr, size - num)
|
||||
testflags >>= 1
|
||||
# Multibyte data, if present, is included in the encryption, so
|
||||
# we do not need to check the low bit.
|
||||
# if flags & 1:
|
||||
# num += (ord(ptr[size - num - 1]) & 0x3) + 1
|
||||
return num
|
||||
|
||||
|
||||
#
|
||||
# This class does all the heavy lifting.
|
||||
#
|
||||
class DrmStripper:
|
||||
def loadSection(self, section):
|
||||
if (section + 1 == self.num_sections):
|
||||
endoff = len(self.data_file)
|
||||
else:
|
||||
endoff = self.sections[section + 1][0]
|
||||
off = self.sections[section][0]
|
||||
return self.data_file[off:endoff]
|
||||
|
||||
def patch(self, off, new):
|
||||
self.data_file = self.data_file[:off] + new + self.data_file[off+len(new):]
|
||||
|
||||
def patchSection(self, section, new, in_off = 0):
|
||||
if (section + 1 == self.num_sections):
|
||||
endoff = len(self.data_file)
|
||||
else:
|
||||
endoff = self.sections[section + 1][0]
|
||||
off = self.sections[section][0]
|
||||
assert off + in_off + len(new) <= endoff
|
||||
self.patch(off + in_off, new)
|
||||
|
||||
def parseDRM(self, data, count, pid):
|
||||
pid = pid.ljust(16,'\0')
|
||||
keyvec1 = "\x72\x38\x33\xB0\xB4\xF2\xE3\xCA\xDF\x09\x01\xD6\xE2\xE0\x3F\x96"
|
||||
temp_key = PC1(keyvec1, pid, False)
|
||||
temp_key_sum = sum(map(ord,temp_key)) & 0xff
|
||||
found_key = None
|
||||
for i in xrange(count):
|
||||
verification, size, type, cksum, cookie = unpack('>LLLBxxx32s', data[i*0x30:i*0x30+0x30])
|
||||
cookie = PC1(temp_key, cookie)
|
||||
ver,flags,finalkey,expiry,expiry2 = unpack('>LL16sLL', cookie)
|
||||
if verification == ver and cksum == temp_key_sum and (flags & 0x1F) == 1:
|
||||
found_key = finalkey
|
||||
break
|
||||
if not found_key:
|
||||
# Then try the default encoding that doesn't require a PID
|
||||
temp_key = keyvec1
|
||||
temp_key_sum = sum(map(ord,temp_key)) & 0xff
|
||||
for i in xrange(count):
|
||||
verification, size, type, cksum, cookie = unpack('>LLLBxxx32s', data[i*0x30:i*0x30+0x30])
|
||||
cookie = PC1(temp_key, cookie)
|
||||
ver,flags,finalkey,expiry,expiry2 = unpack('>LL16sLL', cookie)
|
||||
if verification == ver and cksum == temp_key_sum:
|
||||
found_key = finalkey
|
||||
break
|
||||
return found_key
|
||||
|
||||
def __init__(self, data_file):
|
||||
self.data_file = data_file
|
||||
header = data_file[0:72]
|
||||
if header[0x3C:0x3C+8] != 'BOOKMOBI':
|
||||
raise DrmException("invalid file format")
|
||||
self.num_sections, = unpack('>H', data_file[76:78])
|
||||
|
||||
self.sections = []
|
||||
for i in xrange(self.num_sections):
|
||||
offset, a1,a2,a3,a4 = unpack('>LBBBB', data_file[78+i*8:78+i*8+8])
|
||||
flags, val = a1, a2<<16|a3<<8|a4
|
||||
self.sections.append( (offset, flags, val) )
|
||||
|
||||
sect = self.loadSection(0)
|
||||
records, = unpack('>H', sect[0x8:0x8+2])
|
||||
mobi_length, = unpack('>L',sect[0x14:0x18])
|
||||
mobi_version, = unpack('>L',sect[0x68:0x6C])
|
||||
extra_data_flags = 0
|
||||
print "MOBI header version = %d, length = %d" %(mobi_version, mobi_length)
|
||||
if (mobi_length >= 0xE4) and (mobi_version >= 5):
|
||||
extra_data_flags, = unpack('>H', sect[0xF2:0xF4])
|
||||
print "Extra Data Flags = %d" %extra_data_flags
|
||||
|
||||
crypto_type, = unpack('>H', sect[0xC:0xC+2])
|
||||
if crypto_type == 0:
|
||||
print "This book is not encrypted."
|
||||
else:
|
||||
if crypto_type == 1:
|
||||
raise DrmException("cannot decode Mobipocket encryption type 1")
|
||||
if crypto_type != 2:
|
||||
raise DrmException("unknown encryption type: %d" % crypto_type)
|
||||
|
||||
# determine the EXTH Offset.
|
||||
exth_off = unpack('>I', sect[20:24])[0] + 16 + self.sections[0][0]
|
||||
# Grab the entire EXTH block and feed it to the getK4PCPids function.
|
||||
exth = data_file[exth_off:self.sections[0+1][0]]
|
||||
pid = getK4PCPids(exth)
|
||||
|
||||
# calculate the keys
|
||||
drm_ptr, drm_count, drm_size, drm_flags = unpack('>LLLL', sect[0xA8:0xA8+16])
|
||||
if drm_count == 0:
|
||||
raise DrmException("no PIDs found in this file")
|
||||
found_key = self.parseDRM(sect[drm_ptr:drm_ptr+drm_size], drm_count, pid)
|
||||
if not found_key:
|
||||
raise DrmException("no key found. maybe the PID is incorrect")
|
||||
|
||||
# kill the drm keys
|
||||
self.patchSection(0, "\0" * drm_size, drm_ptr)
|
||||
# kill the drm pointers
|
||||
self.patchSection(0, "\xff" * 4 + "\0" * 12, 0xA8)
|
||||
# clear the crypto type
|
||||
self.patchSection(0, "\0" * 2, 0xC)
|
||||
|
||||
# decrypt sections
|
||||
print "\nDecrypting. Please wait . . .",
|
||||
new_data = self.data_file[:self.sections[1][0]]
|
||||
for i in xrange(1, records+1):
|
||||
data = self.loadSection(i)
|
||||
extra_size = getSizeOfTrailingDataEntries(data, len(data), extra_data_flags)
|
||||
if i%100 == 0:
|
||||
print ".",
|
||||
# print "record %d, extra_size %d" %(i,extra_size)
|
||||
new_data += PC1(found_key, data[0:len(data) - extra_size])
|
||||
if extra_size > 0:
|
||||
new_data += data[-extra_size:]
|
||||
#self.patchSection(i, PC1(found_key, data[0:len(data) - extra_size]))
|
||||
if self.num_sections > records+1:
|
||||
new_data += self.data_file[self.sections[records+1][0]:]
|
||||
self.data_file = new_data
|
||||
print "done!"
|
||||
print "\nPlease only use your new-found powers for good."
|
||||
|
||||
def getResult(self):
|
||||
return self.data_file
|
||||
|
||||
|
||||
#
|
||||
# DiapDealer's stuff: Parse the EXTH header records and parse the Kindleinfo
|
||||
# file to calculate the book pid.
|
||||
#
|
||||
def getK4PCPids(exth):
|
||||
global kindleDatabase
|
||||
try:
|
||||
kindleDatabase = parseKindleInfo()
|
||||
except Exception as message:
|
||||
print(message)
|
||||
|
||||
if kindleDatabase != None :
|
||||
|
||||
# Get the Mazama Random number
|
||||
MazamaRandomNumber = getKindleInfoValueForKey("MazamaRandomNumber")
|
||||
|
||||
# Get the HDD serial
|
||||
encodedSystemVolumeSerialNumber = encodeHash(str(GetVolumeSerialNumber(GetSystemDirectory().split('\\')[0] + '\\')),charMap1)
|
||||
|
||||
# Get the current user name
|
||||
encodedUsername = encodeHash(GetUserName(),charMap1)
|
||||
|
||||
# concat, hash and encode to calculate the DSN
|
||||
DSN = encode(SHA1(MazamaRandomNumber+encodedSystemVolumeSerialNumber+encodedUsername),charMap1)
|
||||
|
||||
print("\nDSN: " + DSN)
|
||||
|
||||
|
||||
# Compute the device PID (which, as far as I can tell, is used for nothing).
|
||||
# But hey, stuff being printed out is apparently cool.
|
||||
table = generatePidEncryptionTable()
|
||||
devicePID = generateDevicePID(table,DSN,4)
|
||||
|
||||
print("Device PID: " + devicePID)
|
||||
|
||||
# Compute book PID
|
||||
exth_records = {}
|
||||
nitems, = unpack('>I', exth[8:12])
|
||||
pos = 12
|
||||
# Parse the EXTH records, storing data indexed by type
|
||||
for i in xrange(nitems):
|
||||
type, size = unpack('>II', exth[pos: pos + 8])
|
||||
content = exth[pos + 8: pos + size]
|
||||
|
||||
exth_records[type] = content
|
||||
pos += size
|
||||
|
||||
# Grab the contents of the type 209 exth record
|
||||
if exth_records[209] != None:
|
||||
data = exth_records[209]
|
||||
else:
|
||||
raise DrmException("\nNo EXTH record type 209 - Perhaps not a K4PC file?")
|
||||
|
||||
# Parse the 209 data to find the exth record with the token data.
|
||||
# The last character of the 209 data points to the record with the token.
|
||||
# Always 208 from my experience, but I'll leave the logic in case that changes.
|
||||
for i in xrange(len(data)):
|
||||
if ord(data[i]) != 0:
|
||||
if exth_records[ord(data[i])] != None:
|
||||
token = exth_records[ord(data[i])]
|
||||
|
||||
# Get the kindle account token
|
||||
kindleAccountToken = getKindleInfoValueForKey("kindle.account.tokens")
|
||||
|
||||
print("Account Token: " + kindleAccountToken)
|
||||
|
||||
pidHash = SHA1(DSN+kindleAccountToken+exth_records[209]+token)
|
||||
|
||||
bookPID = encodePID(pidHash)
|
||||
|
||||
if exth_records[503] != None:
|
||||
print "Pid for " + exth_records[503] + ": " + bookPID
|
||||
else:
|
||||
print ("Book PID: " + bookPID )
|
||||
|
||||
return bookPID
|
||||
|
||||
raise DrmException("\nCould not access K4PC data - Perhaps K4PC is not installed/configured?")
|
||||
return None
|
||||
|
||||
if not __name__ == "__main__":
|
||||
from calibre.customize import FileTypePlugin
|
||||
|
||||
class K4PCDeDRM(FileTypePlugin):
|
||||
name = 'K4PCDeDRM' # Name of the plugin
|
||||
description = 'Removes DRM from K4PC files'
|
||||
supported_platforms = ['windows'] # Platforms this plugin will run on
|
||||
author = 'DiapDealer' # The author of this plugin
|
||||
version = (0, 0, 1) # The version number of this plugin
|
||||
file_types = set(['prc','mobi','azw']) # The file types that this plugin will be applied to
|
||||
on_import = True # Run this plugin during the import
|
||||
|
||||
def run(self, path_to_ebook):
|
||||
from calibre.gui2 import is_ok_to_use_qt
|
||||
from PyQt4.Qt import QMessageBox
|
||||
data_file = file(path_to_ebook, 'rb').read()
|
||||
|
||||
try:
|
||||
unlocked_file = DrmStripper(data_file).getResult()
|
||||
except DrmException:
|
||||
# ignore the error
|
||||
pass
|
||||
else:
|
||||
of = self.temporary_file('.mobi')
|
||||
of.write(unlocked_file)
|
||||
of.close()
|
||||
return of.name
|
||||
|
||||
if is_ok_to_use_qt():
|
||||
d = QMessageBox(QMessageBox.Warning, "K4PCDeDRM Plugin", "Couldn't decode: %s\n\nImporting encrypted version." % path_to_ebook)
|
||||
d.show()
|
||||
d.raise_()
|
||||
d.exec_()
|
||||
return path_to_ebook
|
||||
|
||||
#def customization_help(self, gui=False):
|
||||
# return 'Enter PID (separate multiple PIDs with comma)'
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.stdout=Unbuffered(sys.stdout)
|
||||
print ('K4PCDeDrm v%(__version__)s '
|
||||
'provided by DiapDealer.' % globals())
|
||||
if len(sys.argv)<3:
|
||||
print "Removes DRM protection from K4PC books"
|
||||
print "Usage:"
|
||||
print " %s <infile> <outfile>" % sys.argv[0]
|
||||
sys.exit(1)
|
||||
else:
|
||||
infile = sys.argv[1]
|
||||
outfile = sys.argv[2]
|
||||
data_file = file(infile, 'rb').read()
|
||||
try:
|
||||
strippedFile = DrmStripper(data_file)
|
||||
file(outfile, 'wb').write(strippedFile.getResult())
|
||||
except DrmException, e:
|
||||
print "Error: %s" % e
|
||||
sys.exit(1)
|
||||
sys.exit(0)
|
||||
Binary file not shown.
23
Calibre_Plugins/README-K4MobiDeDRM-plugin.txt
Normal file
@@ -0,0 +1,23 @@
Plugin for K4PC, K4Mac and Mobi Books

Will work on Linux (standard DRM Mobi books only), Mac OS X (standard DRM Mobi books and "Kindle for Mac" books), and Windows (standard DRM Mobi books and "Kindle for PC" books).

This plugin supersedes the MobiDeDRM, K4DeDRM, and K4PCDeDRM plugins. If you install this plugin, those plugins can be safely removed.

This plugin is meant to convert "Kindle for PC", "Kindle for Mac" and "Mobi" ebooks with DRM to unlocked Mobi files. Calibre can then convert them to whatever format you desire. It is meant to function without having to install any dependencies other than Calibre itself, which must be on the same machine and in the same account as your "Kindle for PC" or "Kindle for Mac" application if you are going to remove the DRM from those types of books.

Installation:
Go to Calibre's Preferences page... click on the Plugins button. Use the file dialog button to select the plugin's zip file (k4mobidedrm_vXX_plugin.zip) and click the 'Add' button. You're done.

Configuration:
Highlight the plugin (K4MobiDeDRM under the "File type plugins" category) and click the "Customize Plugin" button on Calibre's Preferences->Plugins page. Enter a comma separated list of your 10 digit PIDs. This is not needed if you only want to decode "Kindle for PC" or "Kindle for Mac" books.

Troubleshooting:
If you find that it's not working for you (imported azw's are not converted to mobi format), you can save a lot of time and trouble by trying to add the azw file to Calibre with the command line tools. This will print out a lot of helpful debugging info that can be copied into any online help requests. I'm going to ask you to do it first, anyway, so you might as well get used to it. ;)

Open a command prompt (terminal) and change to the directory where the ebook you're trying to import resides. Then type the command "calibredb add your_ebook.azw". Don't type the quotes and obviously change the 'your_ebook.azw' to whatever the filename of your book is. Copy the resulting output and paste it into any online help request you make.

** Note: the Mac version of Calibre doesn't install the command line tools by default. If you go to the 'Preferences' page and click on the miscellaneous button, you'll see the option to install the command line tools.

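As an aside, the "calibredb add" debugging step above can also be driven from a short Python helper. The sketch below is only an illustration (the path is a placeholder, and it assumes Calibre's command line tools are on your PATH):

    import subprocess
    # Run "calibredb add" on one book and capture everything it prints,
    # so the output can be pasted into a help request.
    def debug_add(path='your_ebook.azw'):
        p = subprocess.Popen(['calibredb', 'add', path],
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out, _ = p.communicate()
        print out
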
@@ -1,12 +0,0 @@
K4PCDeDRM - K4PCDeDRM_X.XX_plugin.zip
Requires Calibre version 0.6.44 or higher.

This work is based on the work of cmbtc, skindle, mobidedrm and skindleAll. I had the much easier job of converting them to a Calibre plugin.

This plugin is meant to remove the DRM from Kindle for PC azw ebooks that are protected with Amazon's Mobi based encryption. It is meant to function without having to install any dependencies... other than having both Calibre installed and Kindle for PC on the same machine, of course.

Installation:

Go to Calibre's Preferences page... click on the Plugins button. Use the file dialog button to select the plugin's zip file (K4PCDeDRM_X.XX_plugin.zip) and click the 'Add' button. You're done.

21
Calibre_Plugins/README-eReaderPDB2PML-plugin.txt
Normal file
@@ -0,0 +1,21 @@
eReader PDB2PML - eReaderPDB2PML_vXX_plugin.zip

All credit given to The Dark Reverser for the original standalone script. I had the much easier job of converting it to a Calibre plugin.

This plugin is meant to convert secure Ereader files (PDB) to unsecured PMLZ files. Calibre can then convert it to whatever format you desire. It is meant to function without having to install any dependencies... other than having Calibre installed, of course. I've included the psyco libraries (compiled for each platform) for speed. If your system can use them, great! Otherwise, they won't be used and things will just work slower.

Installation:
Go to Calibre's Preferences page... click on the Plugins button. Use the file dialog button to select the plugin's zip file (eReaderPDB2PML_vXX_plugin.zip) and click the 'Add' button. You're done.

Configuration:
Highlight the plugin (eReader PDB 2 PML under the "File type plugins" category) and click the "Customize Plugin" button on Calibre's Preferences->Plugins page. Enter your name and last 8 digits of the credit card number separated by a comma: Your Name,12341234

If you've purchased books with more than one credit card, separate the info with a colon: Your Name,12341234:Other Name,23452345 (NOTE: Do NOT put quotes around your name like you do with the original script!!)

Troubleshooting:
If you find that it's not working for you (imported pdb's are not converted to pmlz format), you can save a lot of time and trouble by trying to add the pdb to Calibre with the command line tools. This will print out a lot of helpful debugging info that can be copied into any online help requests. I'm going to ask you to do it first, anyway, so you might as well get used to it. ;)

Open a command prompt (terminal) and change to the directory where the ebook you're trying to import resides. Then type the command "calibredb add your_ebook.pdb". Don't type the quotes and obviously change the 'your_ebook.pdb' to whatever the filename of your book is. Copy the resulting output and paste it into any online help request you make.

** Note: the Mac version of Calibre doesn't install the command line tools by default. If you go to the 'Preferences' page and click on the miscellaneous button, you'll see the option to install the command line tools.

@@ -1,13 +0,0 @@
MobiDeDRM - MobiDeDRM_X.XX_plugin.zip
Requires Calibre version 0.6.44 or higher.

This work is based on the current mobidedrm.py code.

This plugin is meant to remove the DRM from Mobipocket and Kindle ebooks that are protected with Amazon's Mobi based encryption. It is meant to function without having to install any dependencies... other than having Calibre installed. You must know the PID of the device you are using, or the book specific PID, to use this plugin.

Installation:

Go to Calibre's Preferences page... click on the Plugins button. Use the file dialog button to select the plugin's zip file (MobiDeDRM_X.XX_plugin.zip) and click the 'Add' button.

Then enter your PIDs in the plugin customization window separated by commas (with no spaces).

BIN
Calibre_Plugins/eReaderPDB2PML_plugin.zip
Normal file
Binary file not shown.
148
Calibre_Plugins/eReaderPDB2PML_plugin/eReaderPDB2PML_plugin.py
Normal file
@@ -0,0 +1,148 @@
#!/usr/bin/env python

# eReaderPDB2PML_v01_plugin.py
# Released under the terms of the GNU General Public Licence, version 3 or
# later. <http://www.gnu.org/licenses/>
#
# All credit given to The Dark Reverser for the original standalone script.
# I had the much easier job of converting it to a Calibre plugin.
#
# This plugin is meant to convert secure Ereader files (PDB) to unsecured PMLZ files.
# Calibre can then convert it to whatever format you desire.
# It is meant to function without having to install any dependencies...
# other than having Calibre installed, of course. I've included the psyco libraries
# (compiled for each platform) for speed. If your system can use them, great!
# Otherwise, they won't be used and things will just work slower.
#
# Installation:
# Go to Calibre's Preferences page... click on the Plugins button. Use the file
# dialog button to select the plugin's zip file (eReaderPDB2PML_vXX_plugin.zip) and
# click the 'Add' button. You're done.
#
# Configuration:
# Highlight the plugin (eReader PDB 2 PML) and click the
# "Customize Plugin" button on Calibre's Preferences->Plugins page.
# Enter your name and the last 8 digits of the credit card number separated by
# a comma: Your Name,12341234
#
# If you've purchased books with more than one credit card, separate the info with
# a colon: Your Name,12341234:Other Name,23452345
# NOTE: Do NOT put quotes around your name like you do with the original script!!
#
# Revision history:
#   0.1 - Initial release

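# Illustrative sketch only (not part of the original plugin): this is the
# shape of the customization string described above, and how run() below
# splits it into (name, credit card) pairs. The sample string is a placeholder.
def _parse_keydata_example(keydata='Your Name,12341234:Other Name,23452345'):
    pairs = []
    for entry in keydata.split(':'):
        name, cc = entry.split(',')
        pairs.append((name, cc))
    return pairs
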
import sys, os
|
||||
|
||||
from calibre.customize import FileTypePlugin
|
||||
|
||||
class eRdrDeDRM(FileTypePlugin):
|
||||
name = 'eReader PDB 2 PML' # Name of the plugin
|
||||
description = 'Removes DRM from secure pdb files. \
|
||||
Credit given to The Dark Reverser for the original standalone script.'
|
||||
supported_platforms = ['linux', 'osx', 'windows'] # Platforms this plugin will run on
|
||||
author = 'DiapDealer' # The author of this plugin
|
||||
version = (0, 0, 1) # The version number of this plugin
|
||||
file_types = set(['pdb']) # The file types that this plugin will be applied to
|
||||
on_import = True # Run this plugin during the import
|
||||
|
||||
def run(self, path_to_ebook):
|
||||
from calibre.ptempfile import PersistentTemporaryDirectory
|
||||
from calibre.constants import iswindows, isosx
|
||||
pdir = 'windows' if iswindows else 'osx' if isosx else 'linux'
|
||||
ppath = os.path.join(self.sys_insertion_path, pdir)
|
||||
sys.path.insert(0, ppath)
|
||||
#sys.path.append(ppath)
|
||||
|
||||
global bookname, erdr2pml
|
||||
import erdr2pml
|
||||
|
||||
if 'psyco' in sys.modules:
|
||||
print 'Using psyco acceleration for %s.' % pdir
|
||||
else:
|
||||
print 'NOT using psyco acceleration for %s. Conversion may be slow.' % pdir
|
||||
|
||||
infile = path_to_ebook
|
||||
bookname = os.path.splitext(os.path.basename(infile))[0]
|
||||
outdir = PersistentTemporaryDirectory()
|
||||
pmlzfile = self.temporary_file(bookname + '.pmlz')
|
||||
|
||||
if self.site_customization:
|
||||
keydata = self.site_customization
|
||||
ar = keydata.split(':')
|
||||
for i in ar:
|
||||
try:
|
||||
name, cc = i.split(',')
|
||||
except ValueError:
|
||||
sys.path.remove(ppath)
|
||||
print ' Error parsing user supplied data.'
|
||||
return path_to_ebook
|
||||
|
||||
try:
|
||||
print "Processing..."
|
||||
import time
|
||||
start_time = time.time()
|
||||
pmlfilepath = self.convertEreaderToPml(infile, name, cc, outdir)
|
||||
|
||||
if pmlfilepath and pmlfilepath != 1:
|
||||
import zipfile
|
||||
import shutil
|
||||
print " Creating PMLZ file"
|
||||
myZipFile = zipfile.ZipFile(pmlzfile.name,'w',zipfile.ZIP_STORED, False)
|
||||
list = os.listdir(outdir)
|
||||
for file in list:
|
||||
localname = file
|
||||
filePath = os.path.join(outdir,file)
|
||||
if os.path.isfile(filePath):
|
||||
myZipFile.write(filePath, localname)
|
||||
elif os.path.isdir(filePath):
|
||||
imageList = os.listdir(filePath)
|
||||
localimgdir = os.path.basename(filePath)
|
||||
for image in imageList:
|
||||
localname = os.path.join(localimgdir,image)
|
||||
imagePath = os.path.join(filePath,image)
|
||||
if os.path.isfile(imagePath):
|
||||
myZipFile.write(imagePath, localname)
|
||||
myZipFile.close()
|
||||
end_time = time.time()
|
||||
search_time = end_time - start_time
|
||||
print 'elapsed time: %.2f seconds' % (search_time, )
|
||||
print "done"
|
||||
return pmlzfile.name
|
||||
else:
|
||||
raise ValueError('Error Creating PML file.')
|
||||
except ValueError, e:
|
||||
print "Error: %s" % e
|
||||
pass
|
||||
raise Exception('Couldn\'t decrypt pdb file.')
|
||||
else:
|
||||
raise Exception('No name and CC# provided.')
|
||||
|
||||
def convertEreaderToPml(self, infile, name, cc, outdir):
|
||||
|
||||
print " Decoding File"
|
||||
sect = erdr2pml.Sectionizer(infile, 'PNRdPPrs')
|
||||
er = erdr2pml.EreaderProcessor(sect.loadSection, name, cc)
|
||||
|
||||
if er.getNumImages() > 0:
|
||||
print " Extracting images"
|
||||
#imagedir = bookname + '_img/'
|
||||
imagedir = 'images/'
|
||||
imagedirpath = os.path.join(outdir,imagedir)
|
||||
if not os.path.exists(imagedirpath):
|
||||
os.makedirs(imagedirpath)
|
||||
for i in xrange(er.getNumImages()):
|
||||
name, contents = er.getImage(i)
|
||||
file(os.path.join(imagedirpath, name), 'wb').write(contents)
|
||||
|
||||
print " Extracting pml"
|
||||
pml_string = er.getText()
|
||||
pmlfilename = bookname + ".pml"
|
||||
try:
|
||||
file(os.path.join(outdir, pmlfilename),'wb').write(erdr2pml.cleanPML(pml_string))
|
||||
return os.path.join(outdir, pmlfilename)
|
||||
except:
|
||||
return 1
|
||||
|
||||
def customization_help(self, gui=False):
|
||||
return 'Enter Account Name & Last 8 digits of Credit Card number (separate with a comma)'
|
||||
692
Calibre_Plugins/eReaderPDB2PML_plugin/erdr2pml.py
Normal file
@@ -0,0 +1,692 @@
|
||||
#!/usr/bin/env python
|
||||
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
|
||||
#
|
||||
# erdr2pml.py
|
||||
#
|
||||
# This is a python script. You need a Python interpreter to run it.
|
||||
# For example, ActiveState Python, which exists for windows.
|
||||
# Changelog
|
||||
#
|
||||
# Based on ereader2html version 0.08 plus some later small fixes
|
||||
#
|
||||
# 0.01 - Initial version
|
||||
# 0.02 - Support more eReader files. Support bold text and links. Fix PML decoder parsing bug.
|
||||
# 0.03 - Fix incorrect variable usage at one place.
|
||||
# 0.03b - enhancement by DeBockle (version 259 support)
|
||||
# Custom version 0.03 - no change to eReader support, only usability changes
|
||||
# - start of pep-8 indentation (spaces not tab), fix trailing blanks
|
||||
# - version variable, only one place to change
|
||||
# - added main routine, now callable as a library/module,
|
||||
# means tools can add optional support for ereader2html
|
||||
# - outdir is no longer a mandatory parameter (defaults based on input name if missing)
|
||||
# - time taken output to stdout
|
||||
# - Psyco support - reduces runtime by a factor of (over) 3!
|
||||
# E.g. (~600Kb file) 90 secs down to 24 secs
|
||||
# - newstyle classes
|
||||
# - changed map call to list comprehension
|
||||
# may not work with python 2.3
|
||||
# without Psyco this reduces runtime to 90%
|
||||
# E.g. 90 secs down to 77 secs
|
||||
# Psyco with map calls takes longer, do not run with map in Psyco JIT!
|
||||
# - izip calls used instead of zip (if available), further reduction
|
||||
# in run time (factor of 4.5).
|
||||
# E.g. (~600Kb file) 90 secs down to 20 secs
|
||||
# - Python 2.6+ support, avoid DeprecationWarning with sha/sha1
|
||||
# 0.04 - Footnote support, PML output, correct charset in html, support more PML tags
|
||||
# - Feature change, dump out PML file
|
||||
# - Added support for footnote tags. NOTE footnote ids appear to be bad (not usable)
|
||||
# in some pdb files :-( due to the same id being used multiple times
|
||||
# - Added correct charset encoding (pml is based on cp1252)
|
||||
# - Added logging support.
|
||||
# 0.05 - Improved type 272 support for sidebars, links, chapters, metainfo, etc
|
||||
# 0.06 - Merge of 0.04 and 0.05. Improved HTML output
|
||||
# Placed images in subfolder, so that it's possible to just
|
||||
# drop the book.pml file onto DropBook to make an unencrypted
|
||||
# copy of the eReader file.
|
||||
# Using that with Calibre works a lot better than the HTML
|
||||
# conversion in this code.
|
||||
# 0.07 - Further Improved type 272 support for sidebars with all earlier fixes
|
||||
# 0.08 - fixed typos, removed extraneous things
|
||||
# 0.09 - fixed typos in first_pages to first_page to again support older formats
|
||||
# 0.10 - minor cleanups
|
||||
# 0.11 - fixups for using correct xml for footnotes and sidebars for use with Dropbook
|
||||
# 0.12 - Fix added to prevent lowercasing of image names when the pml code itself uses a different case in the link name.
|
||||
# 0.13 - change to unbuffered stdout for use with gui front ends
|
||||
# 0.14 - contributed enhancement to support --make-pmlz switch
|
||||
# 0.15 - enabled high-ascii to pml character encoding. DropBook now works on Mac.
|
||||
|
||||
__version__='0.15'
|
||||
|
||||
# Import Psyco if available
|
||||
try:
|
||||
# Dumb speed hack 1
|
||||
# http://psyco.sourceforge.net
|
||||
import psyco
|
||||
psyco.full()
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
try:
|
||||
# Dumb speed hack 2
|
||||
# All map() calls converted to list comprehension (some use zip)
|
||||
# override zip with izip - saves memory and in rough testing
|
||||
# appears to be faster zip() is only used in the converted map() calls
|
||||
from itertools import izip as zip
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
class Unbuffered:
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
def write(self, data):
|
||||
self.stream.write(data)
|
||||
self.stream.flush()
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.stream, attr)
|
||||
|
||||
import sys
|
||||
sys.stdout=Unbuffered(sys.stdout)
|
||||
|
||||
import struct, binascii, getopt, zlib, os, os.path, urllib, tempfile
|
||||
|
||||
try:
|
||||
from hashlib import sha1
|
||||
except ImportError:
|
||||
# older Python release
|
||||
import sha
|
||||
sha1 = lambda s: sha.new(s)
|
||||
import cgi
|
||||
import logging
|
||||
|
||||
logging.basicConfig()
|
||||
#logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
ECB = 0
|
||||
CBC = 1
|
||||
class Des(object):
|
||||
__pc1 = [56, 48, 40, 32, 24, 16, 8, 0, 57, 49, 41, 33, 25, 17,
|
||||
9, 1, 58, 50, 42, 34, 26, 18, 10, 2, 59, 51, 43, 35,
|
||||
62, 54, 46, 38, 30, 22, 14, 6, 61, 53, 45, 37, 29, 21,
|
||||
13, 5, 60, 52, 44, 36, 28, 20, 12, 4, 27, 19, 11, 3]
|
||||
__left_rotations = [1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]
|
||||
__pc2 = [13, 16, 10, 23, 0, 4,2, 27, 14, 5, 20, 9,
|
||||
22, 18, 11, 3, 25, 7, 15, 6, 26, 19, 12, 1,
|
||||
40, 51, 30, 36, 46, 54, 29, 39, 50, 44, 32, 47,
|
||||
43, 48, 38, 55, 33, 52, 45, 41, 49, 35, 28, 31]
|
||||
__ip = [57, 49, 41, 33, 25, 17, 9, 1, 59, 51, 43, 35, 27, 19, 11, 3,
|
||||
61, 53, 45, 37, 29, 21, 13, 5, 63, 55, 47, 39, 31, 23, 15, 7,
|
||||
56, 48, 40, 32, 24, 16, 8, 0, 58, 50, 42, 34, 26, 18, 10, 2,
|
||||
60, 52, 44, 36, 28, 20, 12, 4, 62, 54, 46, 38, 30, 22, 14, 6]
|
||||
__expansion_table = [31, 0, 1, 2, 3, 4, 3, 4, 5, 6, 7, 8,
|
||||
7, 8, 9, 10, 11, 12,11, 12, 13, 14, 15, 16,
|
||||
15, 16, 17, 18, 19, 20,19, 20, 21, 22, 23, 24,
|
||||
23, 24, 25, 26, 27, 28,27, 28, 29, 30, 31, 0]
|
||||
__sbox = [[14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7,
|
||||
0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8,
|
||||
4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0,
|
||||
15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13],
|
||||
[15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10,
|
||||
3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5,
|
||||
0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15,
|
||||
13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9],
|
||||
[10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8,
|
||||
13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1,
|
||||
13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7,
|
||||
1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12],
|
||||
[7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15,
|
||||
13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9,
|
||||
10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4,
|
||||
3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14],
|
||||
[2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9,
|
||||
14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6,
|
||||
4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14,
|
||||
11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3],
|
||||
[12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11,
|
||||
10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8,
|
||||
9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6,
|
||||
4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13],
|
||||
[4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1,
|
||||
13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6,
|
||||
1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2,
|
||||
6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12],
|
||||
[13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7,
|
||||
1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2,
|
||||
7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8,
|
||||
2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11],]
|
||||
__p = [15, 6, 19, 20, 28, 11,27, 16, 0, 14, 22, 25,
|
||||
4, 17, 30, 9, 1, 7,23,13, 31, 26, 2, 8,18, 12, 29, 5, 21, 10,3, 24]
|
||||
__fp = [39, 7, 47, 15, 55, 23, 63, 31,38, 6, 46, 14, 54, 22, 62, 30,
|
||||
37, 5, 45, 13, 53, 21, 61, 29,36, 4, 44, 12, 52, 20, 60, 28,
|
||||
35, 3, 43, 11, 51, 19, 59, 27,34, 2, 42, 10, 50, 18, 58, 26,
|
||||
33, 1, 41, 9, 49, 17, 57, 25,32, 0, 40, 8, 48, 16, 56, 24]
|
||||
# Type of crypting being done
|
||||
ENCRYPT = 0x00
|
||||
DECRYPT = 0x01
|
||||
def __init__(self, key, mode=ECB, IV=None):
|
||||
if len(key) != 8:
|
||||
raise ValueError("Invalid DES key size. Key must be exactly 8 bytes long.")
|
||||
self.block_size = 8
|
||||
self.key_size = 8
|
||||
self.__padding = ''
|
||||
self.setMode(mode)
|
||||
if IV:
|
||||
self.setIV(IV)
|
||||
self.L = []
|
||||
self.R = []
|
||||
self.Kn = [ [0] * 48 ] * 16 # 16 48-bit keys (K1 - K16)
|
||||
self.final = []
|
||||
self.setKey(key)
|
||||
def getKey(self):
|
||||
return self.__key
|
||||
def setKey(self, key):
|
||||
self.__key = key
|
||||
self.__create_sub_keys()
|
||||
def getMode(self):
|
||||
return self.__mode
|
||||
def setMode(self, mode):
|
||||
self.__mode = mode
|
||||
def getIV(self):
|
||||
return self.__iv
|
||||
def setIV(self, IV):
|
||||
if not IV or len(IV) != self.block_size:
|
||||
raise ValueError("Invalid Initial Value (IV), must be a multiple of " + str(self.block_size) + " bytes")
|
||||
self.__iv = IV
|
||||
def getPadding(self):
|
||||
return self.__padding
|
||||
def __String_to_BitList(self, data):
|
||||
l = len(data) * 8
|
||||
result = [0] * l
|
||||
pos = 0
|
||||
for c in data:
|
||||
i = 7
|
||||
ch = ord(c)
|
||||
while i >= 0:
|
||||
if ch & (1 << i) != 0:
|
||||
result[pos] = 1
|
||||
else:
|
||||
result[pos] = 0
|
||||
pos += 1
|
||||
i -= 1
|
||||
return result
|
||||
def __BitList_to_String(self, data):
|
||||
result = ''
|
||||
pos = 0
|
||||
c = 0
|
||||
while pos < len(data):
|
||||
c += data[pos] << (7 - (pos % 8))
|
||||
if (pos % 8) == 7:
|
||||
result += chr(c)
|
||||
c = 0
|
||||
pos += 1
|
||||
return result
|
||||
def __permutate(self, table, block):
|
||||
return [block[x] for x in table]
|
||||
def __create_sub_keys(self):
|
||||
key = self.__permutate(Des.__pc1, self.__String_to_BitList(self.getKey()))
|
||||
i = 0
|
||||
self.L = key[:28]
|
||||
self.R = key[28:]
|
||||
while i < 16:
|
||||
j = 0
|
||||
while j < Des.__left_rotations[i]:
|
||||
self.L.append(self.L[0])
|
||||
del self.L[0]
|
||||
self.R.append(self.R[0])
|
||||
del self.R[0]
|
||||
j += 1
|
||||
self.Kn[i] = self.__permutate(Des.__pc2, self.L + self.R)
|
||||
i += 1
|
||||
def __des_crypt(self, block, crypt_type):
|
||||
block = self.__permutate(Des.__ip, block)
|
||||
self.L = block[:32]
|
||||
self.R = block[32:]
|
||||
if crypt_type == Des.ENCRYPT:
|
||||
iteration = 0
|
||||
iteration_adjustment = 1
|
||||
else:
|
||||
iteration = 15
|
||||
iteration_adjustment = -1
|
||||
i = 0
|
||||
while i < 16:
|
||||
tempR = self.R[:]
|
||||
self.R = self.__permutate(Des.__expansion_table, self.R)
|
||||
self.R = [x ^ y for x,y in zip(self.R, self.Kn[iteration])]
|
||||
B = [self.R[:6], self.R[6:12], self.R[12:18], self.R[18:24], self.R[24:30], self.R[30:36], self.R[36:42], self.R[42:]]
|
||||
j = 0
|
||||
Bn = [0] * 32
|
||||
pos = 0
|
||||
while j < 8:
|
||||
m = (B[j][0] << 1) + B[j][5]
|
||||
n = (B[j][1] << 3) + (B[j][2] << 2) + (B[j][3] << 1) + B[j][4]
|
||||
v = Des.__sbox[j][(m << 4) + n]
|
||||
Bn[pos] = (v & 8) >> 3
|
||||
Bn[pos + 1] = (v & 4) >> 2
|
||||
Bn[pos + 2] = (v & 2) >> 1
|
||||
Bn[pos + 3] = v & 1
|
||||
pos += 4
|
||||
j += 1
|
||||
self.R = self.__permutate(Des.__p, Bn)
|
||||
self.R = [x ^ y for x, y in zip(self.R, self.L)]
|
||||
self.L = tempR
|
||||
i += 1
|
||||
iteration += iteration_adjustment
|
||||
self.final = self.__permutate(Des.__fp, self.R + self.L)
|
||||
return self.final
|
||||
def crypt(self, data, crypt_type):
|
||||
if not data:
|
||||
return ''
|
||||
if len(data) % self.block_size != 0:
|
||||
if crypt_type == Des.DECRYPT: # Decryption must work on 8 byte blocks
|
||||
raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n.")
|
||||
if not self.getPadding():
|
||||
raise ValueError("Invalid data length, data must be a multiple of " + str(self.block_size) + " bytes\n. Try setting the optional padding character")
|
||||
else:
|
||||
data += (self.block_size - (len(data) % self.block_size)) * self.getPadding()
|
||||
if self.getMode() == CBC:
|
||||
if self.getIV():
|
||||
iv = self.__String_to_BitList(self.getIV())
|
||||
else:
|
||||
raise ValueError("For CBC mode, you must supply the Initial Value (IV) for ciphering")
|
||||
i = 0
|
||||
dict = {}
|
||||
result = []
|
||||
while i < len(data):
|
||||
block = self.__String_to_BitList(data[i:i+8])
|
||||
if self.getMode() == CBC:
|
||||
if crypt_type == Des.ENCRYPT:
|
||||
block = [x ^ y for x, y in zip(block, iv)]
|
||||
processed_block = self.__des_crypt(block, crypt_type)
|
||||
if crypt_type == Des.DECRYPT:
|
||||
processed_block = [x ^ y for x, y in zip(processed_block, iv)]
|
||||
iv = block
|
||||
else:
|
||||
iv = processed_block
|
||||
else:
|
||||
processed_block = self.__des_crypt(block, crypt_type)
|
||||
result.append(self.__BitList_to_String(processed_block))
|
||||
i += 8
|
||||
if crypt_type == Des.DECRYPT and self.getPadding():
|
||||
s = result[-1]
|
||||
while s[-1] == self.getPadding():
|
||||
s = s[:-1]
|
||||
result[-1] = s
|
||||
return ''.join(result)
|
||||
def encrypt(self, data, pad=''):
|
||||
self.__padding = pad
|
||||
return self.crypt(data, Des.ENCRYPT)
|
||||
def decrypt(self, data, pad=''):
|
||||
self.__padding = pad
|
||||
return self.crypt(data, Des.DECRYPT)
|
||||
|
||||
class Sectionizer(object):
|
||||
def __init__(self, filename, ident):
|
||||
self.contents = file(filename, 'rb').read()
|
||||
self.header = self.contents[0:72]
|
||||
self.num_sections, = struct.unpack('>H', self.contents[76:78])
|
||||
if self.header[0x3C:0x3C+8] != ident:
|
||||
raise ValueError('Invalid file format')
|
||||
self.sections = []
|
||||
for i in xrange(self.num_sections):
|
||||
offset, a1,a2,a3,a4 = struct.unpack('>LBBBB', self.contents[78+i*8:78+i*8+8])
|
||||
flags, val = a1, a2<<16|a3<<8|a4
|
||||
self.sections.append( (offset, flags, val) )
|
||||
def loadSection(self, section):
|
||||
if section + 1 == self.num_sections:
|
||||
end_off = len(self.contents)
|
||||
else:
|
||||
end_off = self.sections[section + 1][0]
|
||||
off = self.sections[section][0]
|
||||
return self.contents[off:end_off]
|
||||
|
||||
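# Illustrative sketch only (not part of the original script): this is how the
# plugin wrapper opens an eReader PDB ('PNRdPPrs' is the expected ident) and
# hands its sections to EreaderProcessor, defined further below. The path,
# name and credit-card digits here are placeholders.
def _open_pdb_example(path='book.pdb', name='Your Name', cc='12341234'):
    sect = Sectionizer(path, 'PNRdPPrs')
    er = EreaderProcessor(sect.loadSection, name, cc)
    return er.getNumImages()
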
def sanitizeFileName(s):
|
||||
r = ''
|
||||
for c in s:
|
||||
if c in "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_.-":
|
||||
r += c
|
||||
return r
|
||||
|
||||
def fixKey(key):
|
||||
def fixByte(b):
|
||||
return b ^ ((b ^ (b<<1) ^ (b<<2) ^ (b<<3) ^ (b<<4) ^ (b<<5) ^ (b<<6) ^ (b<<7) ^ 0x80) & 0x80)
|
||||
return "".join([chr(fixByte(ord(a))) for a in key])
|
||||
|
||||
def deXOR(text, sp, table):
|
||||
r=''
|
||||
j = sp
|
||||
for i in xrange(len(text)):
|
||||
r += chr(ord(table[j]) ^ ord(text[i]))
|
||||
j = j + 1
|
||||
if j == len(table):
|
||||
j = 0
|
||||
return r
|
||||
|
||||
class EreaderProcessor(object):
|
||||
def __init__(self, section_reader, username, creditcard):
|
||||
self.section_reader = section_reader
|
||||
data = section_reader(0)
|
||||
version, = struct.unpack('>H', data[0:2])
|
||||
self.version = version
|
||||
logging.info('eReader file format version %s', version)
|
||||
if version != 272 and version != 260 and version != 259:
|
||||
raise ValueError('incorrect eReader version %d (error 1)' % version)
|
||||
data = section_reader(1)
|
||||
self.data = data
|
||||
des = Des(fixKey(data[0:8]))
|
||||
cookie_shuf, cookie_size = struct.unpack('>LL', des.decrypt(data[-8:]))
|
||||
if cookie_shuf < 3 or cookie_shuf > 0x14 or cookie_size < 0xf0 or cookie_size > 0x200:
|
||||
raise ValueError('incorrect eReader version (error 2)')
|
||||
input = des.decrypt(data[-cookie_size:])
|
||||
def unshuff(data, shuf):
|
||||
r = [''] * len(data)
|
||||
j = 0
|
||||
for i in xrange(len(data)):
|
||||
j = (j + shuf) % len(data)
|
||||
r[j] = data[i]
|
||||
assert len("".join(r)) == len(data)
|
||||
return "".join(r)
|
||||
r = unshuff(input[0:-8], cookie_shuf)
|
||||
|
||||
def fixUsername(s):
|
||||
r = ''
|
||||
for c in s.lower():
|
||||
if (c >= 'a' and c <= 'z' or c >= '0' and c <= '9'):
|
||||
r += c
|
||||
return r
|
||||
|
||||
user_key = struct.pack('>LL', binascii.crc32(fixUsername(username)) & 0xffffffff, binascii.crc32(creditcard[-8:])& 0xffffffff)
|
||||
drm_sub_version = struct.unpack('>H', r[0:2])[0]
|
||||
self.num_text_pages = struct.unpack('>H', r[2:4])[0] - 1
|
||||
self.num_image_pages = struct.unpack('>H', r[26:26+2])[0]
|
||||
self.first_image_page = struct.unpack('>H', r[24:24+2])[0]
|
||||
if self.version == 272:
|
||||
self.num_footnote_pages = struct.unpack('>H', r[46:46+2])[0]
|
||||
self.first_footnote_page = struct.unpack('>H', r[44:44+2])[0]
|
||||
self.num_sidebar_pages = struct.unpack('>H', r[38:38+2])[0]
|
||||
self.first_sidebar_page = struct.unpack('>H', r[36:36+2])[0]
|
||||
# self.num_bookinfo_pages = struct.unpack('>H', r[34:34+2])[0]
|
||||
# self.first_bookinfo_page = struct.unpack('>H', r[32:32+2])[0]
|
||||
# self.num_chapter_pages = struct.unpack('>H', r[22:22+2])[0]
|
||||
# self.first_chapter_page = struct.unpack('>H', r[20:20+2])[0]
|
||||
# self.num_link_pages = struct.unpack('>H', r[30:30+2])[0]
|
||||
# self.first_link_page = struct.unpack('>H', r[28:28+2])[0]
|
||||
# self.num_xtextsize_pages = struct.unpack('>H', r[54:54+2])[0]
|
||||
# self.first_xtextsize_page = struct.unpack('>H', r[52:52+2])[0]
|
||||
|
||||
# Before data record 1 was decrypted and unshuffled, it contained data used to
# create an XOR table, which is then used to fix footnote record 0, link records, chapter records, etc.
|
||||
self.xortable_offset = struct.unpack('>H', r[40:40+2])[0]
|
||||
self.xortable_size = struct.unpack('>H', r[42:42+2])[0]
|
||||
self.xortable = self.data[self.xortable_offset:self.xortable_offset + self.xortable_size]
|
||||
else:
|
||||
self.num_footnote_pages = 0
|
||||
self.num_sidebar_pages = 0
|
||||
self.first_footnote_page = -1
|
||||
self.first_sidebar_page = -1
|
||||
# self.num_bookinfo_pages = 0
|
||||
# self.num_chapter_pages = 0
|
||||
# self.num_link_pages = 0
|
||||
# self.num_xtextsize_pages = 0
|
||||
# self.first_bookinfo_page = -1
|
||||
# self.first_chapter_page = -1
|
||||
# self.first_link_page = -1
|
||||
# self.first_xtextsize_page = -1
|
||||
|
||||
logging.debug('self.num_text_pages %d', self.num_text_pages)
|
||||
logging.debug('self.num_footnote_pages %d, self.first_footnote_page %d', self.num_footnote_pages , self.first_footnote_page)
|
||||
logging.debug('self.num_sidebar_pages %d, self.first_sidebar_page %d', self.num_sidebar_pages , self.first_sidebar_page)
|
||||
self.flags = struct.unpack('>L', r[4:8])[0]
|
||||
reqd_flags = (1<<9) | (1<<7) | (1<<10)
|
||||
if (self.flags & reqd_flags) != reqd_flags:
|
||||
print "Flags: 0x%X" % self.flags
|
||||
raise ValueError('incompatible eReader file')
|
||||
des = Des(fixKey(user_key))
|
||||
if version == 259:
|
||||
if drm_sub_version != 7:
|
||||
raise ValueError('incorrect eReader version %d (error 3)' % drm_sub_version)
|
||||
encrypted_key_sha = r[44:44+20]
|
||||
encrypted_key = r[64:64+8]
|
||||
elif version == 260:
|
||||
if drm_sub_version != 13:
|
||||
raise ValueError('incorrect eReader version %d (error 3)' % drm_sub_version)
|
||||
encrypted_key = r[44:44+8]
|
||||
encrypted_key_sha = r[52:52+20]
|
||||
elif version == 272:
|
||||
encrypted_key = r[172:172+8]
|
||||
encrypted_key_sha = r[56:56+20]
|
||||
self.content_key = des.decrypt(encrypted_key)
|
||||
if sha1(self.content_key).digest() != encrypted_key_sha:
|
||||
raise ValueError('Incorrect Name and/or Credit Card')
|
||||
|
||||
def getNumImages(self):
|
||||
return self.num_image_pages
|
||||
|
||||
def getImage(self, i):
|
||||
sect = self.section_reader(self.first_image_page + i)
|
||||
name = sect[4:4+32].strip('\0')
|
||||
data = sect[62:]
|
||||
return sanitizeFileName(name), data
|
||||
|
||||
|
||||
# def getChapterNamePMLOffsetData(self):
|
||||
# cv = ''
|
||||
# if self.num_chapter_pages > 0:
|
||||
# for i in xrange(self.num_chapter_pages):
|
||||
# chaps = self.section_reader(self.first_chapter_page + i)
|
||||
# j = i % self.xortable_size
|
||||
# offname = deXOR(chaps, j, self.xortable)
|
||||
# offset = struct.unpack('>L', offname[0:4])[0]
|
||||
# name = offname[4:].strip('\0')
|
||||
# cv += '%d|%s\n' % (offset, name)
|
||||
# return cv
|
||||
|
||||
# def getLinkNamePMLOffsetData(self):
|
||||
# lv = ''
|
||||
# if self.num_link_pages > 0:
|
||||
# for i in xrange(self.num_link_pages):
|
||||
# links = self.section_reader(self.first_link_page + i)
|
||||
# j = i % self.xortable_size
|
||||
# offname = deXOR(links, j, self.xortable)
|
||||
# offset = struct.unpack('>L', offname[0:4])[0]
|
||||
# name = offname[4:].strip('\0')
|
||||
# lv += '%d|%s\n' % (offset, name)
|
||||
# return lv
|
||||
|
||||
# def getExpandedTextSizesData(self):
|
||||
# ts = ''
|
||||
# if self.num_xtextsize_pages > 0:
|
||||
# tsize = deXOR(self.section_reader(self.first_xtextsize_page), 0, self.xortable)
|
||||
# for i in xrange(self.num_text_pages):
|
||||
# xsize = struct.unpack('>H', tsize[0:2])[0]
|
||||
# ts += "%d\n" % xsize
|
||||
# tsize = tsize[2:]
|
||||
# return ts
|
||||
|
||||
# def getBookInfo(self):
|
||||
# bkinfo = ''
|
||||
# if self.num_bookinfo_pages > 0:
|
||||
# info = self.section_reader(self.first_bookinfo_page)
|
||||
# bkinfo = deXOR(info, 0, self.xortable)
|
||||
# bkinfo = bkinfo.replace('\0','|')
|
||||
# bkinfo += '\n'
|
||||
# return bkinfo
|
||||
|
||||
def getText(self):
|
||||
des = Des(fixKey(self.content_key))
|
||||
r = ''
|
||||
for i in xrange(self.num_text_pages):
|
||||
logging.debug('get page %d', i)
|
||||
r += zlib.decompress(des.decrypt(self.section_reader(1 + i)))
|
||||
|
||||
# now handle footnotes pages
|
||||
if self.num_footnote_pages > 0:
|
||||
r += '\n'
|
||||
# the record 0 of the footnote section must pass through the Xor Table to make it useful
|
||||
sect = self.section_reader(self.first_footnote_page)
|
||||
fnote_ids = deXOR(sect, 0, self.xortable)
|
||||
# the remaining records of the footnote sections need to be decoded with the content_key and zlib inflated
|
||||
des = Des(fixKey(self.content_key))
|
||||
for i in xrange(1,self.num_footnote_pages):
|
||||
logging.debug('get footnotepage %d', i)
|
||||
id_len = ord(fnote_ids[2])
|
||||
id = fnote_ids[3:3+id_len]
|
||||
fmarker = '<footnote id="%s">\n' % id
|
||||
fmarker += zlib.decompress(des.decrypt(self.section_reader(self.first_footnote_page + i)))
|
||||
fmarker += '\n</footnote>\n'
|
||||
r += fmarker
|
||||
fnote_ids = fnote_ids[id_len+4:]
|
||||
|
||||
# now handle sidebar pages
|
||||
if self.num_sidebar_pages > 0:
|
||||
r += '\n'
|
||||
# the record 0 of the sidebar section must pass through the Xor Table to make it useful
|
||||
sect = self.section_reader(self.first_sidebar_page)
|
||||
sbar_ids = deXOR(sect, 0, self.xortable)
|
||||
# the remaining records of the sidebar sections need to be decoded with the content_key and zlib inflated
|
||||
des = Des(fixKey(self.content_key))
|
||||
for i in xrange(1,self.num_sidebar_pages):
|
||||
id_len = ord(sbar_ids[2])
|
||||
id = sbar_ids[3:3+id_len]
|
||||
smarker = '<sidebar id="%s">\n' % id
|
||||
# sidebar records are read from the sidebar section
smarker += zlib.decompress(des.decrypt(self.section_reader(self.first_sidebar_page + i)))
|
||||
smarker += '\n</sidebar>\n'
|
||||
r += smarker
|
||||
sbar_ids = sbar_ids[id_len+4:]
|
||||
|
||||
return r
|
||||
|
||||
def cleanPML(pml):
|
||||
# Convert special characters to proper PML code. High-ASCII characters start at (\x80, \a128) and go up to (\xff, \a255)
|
||||
pml2 = pml
|
||||
for k in xrange(128,256):
|
||||
badChar = chr(k)
|
||||
pml2 = pml2.replace(badChar, '\\a%03d' % k)
|
||||
return pml2
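# Illustration (sketch, not from the original script): every high-ASCII byte is
# escaped as a PML \aNNN code, e.g.:
#
#   cleanPML('caf\xe9') == 'caf\\a233'      # 0xE9 == 233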
|
||||
|
||||
def convertEreaderToPml(infile, name, cc, outdir):
|
||||
if not os.path.exists(outdir):
|
||||
os.makedirs(outdir)
|
||||
|
||||
print " Decoding File"
|
||||
sect = Sectionizer(infile, 'PNRdPPrs')
|
||||
er = EreaderProcessor(sect.loadSection, name, cc)
|
||||
|
||||
if er.getNumImages() > 0:
|
||||
print " Extracting images"
|
||||
imagedir = bookname + '_img/'
|
||||
imagedirpath = os.path.join(outdir,imagedir)
|
||||
if not os.path.exists(imagedirpath):
|
||||
os.makedirs(imagedirpath)
|
||||
for i in xrange(er.getNumImages()):
|
||||
name, contents = er.getImage(i)
|
||||
file(os.path.join(imagedirpath, name), 'wb').write(contents)
|
||||
|
||||
print " Extracting pml"
|
||||
pml_string = er.getText()
|
||||
pmlfilename = bookname + ".pml"
|
||||
file(os.path.join(outdir, pmlfilename),'wb').write(cleanPML(pml_string))
|
||||
|
||||
# bkinfo = er.getBookInfo()
|
||||
# if bkinfo != '':
|
||||
# print " Extracting book meta information"
|
||||
# file(os.path.join(outdir, 'bookinfo.txt'),'wb').write(bkinfo)
|
||||
|
||||
|
||||
def usage():
|
||||
print "Converts DRMed eReader books to PML Source"
|
||||
print "Usage:"
|
||||
print " erdr2pml [options] infile.pdb [outdir] \"your name\" credit_card_number "
|
||||
print " "
|
||||
print "Options: "
|
||||
print " -h prints this message"
|
||||
print " --make-pmlz create PMLZ instead of using output directory"
|
||||
print " "
|
||||
print "Note:"
|
||||
print " if omitted, outdir defaults to a name based on 'infile.pdb'"
|
||||
print " It's enough to enter the last 8 digits of the credit card number"
|
||||
return
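# Illustration (hypothetical file name and made-up credentials, not from the
# original script): a typical run that produces a PMLZ next to the input file
# would be
#
#   python erdr2pml.py --make-pmlz MyBook.pdb "Jane Reader" 12345678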
|
||||
|
||||
def main(argv=None):
|
||||
global bookname
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "h", ["make-pmlz"])
|
||||
except getopt.GetoptError, err:
|
||||
print str(err)
|
||||
usage()
|
||||
return 1
|
||||
make_pmlz = False
|
||||
zipname = None
|
||||
for o, a in opts:
|
||||
if o == "-h":
|
||||
usage()
|
||||
return 0
|
||||
elif o == "--make-pmlz":
|
||||
make_pmlz = True
|
||||
zipname = ''
|
||||
|
||||
print "eRdr2Pml v%s. Copyright (c) 2009 The Dark Reverser" % __version__
|
||||
|
||||
if len(args)!=3 and len(args)!=4:
|
||||
usage()
|
||||
return 1
|
||||
else:
|
||||
if len(args)==3:
|
||||
infile, name, cc = args[0], args[1], args[2]
|
||||
outdir = infile[:-4] + '_Source'
|
||||
elif len(args)==4:
|
||||
infile, outdir, name, cc = args[0], args[1], args[2], args[3]
|
||||
|
||||
if make_pmlz :
|
||||
# ignore specified outdir, use tempdir instead
|
||||
outdir = tempfile.mkdtemp()
|
||||
|
||||
bookname = os.path.splitext(os.path.basename(infile))[0]
|
||||
|
||||
try:
|
||||
print "Processing..."
|
||||
import time
|
||||
start_time = time.time()
|
||||
convertEreaderToPml(infile, name, cc, outdir)
|
||||
|
||||
if make_pmlz :
|
||||
import zipfile
|
||||
import shutil
|
||||
print " Creating PMLZ file"
|
||||
zipname = infile[:-4] + '.pmlz'
|
||||
myZipFile = zipfile.ZipFile(zipname,'w',zipfile.ZIP_STORED, False)
|
||||
list = os.listdir(outdir)
|
||||
for file in list:
|
||||
localname = file
|
||||
filePath = os.path.join(outdir,file)
|
||||
if os.path.isfile(filePath):
|
||||
myZipFile.write(filePath, localname)
|
||||
elif os.path.isdir(filePath):
|
||||
imageList = os.listdir(filePath)
|
||||
localimgdir = os.path.basename(filePath)
|
||||
for image in imageList:
|
||||
localname = os.path.join(localimgdir,image)
|
||||
imagePath = os.path.join(filePath,image)
|
||||
if os.path.isfile(imagePath):
|
||||
myZipFile.write(imagePath, localname)
|
||||
myZipFile.close()
|
||||
# remove temporary directory
|
||||
shutil.rmtree(outdir)
|
||||
|
||||
end_time = time.time()
|
||||
search_time = end_time - start_time
|
||||
print 'elapsed time: %.2f seconds' % (search_time, )
|
||||
if make_pmlz :
|
||||
print 'output is %s' % zipname
|
||||
else :
|
||||
print 'output in %s' % outdir
|
||||
print "done"
|
||||
except ValueError, e:
|
||||
print "Error: %s" % e
|
||||
return 1
|
||||
return 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
#import cProfile
|
||||
#command = """sys.exit(main())"""
|
||||
#cProfile.runctx( command, globals(), locals(), filename="cprofile.profile" )
|
||||
|
||||
sys.exit(main())
|
||||
@@ -0,0 +1,47 @@
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 41
|
||||
/svn/!svn/ver/70200/psyco/dist/py-support
|
||||
END
|
||||
core.py
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 49
|
||||
/svn/!svn/ver/70200/psyco/dist/py-support/core.py
|
||||
END
|
||||
support.py
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 52
|
||||
/svn/!svn/ver/49315/psyco/dist/py-support/support.py
|
||||
END
|
||||
classes.py
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 52
|
||||
/svn/!svn/ver/35003/psyco/dist/py-support/classes.py
|
||||
END
|
||||
__init__.py
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 53
|
||||
/svn/!svn/ver/35003/psyco/dist/py-support/__init__.py
|
||||
END
|
||||
logger.py
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 51
|
||||
/svn/!svn/ver/23284/psyco/dist/py-support/logger.py
|
||||
END
|
||||
kdictproxy.py
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 55
|
||||
/svn/!svn/ver/35003/psyco/dist/py-support/kdictproxy.py
|
||||
END
|
||||
profiler.py
|
||||
K 25
|
||||
svn:wc:ra_dav:version-url
|
||||
V 53
|
||||
/svn/!svn/ver/70200/psyco/dist/py-support/profiler.py
|
||||
END
|
||||
@@ -0,0 +1,7 @@
|
||||
K 10
|
||||
svn:ignore
|
||||
V 14
|
||||
*~
|
||||
*.pyc
|
||||
*.pyo
|
||||
END
|
||||
266
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/.svn/entries
Normal file
@@ -0,0 +1,266 @@
|
||||
10
|
||||
|
||||
dir
|
||||
78269
|
||||
http://codespeak.net/svn/psyco/dist/py-support
|
||||
http://codespeak.net/svn
|
||||
|
||||
|
||||
|
||||
2009-12-18T16:35:35.119276Z
|
||||
70200
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
|
||||
|
||||
core.py
|
||||
file
|
||||
|
||||
|
||||
|
||||
|
||||
2010-10-25T15:10:42.000000Z
|
||||
3b362177a839893c9e867880b3a7cef3
|
||||
2009-12-18T16:35:35.119276Z
|
||||
70200
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
8144
|
||||
|
||||
support.py
|
||||
file
|
||||
|
||||
|
||||
|
||||
|
||||
2010-10-25T15:10:42.000000Z
|
||||
b0551e975d774f2f7f58a29ed4b6b90e
|
||||
2007-12-03T12:27:25.632574Z
|
||||
49315
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
6043
|
||||
|
||||
classes.py
|
||||
file
|
||||
|
||||
|
||||
|
||||
|
||||
2010-10-25T15:10:42.000000Z
|
||||
5932ed955198d16ec17285dfb195d341
|
||||
2006-11-26T13:03:26.949973Z
|
||||
35003
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
1440
|
||||
|
||||
__init__.py
|
||||
file
|
||||
|
||||
|
||||
|
||||
|
||||
2010-10-25T15:10:42.000000Z
|
||||
219582b5182dfa38a9119d059a71965f
|
||||
2006-11-26T13:03:26.949973Z
|
||||
35003
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
1895
|
||||
|
||||
logger.py
|
||||
file
|
||||
|
||||
|
||||
|
||||
|
||||
2010-10-25T15:10:42.000000Z
|
||||
aa21f905df036af43082e1ea2a2561ee
|
||||
2006-02-13T15:02:51.744168Z
|
||||
23284
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
2678
|
||||
|
||||
kdictproxy.py
|
||||
file
|
||||
|
||||
|
||||
|
||||
|
||||
2010-10-25T15:10:42.000000Z
|
||||
1c8611748dcee5b29848bf25be3ec473
|
||||
2006-11-26T13:03:26.949973Z
|
||||
35003
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
4369
|
||||
|
||||
profiler.py
|
||||
file
|
||||
|
||||
|
||||
|
||||
|
||||
2010-10-25T15:10:42.000000Z
|
||||
858162366cbc39cd9e249e35e6f510c4
|
||||
2009-12-18T16:35:35.119276Z
|
||||
70200
|
||||
arigo
|
||||
has-props
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
11238
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
K 13
|
||||
svn:eol-style
|
||||
V 6
|
||||
native
|
||||
K 12
|
||||
svn:keywords
|
||||
V 23
|
||||
Author Date Id Revision
|
||||
END
|
||||
@@ -0,0 +1,9 @@
|
||||
K 13
|
||||
svn:eol-style
|
||||
V 6
|
||||
native
|
||||
K 12
|
||||
svn:keywords
|
||||
V 23
|
||||
Author Date Id Revision
|
||||
END
|
||||
@@ -0,0 +1,9 @@
|
||||
K 13
|
||||
svn:eol-style
|
||||
V 6
|
||||
native
|
||||
K 12
|
||||
svn:keywords
|
||||
V 23
|
||||
Author Date Id Revision
|
||||
END
|
||||
@@ -0,0 +1,9 @@
|
||||
K 13
|
||||
svn:eol-style
|
||||
V 6
|
||||
native
|
||||
K 12
|
||||
svn:keywords
|
||||
V 23
|
||||
Author Date Id Revision
|
||||
END
|
||||
@@ -0,0 +1,9 @@
|
||||
K 13
|
||||
svn:eol-style
|
||||
V 6
|
||||
native
|
||||
K 12
|
||||
svn:keywords
|
||||
V 23
|
||||
Author Date Id Revision
|
||||
END
|
||||
@@ -0,0 +1,9 @@
|
||||
K 13
|
||||
svn:eol-style
|
||||
V 6
|
||||
native
|
||||
K 12
|
||||
svn:keywords
|
||||
V 23
|
||||
Author Date Id Revision
|
||||
END
|
||||
@@ -0,0 +1,9 @@
|
||||
K 13
|
||||
svn:eol-style
|
||||
V 6
|
||||
native
|
||||
K 12
|
||||
svn:keywords
|
||||
V 23
|
||||
Author Date Id Revision
|
||||
END
|
||||
@@ -0,0 +1,54 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco top-level file of the Psyco package.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco -- the Python Specializing Compiler.
|
||||
|
||||
Typical usage: add the following lines to your application's main module,
|
||||
preferably after the other imports:
|
||||
|
||||
try:
|
||||
import psyco
|
||||
psyco.full()
|
||||
except ImportError:
|
||||
print 'Psyco not installed, the program will just run slower'
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
|
||||
#
|
||||
# This module is present to make 'psyco' a package and to
|
||||
# publish the main functions and variables.
|
||||
#
|
||||
# More documentation can be found in core.py.
|
||||
#
|
||||
|
||||
|
||||
# Try to import the dynamic-loading _psyco and report errors
|
||||
try:
|
||||
import _psyco
|
||||
except ImportError, e:
|
||||
extramsg = ''
|
||||
import sys, imp
|
||||
try:
|
||||
file, filename, (suffix, mode, type) = imp.find_module('_psyco', __path__)
|
||||
except ImportError:
|
||||
ext = [suffix for suffix, mode, type in imp.get_suffixes()
|
||||
if type == imp.C_EXTENSION]
|
||||
if ext:
|
||||
extramsg = (" (cannot locate the compiled extension '_psyco%s' "
|
||||
"in the package path '%s')" % (ext[0], '; '.join(__path__)))
|
||||
else:
|
||||
extramsg = (" (check that the compiled extension '%s' is for "
|
||||
"the correct Python version; this is Python %s)" %
|
||||
(filename, sys.version.split()[0]))
|
||||
raise ImportError, str(e) + extramsg
|
||||
|
||||
# Publish important data by importing them in the package
|
||||
from support import __version__, error, warning, _getrealframe, _getemulframe
|
||||
from support import version_info, __version__ as hexversion
|
||||
from core import full, profile, background, runonly, stop, cannotcompile
|
||||
from core import log, bind, unbind, proxy, unproxy, dumpcodebuf
|
||||
from _psyco import setfilter
|
||||
from _psyco import compact, compacttype
|
||||
@@ -0,0 +1,42 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco class support module.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco class support module.
|
||||
|
||||
'psyco.classes.psyobj' is an alternate Psyco-optimized root for classes.
|
||||
Any class inheriting from it or using the metaclass '__metaclass__' might
|
||||
get optimized specifically for Psyco. It is equivalent to call
|
||||
psyco.bind() on the class object after its creation.
|
||||
|
||||
Importing everything from psyco.classes in a module will import the
|
||||
'__metaclass__' name, so all classes defined after a
|
||||
|
||||
from psyco.classes import *
|
||||
|
||||
will automatically use the Psyco-optimized metaclass.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
__all__ = ['psyobj', 'psymetaclass', '__metaclass__']
|
||||
|
||||
|
||||
from _psyco import compacttype
|
||||
import core
|
||||
from types import FunctionType
|
||||
|
||||
class psymetaclass(compacttype):
|
||||
"Psyco-optimized meta-class. Turns all methods into Psyco proxies."
|
||||
|
||||
def __new__(cls, name, bases, dict):
|
||||
bindlist = dict.get('__psyco__bind__')
|
||||
if bindlist is None:
|
||||
bindlist = [key for key, value in dict.items()
|
||||
if isinstance(value, FunctionType)]
|
||||
for attr in bindlist:
|
||||
dict[attr] = core.proxy(dict[attr])
|
||||
return super(psymetaclass, cls).__new__(cls, name, bases, dict)
|
||||
|
||||
psyobj = psymetaclass("psyobj", (), {})
|
||||
__metaclass__ = psymetaclass
|
||||
@@ -0,0 +1,231 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco main functions.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco main functions.
|
||||
|
||||
Here are the routines that you can use from your applications.
|
||||
These are mostly interfaces to the C core, but they depend on
|
||||
the Python version.
|
||||
|
||||
You can use these functions from the 'psyco' module instead of
|
||||
'psyco.core', e.g.
|
||||
|
||||
import psyco
|
||||
psyco.log('/tmp/psyco.log')
|
||||
psyco.profile()
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import _psyco
|
||||
import types
|
||||
from support import *
|
||||
|
||||
newfunction = types.FunctionType
|
||||
newinstancemethod = types.MethodType
|
||||
|
||||
|
||||
# Default charge profiler values
|
||||
default_watermark = 0.09 # between 0.0 (0%) and 1.0 (100%)
|
||||
default_halflife = 0.5 # seconds
|
||||
default_pollfreq_profile = 20 # Hz
|
||||
default_pollfreq_background = 100 # Hz -- a maximum for sleep's resolution
|
||||
default_parentframe = 0.25 # should not be more than 0.5 (50%)
|
||||
|
||||
|
||||
def full(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Compile as much as possible.
|
||||
|
||||
Typical use is for small scripts performing intensive computations
|
||||
or string handling."""
|
||||
import profiler
|
||||
p = profiler.FullCompiler()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def profile(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_profile,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on profiling.
|
||||
|
||||
The 'watermark' parameter controls how easily running functions will
|
||||
be compiled. The smaller the value, the more functions are compiled."""
|
||||
import profiler
|
||||
p = profiler.ActivePassiveProfiler(watermark, halflife,
|
||||
pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def background(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_background,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on passive profiling.
|
||||
|
||||
This is a very lightweight mode in which only intensively computing
|
||||
functions can be detected. The smaller the 'watermark', the more functions
|
||||
are compiled."""
|
||||
import profiler
|
||||
p = profiler.PassiveProfiler(watermark, halflife, pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def runonly(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Nonprofiler.
|
||||
|
||||
XXX check if this is useful and document."""
|
||||
import profiler
|
||||
p = profiler.RunOnly()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def stop():
|
||||
"""Turn off all automatic compilation. bind() calls remain in effect."""
|
||||
import profiler
|
||||
profiler.go([])
|
||||
|
||||
|
||||
def log(logfile='', mode='w', top=10):
|
||||
"""Enable logging to the given file.
|
||||
|
||||
If the file name is unspecified, a default name is built by appending
|
||||
a 'log-psyco' extension to the main script name.
|
||||
|
||||
Mode is 'a' to append to a possibly existing file or 'w' to overwrite
|
||||
an existing file. Note that the log file may grow quickly in 'a' mode."""
|
||||
import profiler, logger
|
||||
if not logfile:
|
||||
import os
|
||||
logfile, dummy = os.path.splitext(sys.argv[0])
|
||||
if os.path.basename(logfile):
|
||||
logfile += '.'
|
||||
logfile += 'log-psyco'
|
||||
if hasattr(_psyco, 'VERBOSE_LEVEL'):
|
||||
print >> sys.stderr, 'psyco: logging to', logfile
|
||||
# logger.current should be a real file object; subtle problems
|
||||
# will show up if its write() and flush() methods are written
|
||||
# in Python, as Psyco will invoke them while compiling.
|
||||
logger.current = open(logfile, mode)
|
||||
logger.print_charges = top
|
||||
profiler.logger = logger
|
||||
logger.writedate('Logging started')
|
||||
cannotcompile(logger.psycowrite)
|
||||
_psyco.statwrite(logger=logger.psycowrite)
|
||||
|
||||
|
||||
def bind(x, rec=None):
|
||||
"""Enable compilation of the given function, method, or class object.
|
||||
|
||||
If C is a class (or anything with a '__dict__' attribute), bind(C) will
|
||||
rebind all functions and methods found in C.__dict__ (which means, for
|
||||
classes, all methods defined in the class but not in its parents).
|
||||
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
x.func_code = _psyco.proxycode(x)
|
||||
else:
|
||||
x.func_code = _psyco.proxycode(x, rec)
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
funcs = [o for o in x.__dict__.values()
|
||||
if isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)]
|
||||
if not funcs:
|
||||
raise error, ("nothing bindable found in %s object" %
|
||||
type(x).__name__)
|
||||
for o in funcs:
|
||||
bind(o, rec)
|
||||
return
|
||||
raise TypeError, "cannot bind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unbind(x):
|
||||
"""Reverse of bind()."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
try:
|
||||
f = _psyco.unproxycode(x.func_code)
|
||||
except error:
|
||||
pass
|
||||
else:
|
||||
x.func_code = f.func_code
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
for o in x.__dict__.values():
|
||||
if (isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)):
|
||||
unbind(o)
|
||||
return
|
||||
raise TypeError, "cannot unbind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def proxy(x, rec=None):
|
||||
"""Return a Psyco-enabled copy of the function.
|
||||
|
||||
The original function is still available for non-compiled calls.
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
code = _psyco.proxycode(x)
|
||||
else:
|
||||
code = _psyco.proxycode(x, rec)
|
||||
return newfunction(code, x.func_globals, x.func_name)
|
||||
if isinstance(x, types.MethodType):
|
||||
p = proxy(x.im_func, rec)
|
||||
return newinstancemethod(p, x.im_self, x.im_class)
|
||||
raise TypeError, "cannot proxy %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unproxy(proxy):
"""Return a new copy of the original function or method behind a proxy.
|
||||
The result behaves like the original function in that calling it
|
||||
does not trigger compilation nor execution of any compiled code."""
|
||||
if isinstance(proxy, types.FunctionType):
|
||||
return _psyco.unproxycode(proxy.func_code)
|
||||
if isinstance(proxy, types.MethodType):
|
||||
f = unproxy(proxy.im_func)
|
||||
return newinstancemethod(f, proxy.im_self, proxy.im_class)
|
||||
raise TypeError, "%s objects cannot be proxies" % type(proxy).__name__
|
||||
|
||||
|
||||
def cannotcompile(x):
|
||||
"""Instruct Psyco never to compile the given function, method
|
||||
or code object."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
x = x.func_code
|
||||
if isinstance(x, types.CodeType):
|
||||
_psyco.cannotcompile(x)
|
||||
else:
|
||||
raise TypeError, "unexpected %s object" % type(x).__name__
|
||||
|
||||
|
||||
def dumpcodebuf():
|
||||
"""Write in file psyco.dump a copy of the emitted machine code,
|
||||
provided Psyco was compiled with a non-zero CODE_DUMP.
|
||||
See py-utils/httpxam.py to examine psyco.dump."""
|
||||
if hasattr(_psyco, 'dumpcodebuf'):
|
||||
_psyco.dumpcodebuf()
|
||||
|
||||
|
||||
###########################################################################
|
||||
# Psyco variables
|
||||
# error * the error raised by Psyco
|
||||
# warning * the warning raised by Psyco
|
||||
# __in_psyco__ * a new built-in variable which is always zero, but which
|
||||
# Psyco special-cases by returning 1 instead. So
|
||||
# __in_psyco__ can be used in a function to know if
|
||||
# that function is being executed by Psyco or not.
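#
# Illustrative use (sketch, not part of the original file):
#
#   def running_under_psyco():
#       return bool(__in_psyco__)    # True in Psyco-compiled code, False otherwise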
|
||||
@@ -0,0 +1,133 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Support code for the 'psyco.compact' type.
|
||||
|
||||
from __future__ import generators
|
||||
|
||||
try:
|
||||
from UserDict import DictMixin
|
||||
except ImportError:
|
||||
|
||||
# backported from Python 2.3 to Python 2.2
|
||||
class DictMixin:
|
||||
# Mixin defining all dictionary methods for classes that already have
|
||||
# a minimum dictionary interface including getitem, setitem, delitem,
|
||||
# and keys. Without knowledge of the subclass constructor, the mixin
|
||||
# does not define __init__() or copy(). In addition to the four base
|
||||
# methods, progressively more efficiency comes with defining
|
||||
# __contains__(), __iter__(), and iteritems().
|
||||
|
||||
# second level definitions support higher levels
|
||||
def __iter__(self):
|
||||
for k in self.keys():
|
||||
yield k
|
||||
def has_key(self, key):
|
||||
try:
|
||||
value = self[key]
|
||||
except KeyError:
|
||||
return False
|
||||
return True
|
||||
def __contains__(self, key):
|
||||
return self.has_key(key)
|
||||
|
||||
# third level takes advantage of second level definitions
|
||||
def iteritems(self):
|
||||
for k in self:
|
||||
yield (k, self[k])
|
||||
def iterkeys(self):
|
||||
return self.__iter__()
|
||||
|
||||
# fourth level uses definitions from lower levels
|
||||
def itervalues(self):
|
||||
for _, v in self.iteritems():
|
||||
yield v
|
||||
def values(self):
|
||||
return [v for _, v in self.iteritems()]
|
||||
def items(self):
|
||||
return list(self.iteritems())
|
||||
def clear(self):
|
||||
for key in self.keys():
|
||||
del self[key]
|
||||
def setdefault(self, key, default):
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
self[key] = default
|
||||
return default
|
||||
def pop(self, key, *args):
|
||||
if len(args) > 1:
|
||||
raise TypeError, "pop expected at most 2 arguments, got "\
|
||||
+ repr(1 + len(args))
|
||||
try:
|
||||
value = self[key]
|
||||
except KeyError:
|
||||
if args:
|
||||
return args[0]
|
||||
raise
|
||||
del self[key]
|
||||
return value
|
||||
def popitem(self):
|
||||
try:
|
||||
k, v = self.iteritems().next()
|
||||
except StopIteration:
|
||||
raise KeyError, 'container is empty'
|
||||
del self[k]
|
||||
return (k, v)
|
||||
def update(self, other):
|
||||
# Make progressively weaker assumptions about "other"
|
||||
if hasattr(other, 'iteritems'): # iteritems saves memory and lookups
|
||||
for k, v in other.iteritems():
|
||||
self[k] = v
|
||||
elif hasattr(other, '__iter__'): # iter saves memory
|
||||
for k in other:
|
||||
self[k] = other[k]
|
||||
else:
|
||||
for k in other.keys():
|
||||
self[k] = other[k]
|
||||
def get(self, key, default=None):
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
return default
|
||||
def __repr__(self):
|
||||
return repr(dict(self.iteritems()))
|
||||
def __cmp__(self, other):
|
||||
if other is None:
|
||||
return 1
|
||||
if isinstance(other, DictMixin):
|
||||
other = dict(other.iteritems())
|
||||
return cmp(dict(self.iteritems()), other)
|
||||
def __len__(self):
|
||||
return len(self.keys())
|
||||
|
||||
###########################################################################
|
||||
|
||||
from _psyco import compact
|
||||
|
||||
|
||||
class compactdictproxy(DictMixin):
|
||||
|
||||
def __init__(self, ko):
|
||||
self._ko = ko # compact object of which 'self' is the dict
|
||||
|
||||
def __getitem__(self, key):
|
||||
return compact.__getslot__(self._ko, key)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
compact.__setslot__(self._ko, key, value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
compact.__delslot__(self._ko, key)
|
||||
|
||||
def keys(self):
|
||||
return compact.__members__.__get__(self._ko)
|
||||
|
||||
def clear(self):
|
||||
keys = self.keys()
|
||||
keys.reverse()
|
||||
for key in keys:
|
||||
del self[key]
|
||||
|
||||
def __repr__(self):
|
||||
keys = ', '.join(self.keys())
|
||||
return '<compactdictproxy object {%s}>' % (keys,)
|
||||
@@ -0,0 +1,96 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco logger.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco logger.
|
||||
|
||||
See log() in core.py.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
|
||||
import _psyco
|
||||
from time import time, localtime, strftime
|
||||
|
||||
|
||||
current = None
|
||||
print_charges = 10
|
||||
dump_delay = 0.2
|
||||
dump_last = 0.0
|
||||
|
||||
def write(s, level):
|
||||
t = time()
|
||||
f = t-int(t)
|
||||
try:
|
||||
current.write("%s.%02d %-*s%s\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0), 63-level, s,
|
||||
"%"*level))
|
||||
current.flush()
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
def psycowrite(s):
|
||||
t = time()
|
||||
f = t-int(t)
|
||||
try:
|
||||
current.write("%s.%02d %-*s%s\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0), 60, s.strip(),
|
||||
"% %"))
|
||||
current.flush()
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
##def writelines(lines, level=0):
|
||||
## if lines:
|
||||
## t = time()
|
||||
## f = t-int(t)
|
||||
## timedesc = strftime("%x %X", localtime(int(t)))
|
||||
## print >> current, "%s.%03d %-*s %s" % (
|
||||
## timedesc, int(f*1000),
|
||||
## 50-level, lines[0],
|
||||
## "+"*level)
|
||||
## timedesc = " " * (len(timedesc)+5)
|
||||
## for line in lines[1:]:
|
||||
## print >> current, timedesc, line
|
||||
|
||||
def writememory():
|
||||
write("memory usage: %d+ kb" % _psyco.memory(), 1)
|
||||
|
||||
def dumpcharges():
|
||||
global dump_last
|
||||
if print_charges:
|
||||
t = time()
|
||||
if not (dump_last <= t < dump_last+dump_delay):
|
||||
if t <= dump_last+1.5*dump_delay:
|
||||
dump_last += dump_delay
|
||||
else:
|
||||
dump_last = t
|
||||
#write("%s: charges:" % who, 0)
|
||||
lst = _psyco.stattop(print_charges)
|
||||
if lst:
|
||||
f = t-int(t)
|
||||
lines = ["%s.%02d ______\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0))]
|
||||
i = 1
|
||||
for co, charge in lst:
|
||||
detail = co.co_filename
|
||||
if len(detail) > 19:
|
||||
detail = '...' + detail[-17:]
|
||||
lines.append(" #%-3d |%4.1f %%| %-26s%20s:%d\n" %
|
||||
(i, charge*100.0, co.co_name, detail,
|
||||
co.co_firstlineno))
|
||||
i += 1
|
||||
current.writelines(lines)
|
||||
current.flush()
|
||||
|
||||
def writefinalstats():
|
||||
dumpcharges()
|
||||
writememory()
|
||||
writedate("program exit")
|
||||
|
||||
def writedate(msg):
|
||||
write('%s, %s' % (msg, strftime("%x")), 20)
|
||||
@@ -0,0 +1,379 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco profiler (Python part).
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco profiler (Python part).
|
||||
|
||||
The implementation of the non-time-critical parts of the profiler.
|
||||
See profile() and full() in core.py for the easy interface.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import _psyco
|
||||
from support import *
|
||||
import math, time, types, atexit
|
||||
now = time.time
|
||||
try:
|
||||
import thread
|
||||
except ImportError:
|
||||
import dummy_thread as thread
|
||||
|
||||
|
||||
# current profiler instance
|
||||
current = None
|
||||
|
||||
# enabled profilers, in order of priority
|
||||
profilers = []
|
||||
|
||||
# logger module (when enabled by core.log())
|
||||
logger = None
|
||||
|
||||
# a lock for a thread-safe go()
|
||||
go_lock = thread.allocate_lock()
|
||||
|
||||
def go(stop=0):
|
||||
# run the highest-priority profiler in 'profilers'
|
||||
global current
|
||||
go_lock.acquire()
|
||||
try:
|
||||
prev = current
|
||||
if stop:
|
||||
del profilers[:]
|
||||
if prev:
|
||||
if profilers and profilers[0] is prev:
|
||||
return # best profiler already running
|
||||
prev.stop()
|
||||
current = None
|
||||
for p in profilers[:]:
|
||||
if p.start():
|
||||
current = p
|
||||
if logger: # and p is not prev:
|
||||
logger.write("%s: starting" % p.__class__.__name__, 5)
|
||||
return
|
||||
finally:
|
||||
go_lock.release()
|
||||
# no profiler is running now
|
||||
if stop:
|
||||
if logger:
|
||||
logger.writefinalstats()
|
||||
else:
|
||||
tag2bind()
|
||||
|
||||
atexit.register(go, 1)
|
||||
|
||||
|
||||
def buildfncache(globals, cache):
|
||||
if hasattr(types.IntType, '__dict__'):
|
||||
clstypes = (types.ClassType, types.TypeType)
|
||||
else:
|
||||
clstypes = types.ClassType
|
||||
for x in globals.values():
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
cache[x.func_code] = x, ''
|
||||
elif isinstance(x, clstypes):
|
||||
for y in x.__dict__.values():
|
||||
if isinstance(y, types.MethodType):
|
||||
y = y.im_func
|
||||
if isinstance(y, types.FunctionType):
|
||||
cache[y.func_code] = y, x.__name__
|
||||
|
||||
# code-to-function mapping (cache)
|
||||
function_cache = {}
|
||||
|
||||
def trytobind(co, globals, log=1):
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
buildfncache(globals, function_cache)
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
if logger:
|
||||
logger.write('warning: cannot find function %s in %s' %
|
||||
(co.co_name, globals.get('__name__', '?')), 3)
|
||||
return # give up
|
||||
if logger and log:
|
||||
modulename = globals.get('__name__', '?')
|
||||
if clsname:
|
||||
modulename += '.' + clsname
|
||||
logger.write('bind function: %s.%s' % (modulename, co.co_name), 1)
|
||||
f.func_code = _psyco.proxycode(f)
|
||||
|
||||
|
||||
# the list of code objects that have been tagged
|
||||
tagged_codes = []
|
||||
|
||||
def tag(co, globals):
|
||||
if logger:
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
buildfncache(globals, function_cache)
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
clsname = '' # give up
|
||||
modulename = globals.get('__name__', '?')
|
||||
if clsname:
|
||||
modulename += '.' + clsname
|
||||
logger.write('tag function: %s.%s' % (modulename, co.co_name), 1)
|
||||
tagged_codes.append((co, globals))
|
||||
_psyco.turbo_frame(co)
|
||||
_psyco.turbo_code(co)
|
||||
|
||||
def tag2bind():
|
||||
if tagged_codes:
|
||||
if logger:
|
||||
logger.write('profiling stopped, binding %d functions' %
|
||||
len(tagged_codes), 2)
|
||||
for co, globals in tagged_codes:
|
||||
trytobind(co, globals, 0)
|
||||
function_cache.clear()
|
||||
del tagged_codes[:]
|
||||
|
||||
|
||||
class Profiler:
|
||||
MemoryTimerResolution = 0.103
|
||||
|
||||
def run(self, memory, time, memorymax, timemax):
|
||||
self.memory = memory
|
||||
self.memorymax = memorymax
|
||||
self.time = time
|
||||
if timemax is None:
|
||||
self.endtime = None
|
||||
else:
|
||||
self.endtime = now() + timemax
|
||||
self.alarms = []
|
||||
profilers.append(self)
|
||||
go()
|
||||
|
||||
def start(self):
|
||||
curmem = _psyco.memory()
|
||||
memlimits = []
|
||||
if self.memorymax is not None:
|
||||
if curmem >= self.memorymax:
|
||||
if logger:
|
||||
logger.writememory()
|
||||
return self.limitreached('memorymax')
|
||||
memlimits.append(self.memorymax)
|
||||
if self.memory is not None:
|
||||
if self.memory <= 0:
|
||||
if logger:
|
||||
logger.writememory()
|
||||
return self.limitreached('memory')
|
||||
memlimits.append(curmem + self.memory)
|
||||
self.memory_at_start = curmem
|
||||
|
||||
curtime = now()
|
||||
timelimits = []
|
||||
if self.endtime is not None:
|
||||
if curtime >= self.endtime:
|
||||
return self.limitreached('timemax')
|
||||
timelimits.append(self.endtime - curtime)
|
||||
if self.time is not None:
|
||||
if self.time <= 0.0:
|
||||
return self.limitreached('time')
|
||||
timelimits.append(self.time)
|
||||
self.time_at_start = curtime
|
||||
|
||||
try:
|
||||
self.do_start()
|
||||
except error, e:
|
||||
if logger:
|
||||
logger.write('%s: disabled by psyco.error:' % (
|
||||
self.__class__.__name__), 4)
|
||||
logger.write(' %s' % str(e), 3)
|
||||
return 0
|
||||
|
||||
if memlimits:
|
||||
self.memlimits_args = (time.sleep, (self.MemoryTimerResolution,),
|
||||
self.check_memory, (min(memlimits),))
|
||||
self.alarms.append(_psyco.alarm(*self.memlimits_args))
|
||||
if timelimits:
|
||||
self.alarms.append(_psyco.alarm(time.sleep, (min(timelimits),),
|
||||
self.time_out))
|
||||
return 1
|
||||
|
||||
def stop(self):
|
||||
for alarm in self.alarms:
|
||||
alarm.stop(0)
|
||||
for alarm in self.alarms:
|
||||
alarm.stop(1) # wait for parallel threads to stop
|
||||
del self.alarms[:]
|
||||
if self.time is not None:
|
||||
self.time -= now() - self.time_at_start
|
||||
if self.memory is not None:
|
||||
self.memory -= _psyco.memory() - self.memory_at_start
|
||||
|
||||
try:
|
||||
self.do_stop()
|
||||
except error:
|
||||
return 0
|
||||
return 1
|
||||
|
||||
def check_memory(self, limit):
|
||||
if _psyco.memory() < limit:
|
||||
return self.memlimits_args
|
||||
go()
|
||||
|
||||
def time_out(self):
|
||||
self.time = 0.0
|
||||
go()
|
||||
|
||||
def limitreached(self, limitname):
|
||||
try:
|
||||
profilers.remove(self)
|
||||
except ValueError:
|
||||
pass
|
||||
if logger:
|
||||
logger.write('%s: disabled (%s limit reached)' % (
|
||||
self.__class__.__name__, limitname), 4)
|
||||
return 0
|
||||
|
||||
|
||||
class FullCompiler(Profiler):
|
||||
|
||||
def do_start(self):
|
||||
_psyco.profiling('f')
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
|
||||
|
||||
class RunOnly(Profiler):
|
||||
|
||||
def do_start(self):
|
||||
_psyco.profiling('n')
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
|
||||
|
||||
class ChargeProfiler(Profiler):
|
||||
|
||||
def __init__(self, watermark, parentframe):
|
||||
self.watermark = watermark
|
||||
self.parent2 = parentframe * 2.0
|
||||
self.lock = thread.allocate_lock()
|
||||
|
||||
def init_charges(self):
|
||||
_psyco.statwrite(watermark = self.watermark,
|
||||
parent2 = self.parent2)
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
_psyco.statwrite(callback = None)
|
||||
|
||||
|
||||
class ActiveProfiler(ChargeProfiler):
|
||||
|
||||
def active_start(self):
|
||||
_psyco.profiling('p')
|
||||
|
||||
def do_start(self):
|
||||
self.init_charges()
|
||||
self.active_start()
|
||||
_psyco.statwrite(callback = self.charge_callback)
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
tag(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
class PassiveProfiler(ChargeProfiler):
|
||||
|
||||
initial_charge_unit = _psyco.statread('unit')
|
||||
reset_stats_after = 120 # half-lives (maximum 200!)
|
||||
reset_limit = initial_charge_unit * (2.0 ** reset_stats_after)
|
||||
|
||||
def __init__(self, watermark, halflife, pollfreq, parentframe):
|
||||
ChargeProfiler.__init__(self, watermark, parentframe)
|
||||
self.pollfreq = pollfreq
|
||||
# self.progress is slightly more than 1.0, and computed so that
|
||||
# do_profile() will double the charge unit every 'halflife' seconds.
|
||||
self.progress = 2.0 ** (1.0 / (halflife * pollfreq))
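# Worked example (added note, not from the original source): with the default
# halflife of 0.5 s and pollfreq of 20 Hz, progress = 2 ** (1 / 10.0), so after
# 10 polls (one halflife) the charge unit has been multiplied by exactly 2.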
|
||||
|
||||
def reset(self):
|
||||
_psyco.statwrite(unit = self.initial_charge_unit, callback = None)
|
||||
_psyco.statreset()
|
||||
if logger:
|
||||
logger.write("%s: resetting stats" % self.__class__.__name__, 1)
|
||||
|
||||
def passive_start(self):
|
||||
self.passivealarm_args = (time.sleep, (1.0 / self.pollfreq,),
|
||||
self.do_profile)
|
||||
self.alarms.append(_psyco.alarm(*self.passivealarm_args))
|
||||
|
||||
def do_start(self):
|
||||
tag2bind()
|
||||
self.init_charges()
|
||||
self.passive_start()
|
||||
|
||||
def do_profile(self):
|
||||
_psyco.statcollect()
|
||||
if logger:
|
||||
logger.dumpcharges()
|
||||
nunit = _psyco.statread('unit') * self.progress
|
||||
if nunit > self.reset_limit:
|
||||
self.reset()
|
||||
else:
|
||||
_psyco.statwrite(unit = nunit, callback = self.charge_callback)
|
||||
return self.passivealarm_args
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
trytobind(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
class ActivePassiveProfiler(PassiveProfiler, ActiveProfiler):
|
||||
|
||||
def do_start(self):
|
||||
self.init_charges()
|
||||
self.active_start()
|
||||
self.passive_start()
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
tag(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
|
||||
#
|
||||
# we register our own version of sys.settrace(), sys.setprofile()
|
||||
# and thread.start_new_thread().
|
||||
#
|
||||
|
||||
def psyco_settrace(*args, **kw):
|
||||
"This is the Psyco-aware version of sys.settrace()."
|
||||
result = original_settrace(*args, **kw)
|
||||
go()
|
||||
return result
|
||||
|
||||
def psyco_setprofile(*args, **kw):
|
||||
"This is the Psyco-aware version of sys.setprofile()."
|
||||
result = original_setprofile(*args, **kw)
|
||||
go()
|
||||
return result
|
||||
|
||||
def psyco_thread_stub(callable, args, kw):
|
||||
_psyco.statcollect()
|
||||
if kw is None:
|
||||
return callable(*args)
|
||||
else:
|
||||
return callable(*args, **kw)
|
||||
|
||||
def psyco_start_new_thread(callable, args, kw=None):
|
||||
"This is the Psyco-aware version of thread.start_new_thread()."
|
||||
return original_start_new_thread(psyco_thread_stub, (callable, args, kw))
|
||||
|
||||
original_settrace = sys.settrace
|
||||
original_setprofile = sys.setprofile
|
||||
original_start_new_thread = thread.start_new_thread
|
||||
sys.settrace = psyco_settrace
|
||||
sys.setprofile = psyco_setprofile
|
||||
thread.start_new_thread = psyco_start_new_thread
|
||||
# hack to patch threading._start_new_thread if the module is
|
||||
# already loaded
|
||||
if ('threading' in sys.modules and
|
||||
hasattr(sys.modules['threading'], '_start_new_thread')):
|
||||
sys.modules['threading']._start_new_thread = psyco_start_new_thread
|
||||
@@ -0,0 +1,191 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco general support module.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco general support module.
|
||||
|
||||
For internal use.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import sys, _psyco, __builtin__
|
||||
|
||||
error = _psyco.error
|
||||
class warning(Warning):
|
||||
pass
|
||||
|
||||
_psyco.NoLocalsWarning = warning
|
||||
|
||||
def warn(msg):
|
||||
from warnings import warn
|
||||
warn(msg, warning, stacklevel=2)
|
||||
|
||||
#
|
||||
# Version checks
|
||||
#
|
||||
__version__ = 0x010600f0
|
||||
if _psyco.PSYVER != __version__:
|
||||
raise error, "version mismatch between Psyco parts, reinstall it"
|
||||
|
||||
version_info = (__version__ >> 24,
|
||||
(__version__ >> 16) & 0xff,
|
||||
(__version__ >> 8) & 0xff,
|
||||
{0xa0: 'alpha',
|
||||
0xb0: 'beta',
|
||||
0xc0: 'candidate',
|
||||
0xf0: 'final'}[__version__ & 0xf0],
|
||||
__version__ & 0xf)
|
||||
|
||||
|
||||
VERSION_LIMITS = [0x02020200, # 2.2.2
|
||||
0x02030000, # 2.3
|
||||
0x02040000] # 2.4
|
||||
|
||||
if ([v for v in VERSION_LIMITS if v <= sys.hexversion] !=
|
||||
[v for v in VERSION_LIMITS if v <= _psyco.PYVER ]):
|
||||
if sys.hexversion < VERSION_LIMITS[0]:
|
||||
warn("Psyco requires Python version 2.2.2 or later")
|
||||
else:
|
||||
warn("Psyco version does not match Python version. "
|
||||
"Psyco must be updated or recompiled")
|
||||
|
||||
|
||||
if hasattr(_psyco, 'ALL_CHECKS') and hasattr(_psyco, 'VERBOSE_LEVEL'):
|
||||
print >> sys.stderr, ('psyco: running in debugging mode on %s' %
|
||||
_psyco.PROCESSOR)
|
||||
|
||||
|
||||
###########################################################################
|
||||
# sys._getframe() gives strange results on a mixed Psyco- and Python-style
|
||||
# stack frame. Psyco provides a replacement that partially emulates Python
|
||||
# frames from Psyco frames. The new sys._getframe() may return objects of
|
||||
# a custom "Psyco frame" type, which is a subtype of the normal frame type.
|
||||
#
|
||||
# The same problems require some other built-in functions to be replaced
|
||||
# as well. Note that the local variables are not available in any
|
||||
# dictionary with Psyco.
|
||||
|
||||
|
||||
class Frame:
|
||||
pass
|
||||
|
||||
|
||||
class PythonFrame(Frame):
|
||||
|
||||
def __init__(self, frame):
|
||||
self.__dict__.update({
|
||||
'_frame': frame,
|
||||
})
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'f_back':
|
||||
try:
|
||||
result = embedframe(_psyco.getframe(self._frame))
|
||||
except ValueError:
|
||||
result = None
|
||||
except error:
|
||||
warn("f_back is skipping dead Psyco frames")
|
||||
result = self._frame.f_back
|
||||
self.__dict__['f_back'] = result
|
||||
return result
|
||||
else:
|
||||
return getattr(self._frame, attr)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
setattr(self._frame, attr, value)
|
||||
|
||||
def __delattr__(self, attr):
|
||||
delattr(self._frame, attr)
|
||||
|
||||
|
||||
class PsycoFrame(Frame):
|
||||
|
||||
def __init__(self, tag):
|
||||
self.__dict__.update({
|
||||
'_tag' : tag,
|
||||
'f_code' : tag[0],
|
||||
'f_globals': tag[1],
|
||||
})
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'f_back':
|
||||
try:
|
||||
result = embedframe(_psyco.getframe(self._tag))
|
||||
except ValueError:
|
||||
result = None
|
||||
elif attr == 'f_lineno':
|
||||
result = self.f_code.co_firstlineno # better than nothing
|
||||
elif attr == 'f_builtins':
|
||||
result = self.f_globals['__builtins__']
|
||||
elif attr == 'f_restricted':
|
||||
result = self.f_builtins is not __builtins__
|
||||
elif attr == 'f_locals':
|
||||
raise AttributeError, ("local variables of functions run by Psyco "
|
||||
"cannot be accessed in any way, sorry")
|
||||
else:
|
||||
raise AttributeError, ("emulated Psyco frames have "
|
||||
"no '%s' attribute" % attr)
|
||||
self.__dict__[attr] = result
|
||||
return result
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
raise AttributeError, "Psyco frame objects are read-only"
|
||||
|
||||
def __delattr__(self, attr):
|
||||
if attr == 'f_trace':
|
||||
# for bdb which relies on CPython frames exhibiting a slightly
|
||||
# buggy behavior: you can 'del f.f_trace' as often as you like
|
||||
# even without having set it previously.
|
||||
return
|
||||
raise AttributeError, "Psyco frame objects are read-only"
|
||||
|
||||
|
||||
def embedframe(result):
|
||||
if type(result) is type(()):
|
||||
return PsycoFrame(result)
|
||||
else:
|
||||
return PythonFrame(result)
|
||||
|
||||
def _getframe(depth=0):
|
||||
"""Return a frame object from the call stack. This is a replacement for
|
||||
sys._getframe() which is aware of Psyco frames.
|
||||
|
||||
The returned objects are instances of either PythonFrame or PsycoFrame
|
||||
instead of being real Python-level frame objects, so that they can emulate
|
||||
the common attributes of frame objects.
|
||||
|
||||
The original sys._getframe() ignoring Psyco frames altogether is stored in
|
||||
psyco._getrealframe(). See also psyco._getemulframe()."""
|
||||
# 'depth+1' to account for this _getframe() Python function
|
||||
return embedframe(_psyco.getframe(depth+1))
|
||||
|
||||
def _getemulframe(depth=0):
|
||||
"""As _getframe(), but the returned objects are real Python frame objects
|
||||
emulating Psyco frames. Some of their attributes can be wrong or missing,
|
||||
however."""
|
||||
# 'depth+1' to account for this _getemulframe() Python function
|
||||
return _psyco.getframe(depth+1, 1)
|
||||
|
||||
def patch(name, module=__builtin__):
|
||||
f = getattr(_psyco, name)
|
||||
org = getattr(module, name)
|
||||
if org is not f:
|
||||
setattr(module, name, f)
|
||||
setattr(_psyco, 'original_' + name, org)
|
||||
|
||||
_getrealframe = sys._getframe
|
||||
sys._getframe = _getframe
|
||||
patch('globals')
|
||||
patch('eval')
|
||||
patch('execfile')
|
||||
patch('locals')
|
||||
patch('vars')
|
||||
patch('dir')
|
||||
patch('input')
|
||||
_psyco.original_raw_input = raw_input
|
||||
__builtin__.__in_psyco__ = 0==1 # False
|
||||
|
||||
if hasattr(_psyco, 'compact'):
|
||||
import kdictproxy
|
||||
_psyco.compactdictproxy = kdictproxy.compactdictproxy
|
||||
54
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/__init__.py
Normal file
@@ -0,0 +1,54 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco top-level file of the Psyco package.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco -- the Python Specializing Compiler.
|
||||
|
||||
Typical usage: add the following lines to your application's main module,
|
||||
preferably after the other imports:
|
||||
|
||||
try:
|
||||
import psyco
|
||||
psyco.full()
|
||||
except ImportError:
|
||||
print 'Psyco not installed, the program will just run slower'
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
|
||||
#
|
||||
# This module is present to make 'psyco' a package and to
|
||||
# publish the main functions and variables.
|
||||
#
|
||||
# More documentation can be found in core.py.
|
||||
#
|
||||
|
||||
|
||||
# Try to import the dynamic-loading _psyco and report errors
|
||||
try:
|
||||
import _psyco
|
||||
except ImportError, e:
|
||||
extramsg = ''
|
||||
import sys, imp
|
||||
try:
|
||||
file, filename, (suffix, mode, type) = imp.find_module('_psyco', __path__)
|
||||
except ImportError:
|
||||
ext = [suffix for suffix, mode, type in imp.get_suffixes()
|
||||
if type == imp.C_EXTENSION]
|
||||
if ext:
|
||||
extramsg = (" (cannot locate the compiled extension '_psyco%s' "
|
||||
"in the package path '%s')" % (ext[0], '; '.join(__path__)))
|
||||
else:
|
||||
extramsg = (" (check that the compiled extension '%s' is for "
|
||||
"the correct Python version; this is Python %s)" %
|
||||
(filename, sys.version.split()[0]))
|
||||
raise ImportError, str(e) + extramsg
|
||||
|
||||
# Publish important data by importing them in the package
|
||||
from support import __version__, error, warning, _getrealframe, _getemulframe
|
||||
from support import version_info, __version__ as hexversion
|
||||
from core import full, profile, background, runonly, stop, cannotcompile
|
||||
from core import log, bind, unbind, proxy, unproxy, dumpcodebuf
|
||||
from _psyco import setfilter
|
||||
from _psyco import compact, compacttype
|
||||
42
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/classes.py
Normal file
@@ -0,0 +1,42 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco class support module.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco class support module.
|
||||
|
||||
'psyco.classes.psyobj' is an alternate Psyco-optimized root for classes.
|
||||
Any class inheriting from it or using the metaclass '__metaclass__' might
|
||||
get optimized specifically for Psyco. It is equivalent to call
|
||||
psyco.bind() on the class object after its creation.
|
||||
|
||||
Importing everything from psyco.classes in a module will import the
|
||||
'__metaclass__' name, so all classes defined after a
|
||||
|
||||
from psyco.classes import *
|
||||
|
||||
will automatically use the Psyco-optimized metaclass.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
__all__ = ['psyobj', 'psymetaclass', '__metaclass__']
|
||||
|
||||
|
||||
from _psyco import compacttype
|
||||
import core
|
||||
from types import FunctionType
|
||||
|
||||
class psymetaclass(compacttype):
|
||||
"Psyco-optimized meta-class. Turns all methods into Psyco proxies."
|
||||
|
||||
def __new__(cls, name, bases, dict):
|
||||
bindlist = dict.get('__psyco__bind__')
|
||||
if bindlist is None:
|
||||
bindlist = [key for key, value in dict.items()
|
||||
if isinstance(value, FunctionType)]
|
||||
for attr in bindlist:
|
||||
dict[attr] = core.proxy(dict[attr])
|
||||
return super(psymetaclass, cls).__new__(cls, name, bases, dict)
|
||||
|
||||
psyobj = psymetaclass("psyobj", (), {})
|
||||
__metaclass__ = psymetaclass
|
||||
231
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/core.py
Normal file
@@ -0,0 +1,231 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco main functions.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco main functions.
|
||||
|
||||
Here are the routines that you can use from your applications.
|
||||
These are mostly interfaces to the C core, but they depend on
|
||||
the Python version.
|
||||
|
||||
You can use these functions from the 'psyco' module instead of
|
||||
'psyco.core', e.g.
|
||||
|
||||
import psyco
|
||||
psyco.log('/tmp/psyco.log')
|
||||
psyco.profile()
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import _psyco
|
||||
import types
|
||||
from support import *
|
||||
|
||||
newfunction = types.FunctionType
|
||||
newinstancemethod = types.MethodType
|
||||
|
||||
|
||||
# Default charge profiler values
|
||||
default_watermark = 0.09 # between 0.0 (0%) and 1.0 (100%)
|
||||
default_halflife = 0.5 # seconds
|
||||
default_pollfreq_profile = 20 # Hz
|
||||
default_pollfreq_background = 100 # Hz -- a maximum for sleep's resolution
|
||||
default_parentframe = 0.25 # should not be more than 0.5 (50%)
|
||||
|
||||
|
||||
def full(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Compile as much as possible.
|
||||
|
||||
Typical use is for small scripts performing intensive computations
|
||||
or string handling."""
|
||||
import profiler
|
||||
p = profiler.FullCompiler()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def profile(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_profile,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on profiling.
|
||||
|
||||
The 'watermark' parameter controls how easily running functions will
|
||||
be compiled. The smaller the value, the more functions are compiled."""
|
||||
import profiler
|
||||
p = profiler.ActivePassiveProfiler(watermark, halflife,
|
||||
pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def background(watermark = default_watermark,
|
||||
halflife = default_halflife,
|
||||
pollfreq = default_pollfreq_background,
|
||||
parentframe = default_parentframe,
|
||||
memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Turn on passive profiling.
|
||||
|
||||
This is a very lightweight mode in which only intensively computing
|
||||
functions can be detected. The smaller the 'watermark', the more functions
|
||||
are compiled."""
|
||||
import profiler
|
||||
p = profiler.PassiveProfiler(watermark, halflife, pollfreq, parentframe)
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def runonly(memory=None, time=None, memorymax=None, timemax=None):
|
||||
"""Nonprofiler.
|
||||
|
||||
XXX check if this is useful and document."""
|
||||
import profiler
|
||||
p = profiler.RunOnly()
|
||||
p.run(memory, time, memorymax, timemax)
|
||||
|
||||
|
||||
def stop():
|
||||
"""Turn off all automatic compilation. bind() calls remain in effect."""
|
||||
import profiler
|
||||
profiler.go([])
|
||||
|
||||
|
||||
def log(logfile='', mode='w', top=10):
|
||||
"""Enable logging to the given file.
|
||||
|
||||
If the file name is unspecified, a default name is built by appending
|
||||
a 'log-psyco' extension to the main script name.
|
||||
|
||||
Mode is 'a' to append to a possibly existing file or 'w' to overwrite
|
||||
an existing file. Note that the log file may grow quickly in 'a' mode."""
|
||||
import profiler, logger
|
||||
if not logfile:
|
||||
import os
|
||||
logfile, dummy = os.path.splitext(sys.argv[0])
|
||||
if os.path.basename(logfile):
|
||||
logfile += '.'
|
||||
logfile += 'log-psyco'
|
||||
if hasattr(_psyco, 'VERBOSE_LEVEL'):
|
||||
print >> sys.stderr, 'psyco: logging to', logfile
|
||||
# logger.current should be a real file object; subtle problems
|
||||
# will show up if its write() and flush() methods are written
|
||||
# in Python, as Psyco will invoke them while compiling.
|
||||
logger.current = open(logfile, mode)
|
||||
logger.print_charges = top
|
||||
profiler.logger = logger
|
||||
logger.writedate('Logging started')
|
||||
cannotcompile(logger.psycowrite)
|
||||
_psyco.statwrite(logger=logger.psycowrite)
|
||||
|
||||
|
||||
def bind(x, rec=None):
|
||||
"""Enable compilation of the given function, method, or class object.
|
||||
|
||||
If C is a class (or anything with a '__dict__' attribute), bind(C) will
|
||||
rebind all functions and methods found in C.__dict__ (which means, for
|
||||
classes, all methods defined in the class but not in its parents).
|
||||
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
x.func_code = _psyco.proxycode(x)
|
||||
else:
|
||||
x.func_code = _psyco.proxycode(x, rec)
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
funcs = [o for o in x.__dict__.values()
|
||||
if isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)]
|
||||
if not funcs:
|
||||
raise error, ("nothing bindable found in %s object" %
|
||||
type(x).__name__)
|
||||
for o in funcs:
|
||||
bind(o, rec)
|
||||
return
|
||||
raise TypeError, "cannot bind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unbind(x):
|
||||
"""Reverse of bind()."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
try:
|
||||
f = _psyco.unproxycode(x.func_code)
|
||||
except error:
|
||||
pass
|
||||
else:
|
||||
x.func_code = f.func_code
|
||||
return
|
||||
if hasattr(x, '__dict__'):
|
||||
for o in x.__dict__.values():
|
||||
if (isinstance(o, types.MethodType)
|
||||
or isinstance(o, types.FunctionType)):
|
||||
unbind(o)
|
||||
return
|
||||
raise TypeError, "cannot unbind %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def proxy(x, rec=None):
|
||||
"""Return a Psyco-enabled copy of the function.
|
||||
|
||||
The original function is still available for non-compiled calls.
|
||||
The optional second argument specifies the number of recursive
|
||||
compilation levels: all functions called by func are compiled
|
||||
up to the given depth of indirection."""
|
||||
if isinstance(x, types.FunctionType):
|
||||
if rec is None:
|
||||
code = _psyco.proxycode(x)
|
||||
else:
|
||||
code = _psyco.proxycode(x, rec)
|
||||
return newfunction(code, x.func_globals, x.func_name)
|
||||
if isinstance(x, types.MethodType):
|
||||
p = proxy(x.im_func, rec)
|
||||
return newinstancemethod(p, x.im_self, x.im_class)
|
||||
raise TypeError, "cannot proxy %s objects" % type(x).__name__
|
||||
|
||||
|
||||
def unproxy(proxy):
|
||||
"""Return a new copy of the original function of method behind a proxy.
|
||||
The result behaves like the original function in that calling it
|
||||
does not trigger compilation nor execution of any compiled code."""
|
||||
if isinstance(proxy, types.FunctionType):
|
||||
return _psyco.unproxycode(proxy.func_code)
|
||||
if isinstance(proxy, types.MethodType):
|
||||
f = unproxy(proxy.im_func)
|
||||
return newinstancemethod(f, proxy.im_self, proxy.im_class)
|
||||
raise TypeError, "%s objects cannot be proxies" % type(proxy).__name__
|
||||
|
||||
|
||||
def cannotcompile(x):
|
||||
"""Instruct Psyco never to compile the given function, method
|
||||
or code object."""
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
x = x.func_code
|
||||
if isinstance(x, types.CodeType):
|
||||
_psyco.cannotcompile(x)
|
||||
else:
|
||||
raise TypeError, "unexpected %s object" % type(x).__name__
|
||||
|
||||
|
||||
def dumpcodebuf():
|
||||
"""Write in file psyco.dump a copy of the emitted machine code,
|
||||
provided Psyco was compiled with a non-zero CODE_DUMP.
|
||||
See py-utils/httpxam.py to examine psyco.dump."""
|
||||
if hasattr(_psyco, 'dumpcodebuf'):
|
||||
_psyco.dumpcodebuf()
|
||||
|
||||
|
||||
###########################################################################
|
||||
# Psyco variables
|
||||
# error * the error raised by Psyco
|
||||
# warning * the warning raised by Psyco
|
||||
# __in_psyco__ * a new built-in variable which is always zero, but which
|
||||
# Psyco special-cases by returning 1 instead. So
|
||||
# __in_psyco__ can be used in a function to know if
|
||||
# that function is being executed by Psyco or not.
|
||||
133
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/kdictproxy.py
Normal file
@@ -0,0 +1,133 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Support code for the 'psyco.compact' type.
|
||||
|
||||
from __future__ import generators
|
||||
|
||||
try:
|
||||
from UserDict import DictMixin
|
||||
except ImportError:
|
||||
|
||||
# backported from Python 2.3 to Python 2.2
|
||||
class DictMixin:
|
||||
# Mixin defining all dictionary methods for classes that already have
|
||||
# a minimum dictionary interface including getitem, setitem, delitem,
|
||||
# and keys. Without knowledge of the subclass constructor, the mixin
|
||||
# does not define __init__() or copy(). In addition to the four base
|
||||
# methods, progressively more efficiency comes with defining
|
||||
# __contains__(), __iter__(), and iteritems().
|
||||
|
||||
# second level definitions support higher levels
|
||||
def __iter__(self):
|
||||
for k in self.keys():
|
||||
yield k
|
||||
def has_key(self, key):
|
||||
try:
|
||||
value = self[key]
|
||||
except KeyError:
|
||||
return False
|
||||
return True
|
||||
def __contains__(self, key):
|
||||
return self.has_key(key)
|
||||
|
||||
# third level takes advantage of second level definitions
|
||||
def iteritems(self):
|
||||
for k in self:
|
||||
yield (k, self[k])
|
||||
def iterkeys(self):
|
||||
return self.__iter__()
|
||||
|
||||
# fourth level uses definitions from lower levels
|
||||
def itervalues(self):
|
||||
for _, v in self.iteritems():
|
||||
yield v
|
||||
def values(self):
|
||||
return [v for _, v in self.iteritems()]
|
||||
def items(self):
|
||||
return list(self.iteritems())
|
||||
def clear(self):
|
||||
for key in self.keys():
|
||||
del self[key]
|
||||
def setdefault(self, key, default):
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
self[key] = default
|
||||
return default
|
||||
def pop(self, key, *args):
|
||||
if len(args) > 1:
|
||||
raise TypeError, "pop expected at most 2 arguments, got "\
|
||||
+ repr(1 + len(args))
|
||||
try:
|
||||
value = self[key]
|
||||
except KeyError:
|
||||
if args:
|
||||
return args[0]
|
||||
raise
|
||||
del self[key]
|
||||
return value
|
||||
def popitem(self):
|
||||
try:
|
||||
k, v = self.iteritems().next()
|
||||
except StopIteration:
|
||||
raise KeyError, 'container is empty'
|
||||
del self[k]
|
||||
return (k, v)
|
||||
def update(self, other):
|
||||
# Make progressively weaker assumptions about "other"
|
||||
if hasattr(other, 'iteritems'): # iteritems saves memory and lookups
|
||||
for k, v in other.iteritems():
|
||||
self[k] = v
|
||||
elif hasattr(other, '__iter__'): # iter saves memory
|
||||
for k in other:
|
||||
self[k] = other[k]
|
||||
else:
|
||||
for k in other.keys():
|
||||
self[k] = other[k]
|
||||
def get(self, key, default=None):
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
return default
|
||||
def __repr__(self):
|
||||
return repr(dict(self.iteritems()))
|
||||
def __cmp__(self, other):
|
||||
if other is None:
|
||||
return 1
|
||||
if isinstance(other, DictMixin):
|
||||
other = dict(other.iteritems())
|
||||
return cmp(dict(self.iteritems()), other)
|
||||
def __len__(self):
|
||||
return len(self.keys())
|
||||
|
||||
###########################################################################
|
||||
|
||||
from _psyco import compact
|
||||
|
||||
|
||||
class compactdictproxy(DictMixin):
|
||||
|
||||
def __init__(self, ko):
|
||||
self._ko = ko # compact object of which 'self' is the dict
|
||||
|
||||
def __getitem__(self, key):
|
||||
return compact.__getslot__(self._ko, key)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
compact.__setslot__(self._ko, key, value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
compact.__delslot__(self._ko, key)
|
||||
|
||||
def keys(self):
|
||||
return compact.__members__.__get__(self._ko)
|
||||
|
||||
def clear(self):
|
||||
keys = self.keys()
|
||||
keys.reverse()
|
||||
for key in keys:
|
||||
del self[key]
|
||||
|
||||
def __repr__(self):
|
||||
keys = ', '.join(self.keys())
|
||||
return '<compactdictproxy object {%s}>' % (keys,)
|
||||
96
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/logger.py
Normal file
@@ -0,0 +1,96 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco logger.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco logger.
|
||||
|
||||
See log() in core.py.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
|
||||
import _psyco
|
||||
from time import time, localtime, strftime
|
||||
|
||||
|
||||
current = None
|
||||
print_charges = 10
|
||||
dump_delay = 0.2
|
||||
dump_last = 0.0
|
||||
|
||||
def write(s, level):
|
||||
t = time()
|
||||
f = t-int(t)
|
||||
try:
|
||||
current.write("%s.%02d %-*s%s\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0), 63-level, s,
|
||||
"%"*level))
|
||||
current.flush()
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
def psycowrite(s):
|
||||
t = time()
|
||||
f = t-int(t)
|
||||
try:
|
||||
current.write("%s.%02d %-*s%s\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0), 60, s.strip(),
|
||||
"% %"))
|
||||
current.flush()
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
##def writelines(lines, level=0):
|
||||
## if lines:
|
||||
## t = time()
|
||||
## f = t-int(t)
|
||||
## timedesc = strftime("%x %X", localtime(int(t)))
|
||||
## print >> current, "%s.%03d %-*s %s" % (
|
||||
## timedesc, int(f*1000),
|
||||
## 50-level, lines[0],
|
||||
## "+"*level)
|
||||
## timedesc = " " * (len(timedesc)+5)
|
||||
## for line in lines[1:]:
|
||||
## print >> current, timedesc, line
|
||||
|
||||
def writememory():
|
||||
write("memory usage: %d+ kb" % _psyco.memory(), 1)
|
||||
|
||||
def dumpcharges():
|
||||
global dump_last
|
||||
if print_charges:
|
||||
t = time()
|
||||
if not (dump_last <= t < dump_last+dump_delay):
|
||||
if t <= dump_last+1.5*dump_delay:
|
||||
dump_last += dump_delay
|
||||
else:
|
||||
dump_last = t
|
||||
#write("%s: charges:" % who, 0)
|
||||
lst = _psyco.stattop(print_charges)
|
||||
if lst:
|
||||
f = t-int(t)
|
||||
lines = ["%s.%02d ______\n" % (
|
||||
strftime("%X", localtime(int(t))),
|
||||
int(f*100.0))]
|
||||
i = 1
|
||||
for co, charge in lst:
|
||||
detail = co.co_filename
|
||||
if len(detail) > 19:
|
||||
detail = '...' + detail[-17:]
|
||||
lines.append(" #%-3d |%4.1f %%| %-26s%20s:%d\n" %
|
||||
(i, charge*100.0, co.co_name, detail,
|
||||
co.co_firstlineno))
|
||||
i += 1
|
||||
current.writelines(lines)
|
||||
current.flush()
|
||||
|
||||
def writefinalstats():
|
||||
dumpcharges()
|
||||
writememory()
|
||||
writedate("program exit")
|
||||
|
||||
def writedate(msg):
|
||||
write('%s, %s' % (msg, strftime("%x")), 20)
|
||||
379
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/profiler.py
Normal file
@@ -0,0 +1,379 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco profiler (Python part).
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco profiler (Python part).
|
||||
|
||||
The implementation of the non-time-critical parts of the profiler.
|
||||
See profile() and full() in core.py for the easy interface.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import _psyco
|
||||
from support import *
|
||||
import math, time, types, atexit
|
||||
now = time.time
|
||||
try:
|
||||
import thread
|
||||
except ImportError:
|
||||
import dummy_thread as thread
|
||||
|
||||
|
||||
# current profiler instance
|
||||
current = None
|
||||
|
||||
# enabled profilers, in order of priority
|
||||
profilers = []
|
||||
|
||||
# logger module (when enabled by core.log())
|
||||
logger = None
|
||||
|
||||
# a lock for a thread-safe go()
|
||||
go_lock = thread.allocate_lock()
|
||||
|
||||
def go(stop=0):
|
||||
# run the highest-priority profiler in 'profilers'
|
||||
global current
|
||||
go_lock.acquire()
|
||||
try:
|
||||
prev = current
|
||||
if stop:
|
||||
del profilers[:]
|
||||
if prev:
|
||||
if profilers and profilers[0] is prev:
|
||||
return # best profiler already running
|
||||
prev.stop()
|
||||
current = None
|
||||
for p in profilers[:]:
|
||||
if p.start():
|
||||
current = p
|
||||
if logger: # and p is not prev:
|
||||
logger.write("%s: starting" % p.__class__.__name__, 5)
|
||||
return
|
||||
finally:
|
||||
go_lock.release()
|
||||
# no profiler is running now
|
||||
if stop:
|
||||
if logger:
|
||||
logger.writefinalstats()
|
||||
else:
|
||||
tag2bind()
|
||||
|
||||
atexit.register(go, 1)
|
||||
|
||||
|
||||
def buildfncache(globals, cache):
|
||||
if hasattr(types.IntType, '__dict__'):
|
||||
clstypes = (types.ClassType, types.TypeType)
|
||||
else:
|
||||
clstypes = types.ClassType
|
||||
for x in globals.values():
|
||||
if isinstance(x, types.MethodType):
|
||||
x = x.im_func
|
||||
if isinstance(x, types.FunctionType):
|
||||
cache[x.func_code] = x, ''
|
||||
elif isinstance(x, clstypes):
|
||||
for y in x.__dict__.values():
|
||||
if isinstance(y, types.MethodType):
|
||||
y = y.im_func
|
||||
if isinstance(y, types.FunctionType):
|
||||
cache[y.func_code] = y, x.__name__
|
||||
|
||||
# code-to-function mapping (cache)
|
||||
function_cache = {}
|
||||
|
||||
def trytobind(co, globals, log=1):
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
buildfncache(globals, function_cache)
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
if logger:
|
||||
logger.write('warning: cannot find function %s in %s' %
|
||||
(co.co_name, globals.get('__name__', '?')), 3)
|
||||
return # give up
|
||||
if logger and log:
|
||||
modulename = globals.get('__name__', '?')
|
||||
if clsname:
|
||||
modulename += '.' + clsname
|
||||
logger.write('bind function: %s.%s' % (modulename, co.co_name), 1)
|
||||
f.func_code = _psyco.proxycode(f)
|
||||
|
||||
|
||||
# the list of code objects that have been tagged
|
||||
tagged_codes = []
|
||||
|
||||
def tag(co, globals):
|
||||
if logger:
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
buildfncache(globals, function_cache)
|
||||
try:
|
||||
f, clsname = function_cache[co]
|
||||
except KeyError:
|
||||
clsname = '' # give up
|
||||
modulename = globals.get('__name__', '?')
|
||||
if clsname:
|
||||
modulename += '.' + clsname
|
||||
logger.write('tag function: %s.%s' % (modulename, co.co_name), 1)
|
||||
tagged_codes.append((co, globals))
|
||||
_psyco.turbo_frame(co)
|
||||
_psyco.turbo_code(co)
|
||||
|
||||
def tag2bind():
|
||||
if tagged_codes:
|
||||
if logger:
|
||||
logger.write('profiling stopped, binding %d functions' %
|
||||
len(tagged_codes), 2)
|
||||
for co, globals in tagged_codes:
|
||||
trytobind(co, globals, 0)
|
||||
function_cache.clear()
|
||||
del tagged_codes[:]
|
||||
|
||||
|
||||
class Profiler:
|
||||
MemoryTimerResolution = 0.103
|
||||
|
||||
def run(self, memory, time, memorymax, timemax):
|
||||
self.memory = memory
|
||||
self.memorymax = memorymax
|
||||
self.time = time
|
||||
if timemax is None:
|
||||
self.endtime = None
|
||||
else:
|
||||
self.endtime = now() + timemax
|
||||
self.alarms = []
|
||||
profilers.append(self)
|
||||
go()
|
||||
|
||||
def start(self):
|
||||
curmem = _psyco.memory()
|
||||
memlimits = []
|
||||
if self.memorymax is not None:
|
||||
if curmem >= self.memorymax:
|
||||
if logger:
|
||||
logger.writememory()
|
||||
return self.limitreached('memorymax')
|
||||
memlimits.append(self.memorymax)
|
||||
if self.memory is not None:
|
||||
if self.memory <= 0:
|
||||
if logger:
|
||||
logger.writememory()
|
||||
return self.limitreached('memory')
|
||||
memlimits.append(curmem + self.memory)
|
||||
self.memory_at_start = curmem
|
||||
|
||||
curtime = now()
|
||||
timelimits = []
|
||||
if self.endtime is not None:
|
||||
if curtime >= self.endtime:
|
||||
return self.limitreached('timemax')
|
||||
timelimits.append(self.endtime - curtime)
|
||||
if self.time is not None:
|
||||
if self.time <= 0.0:
|
||||
return self.limitreached('time')
|
||||
timelimits.append(self.time)
|
||||
self.time_at_start = curtime
|
||||
|
||||
try:
|
||||
self.do_start()
|
||||
except error, e:
|
||||
if logger:
|
||||
logger.write('%s: disabled by psyco.error:' % (
|
||||
self.__class__.__name__), 4)
|
||||
logger.write(' %s' % str(e), 3)
|
||||
return 0
|
||||
|
||||
if memlimits:
|
||||
self.memlimits_args = (time.sleep, (self.MemoryTimerResolution,),
|
||||
self.check_memory, (min(memlimits),))
|
||||
self.alarms.append(_psyco.alarm(*self.memlimits_args))
|
||||
if timelimits:
|
||||
self.alarms.append(_psyco.alarm(time.sleep, (min(timelimits),),
|
||||
self.time_out))
|
||||
return 1
|
||||
|
||||
def stop(self):
|
||||
for alarm in self.alarms:
|
||||
alarm.stop(0)
|
||||
for alarm in self.alarms:
|
||||
alarm.stop(1) # wait for parallel threads to stop
|
||||
del self.alarms[:]
|
||||
if self.time is not None:
|
||||
self.time -= now() - self.time_at_start
|
||||
if self.memory is not None:
|
||||
self.memory -= _psyco.memory() - self.memory_at_start
|
||||
|
||||
try:
|
||||
self.do_stop()
|
||||
except error:
|
||||
return 0
|
||||
return 1
|
||||
|
||||
def check_memory(self, limit):
|
||||
if _psyco.memory() < limit:
|
||||
return self.memlimits_args
|
||||
go()
|
||||
|
||||
def time_out(self):
|
||||
self.time = 0.0
|
||||
go()
|
||||
|
||||
def limitreached(self, limitname):
|
||||
try:
|
||||
profilers.remove(self)
|
||||
except ValueError:
|
||||
pass
|
||||
if logger:
|
||||
logger.write('%s: disabled (%s limit reached)' % (
|
||||
self.__class__.__name__, limitname), 4)
|
||||
return 0
|
||||
|
||||
|
||||
class FullCompiler(Profiler):
|
||||
|
||||
def do_start(self):
|
||||
_psyco.profiling('f')
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
|
||||
|
||||
class RunOnly(Profiler):
|
||||
|
||||
def do_start(self):
|
||||
_psyco.profiling('n')
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
|
||||
|
||||
class ChargeProfiler(Profiler):
|
||||
|
||||
def __init__(self, watermark, parentframe):
|
||||
self.watermark = watermark
|
||||
self.parent2 = parentframe * 2.0
|
||||
self.lock = thread.allocate_lock()
|
||||
|
||||
def init_charges(self):
|
||||
_psyco.statwrite(watermark = self.watermark,
|
||||
parent2 = self.parent2)
|
||||
|
||||
def do_stop(self):
|
||||
_psyco.profiling('.')
|
||||
_psyco.statwrite(callback = None)
|
||||
|
||||
|
||||
class ActiveProfiler(ChargeProfiler):
|
||||
|
||||
def active_start(self):
|
||||
_psyco.profiling('p')
|
||||
|
||||
def do_start(self):
|
||||
self.init_charges()
|
||||
self.active_start()
|
||||
_psyco.statwrite(callback = self.charge_callback)
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
tag(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
class PassiveProfiler(ChargeProfiler):
|
||||
|
||||
initial_charge_unit = _psyco.statread('unit')
|
||||
reset_stats_after = 120 # half-lives (maximum 200!)
|
||||
reset_limit = initial_charge_unit * (2.0 ** reset_stats_after)
|
||||
|
||||
def __init__(self, watermark, halflife, pollfreq, parentframe):
|
||||
ChargeProfiler.__init__(self, watermark, parentframe)
|
||||
self.pollfreq = pollfreq
|
||||
# self.progress is slightly more than 1.0, and computed so that
|
||||
# do_profile() will double the change_unit every 'halflife' seconds.
|
||||
self.progress = 2.0 ** (1.0 / (halflife * pollfreq))
|
||||
|
||||
def reset(self):
|
||||
_psyco.statwrite(unit = self.initial_charge_unit, callback = None)
|
||||
_psyco.statreset()
|
||||
if logger:
|
||||
logger.write("%s: resetting stats" % self.__class__.__name__, 1)
|
||||
|
||||
def passive_start(self):
|
||||
self.passivealarm_args = (time.sleep, (1.0 / self.pollfreq,),
|
||||
self.do_profile)
|
||||
self.alarms.append(_psyco.alarm(*self.passivealarm_args))
|
||||
|
||||
def do_start(self):
|
||||
tag2bind()
|
||||
self.init_charges()
|
||||
self.passive_start()
|
||||
|
||||
def do_profile(self):
|
||||
_psyco.statcollect()
|
||||
if logger:
|
||||
logger.dumpcharges()
|
||||
nunit = _psyco.statread('unit') * self.progress
|
||||
if nunit > self.reset_limit:
|
||||
self.reset()
|
||||
else:
|
||||
_psyco.statwrite(unit = nunit, callback = self.charge_callback)
|
||||
return self.passivealarm_args
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
trytobind(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
class ActivePassiveProfiler(PassiveProfiler, ActiveProfiler):
|
||||
|
||||
def do_start(self):
|
||||
self.init_charges()
|
||||
self.active_start()
|
||||
self.passive_start()
|
||||
|
||||
def charge_callback(self, frame, charge):
|
||||
tag(frame.f_code, frame.f_globals)
|
||||
|
||||
|
||||
|
||||
#
|
||||
# we register our own version of sys.settrace(), sys.setprofile()
|
||||
# and thread.start_new_thread().
|
||||
#
|
||||
|
||||
def psyco_settrace(*args, **kw):
|
||||
"This is the Psyco-aware version of sys.settrace()."
|
||||
result = original_settrace(*args, **kw)
|
||||
go()
|
||||
return result
|
||||
|
||||
def psyco_setprofile(*args, **kw):
|
||||
"This is the Psyco-aware version of sys.setprofile()."
|
||||
result = original_setprofile(*args, **kw)
|
||||
go()
|
||||
return result
|
||||
|
||||
def psyco_thread_stub(callable, args, kw):
|
||||
_psyco.statcollect()
|
||||
if kw is None:
|
||||
return callable(*args)
|
||||
else:
|
||||
return callable(*args, **kw)
|
||||
|
||||
def psyco_start_new_thread(callable, args, kw=None):
|
||||
"This is the Psyco-aware version of thread.start_new_thread()."
|
||||
return original_start_new_thread(psyco_thread_stub, (callable, args, kw))
|
||||
|
||||
original_settrace = sys.settrace
|
||||
original_setprofile = sys.setprofile
|
||||
original_start_new_thread = thread.start_new_thread
|
||||
sys.settrace = psyco_settrace
|
||||
sys.setprofile = psyco_setprofile
|
||||
thread.start_new_thread = psyco_start_new_thread
|
||||
# hack to patch threading._start_new_thread if the module is
|
||||
# already loaded
|
||||
if ('threading' in sys.modules and
|
||||
hasattr(sys.modules['threading'], '_start_new_thread')):
|
||||
sys.modules['threading']._start_new_thread = psyco_start_new_thread
|
||||
191
Calibre_Plugins/eReaderPDB2PML_plugin/osx/psyco/support.py
Normal file
@@ -0,0 +1,191 @@
|
||||
###########################################################################
|
||||
#
|
||||
# Psyco general support module.
|
||||
# Copyright (C) 2001-2002 Armin Rigo et.al.
|
||||
|
||||
"""Psyco general support module.
|
||||
|
||||
For internal use.
|
||||
"""
|
||||
###########################################################################
|
||||
|
||||
import sys, _psyco, __builtin__
|
||||
|
||||
error = _psyco.error
|
||||
class warning(Warning):
|
||||
pass
|
||||
|
||||
_psyco.NoLocalsWarning = warning
|
||||
|
||||
def warn(msg):
|
||||
from warnings import warn
|
||||
warn(msg, warning, stacklevel=2)
|
||||
|
||||
#
|
||||
# Version checks
|
||||
#
|
||||
__version__ = 0x010600f0
|
||||
if _psyco.PSYVER != __version__:
|
||||
raise error, "version mismatch between Psyco parts, reinstall it"
|
||||
|
||||
version_info = (__version__ >> 24,
|
||||
(__version__ >> 16) & 0xff,
|
||||
(__version__ >> 8) & 0xff,
|
||||
{0xa0: 'alpha',
|
||||
0xb0: 'beta',
|
||||
0xc0: 'candidate',
|
||||
0xf0: 'final'}[__version__ & 0xf0],
|
||||
__version__ & 0xf)
|
||||
|
||||
|
||||
VERSION_LIMITS = [0x02020200, # 2.2.2
|
||||
0x02030000, # 2.3
|
||||
0x02040000] # 2.4
|
||||
|
||||
if ([v for v in VERSION_LIMITS if v <= sys.hexversion] !=
|
||||
[v for v in VERSION_LIMITS if v <= _psyco.PYVER ]):
|
||||
if sys.hexversion < VERSION_LIMITS[0]:
|
||||
warn("Psyco requires Python version 2.2.2 or later")
|
||||
else:
|
||||
warn("Psyco version does not match Python version. "
|
||||
"Psyco must be updated or recompiled")
|
||||
|
||||
|
||||
if hasattr(_psyco, 'ALL_CHECKS') and hasattr(_psyco, 'VERBOSE_LEVEL'):
|
||||
print >> sys.stderr, ('psyco: running in debugging mode on %s' %
|
||||
_psyco.PROCESSOR)
|
||||
|
||||
|
||||
###########################################################################
|
||||
# sys._getframe() gives strange results on a mixed Psyco- and Python-style
|
||||
# stack frame. Psyco provides a replacement that partially emulates Python
|
||||
# frames from Psyco frames. The new sys._getframe() may return objects of
|
||||
# a custom "Psyco frame" type, which is a subtype of the normal frame type.
|
||||
#
|
||||
# The same problems require some other built-in functions to be replaced
|
||||
# as well. Note that the local variables are not available in any
|
||||
# dictionary with Psyco.
|
||||
|
||||
|
||||
class Frame:
|
||||
pass
|
||||
|
||||
|
||||
class PythonFrame(Frame):
|
||||
|
||||
def __init__(self, frame):
|
||||
self.__dict__.update({
|
||||
'_frame': frame,
|
||||
})
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'f_back':
|
||||
try:
|
||||
result = embedframe(_psyco.getframe(self._frame))
|
||||
except ValueError:
|
||||
result = None
|
||||
except error:
|
||||
warn("f_back is skipping dead Psyco frames")
|
||||
result = self._frame.f_back
|
||||
self.__dict__['f_back'] = result
|
||||
return result
|
||||
else:
|
||||
return getattr(self._frame, attr)
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
setattr(self._frame, attr, value)
|
||||
|
||||
def __delattr__(self, attr):
|
||||
delattr(self._frame, attr)
|
||||
|
||||
|
||||
class PsycoFrame(Frame):
|
||||
|
||||
def __init__(self, tag):
|
||||
self.__dict__.update({
|
||||
'_tag' : tag,
|
||||
'f_code' : tag[0],
|
||||
'f_globals': tag[1],
|
||||
})
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'f_back':
|
||||
try:
|
||||
result = embedframe(_psyco.getframe(self._tag))
|
||||
except ValueError:
|
||||
result = None
|
||||
elif attr == 'f_lineno':
|
||||
result = self.f_code.co_firstlineno # better than nothing
|
||||
elif attr == 'f_builtins':
|
||||
result = self.f_globals['__builtins__']
|
||||
elif attr == 'f_restricted':
|
||||
result = self.f_builtins is not __builtins__
|
||||
elif attr == 'f_locals':
|
||||
raise AttributeError, ("local variables of functions run by Psyco "
|
||||
"cannot be accessed in any way, sorry")
|
||||
else:
|
||||
raise AttributeError, ("emulated Psyco frames have "
|
||||
"no '%s' attribute" % attr)
|
||||
self.__dict__[attr] = result
|
||||
return result
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
raise AttributeError, "Psyco frame objects are read-only"
|
||||
|
||||
def __delattr__(self, attr):
|
||||
if attr == 'f_trace':
|
||||
# for bdb which relies on CPython frames exhibiting a slightly
|
||||
# buggy behavior: you can 'del f.f_trace' as often as you like
|
||||
# even without having set it previously.
|
||||
return
|
||||
raise AttributeError, "Psyco frame objects are read-only"
|
||||
|
||||
|
||||
def embedframe(result):
|
||||
if type(result) is type(()):
|
||||
return PsycoFrame(result)
|
||||
else:
|
||||
return PythonFrame(result)
|
||||
|
||||
def _getframe(depth=0):
|
||||
"""Return a frame object from the call stack. This is a replacement for
|
||||
sys._getframe() which is aware of Psyco frames.
|
||||
|
||||
The returned objects are instances of either PythonFrame or PsycoFrame
|
||||
instead of being real Python-level frame object, so that they can emulate
|
||||
the common attributes of frame objects.
|
||||
|
||||
The original sys._getframe() ignoring Psyco frames altogether is stored in
|
||||
psyco._getrealframe(). See also psyco._getemulframe()."""
|
||||
# 'depth+1' to account for this _getframe() Python function
|
||||
return embedframe(_psyco.getframe(depth+1))
|
||||
|
||||
def _getemulframe(depth=0):
|
||||
"""As _getframe(), but the returned objects are real Python frame objects
|
||||
emulating Psyco frames. Some of their attributes can be wrong or missing,
|
||||
however."""
|
||||
# 'depth+1' to account for this _getemulframe() Python function
|
||||
return _psyco.getframe(depth+1, 1)
|
||||
|
||||
def patch(name, module=__builtin__):
|
||||
f = getattr(_psyco, name)
|
||||
org = getattr(module, name)
|
||||
if org is not f:
|
||||
setattr(module, name, f)
|
||||
setattr(_psyco, 'original_' + name, org)
|
||||
|
||||
_getrealframe = sys._getframe
|
||||
sys._getframe = _getframe
|
||||
patch('globals')
|
||||
patch('eval')
|
||||
patch('execfile')
|
||||
patch('locals')
|
||||
patch('vars')
|
||||
patch('dir')
|
||||
patch('input')
|
||||
_psyco.original_raw_input = raw_input
|
||||
__builtin__.__in_psyco__ = 0==1 # False
|
||||
|
||||
if hasattr(_psyco, 'compact'):
|
||||
import kdictproxy
|
||||
_psyco.compactdictproxy = kdictproxy.compactdictproxy
|
||||
BIN
Calibre_Plugins/k4mobidedrm_plugin.zip
Normal file
Binary file not shown.
490
Calibre_Plugins/k4mobidedrm_plugin/k4mobidedrm_plugin.py
Normal file
@@ -0,0 +1,490 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# engine to remove drm from Kindle for Mac and Kindle for PC books
|
||||
# for personal use for archiving and converting your ebooks
|
||||
|
||||
# PLEASE DO NOT PIRATE EBOOKS!
|
||||
|
||||
# We want all authors and publishers, and eBook stores to live
|
||||
# long and prosperous lives but at the same time we just want to
|
||||
# be able to read OUR books on whatever device we want and to keep them
|
||||
# readable for a long, long time
|
||||
|
||||
# This borrows very heavily from works by CMBDTC, IHeartCabbages, skindle,
|
||||
# unswindle, DarkReverser, ApprenticeAlf, DiapDealer, some_updates
|
||||
# and many many others
|
||||
|
||||
# It can run standalone to convert K4M/K4PC/Mobi files, or it can be installed as a
|
||||
# plugin for Calibre (http://calibre-ebook.com/about) so that importing
|
||||
# K4 or Mobi with DRM is no longer a multi-step process.
|
||||
#
|
||||
# ***NOTE*** If you are using this script as a calibre plugin for a K4M or K4PC ebook
|
||||
# then calibre must be installed on the same machine and in the same account as K4PC or K4M
|
||||
# for the plugin version to function properly.
|
||||
#
|
||||
# To create a Calibre plugin, rename this file so that the filename
|
||||
# ends in '_plugin.py', put it into a ZIP file with all its supporting python routines
|
||||
# and import that ZIP into Calibre using its plugin configuration GUI.
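#
# A minimal sketch (not part of the original instructions above) of building
# that ZIP with Python's standard zipfile module; the helper module names are
# assumptions based on the imports used later in this file:
#
#   import zipfile
#   zf = zipfile.ZipFile('k4mobidedrm_plugin.zip', 'w')
#   for name in ('k4mobidedrm_plugin.py', 'mobidedrm.py', 'k4pcutils.py', 'k4mutils.py'):
#       zf.write(name)
#   zf.close()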
|
||||
|
||||
from __future__ import with_statement
|
||||
|
||||
__version__ = '1.1'
|
||||
|
||||
class Unbuffered:
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
def write(self, data):
|
||||
self.stream.write(data)
|
||||
self.stream.flush()
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.stream, attr)
|
||||
|
||||
import sys
|
||||
import os, csv, getopt
|
||||
import binascii
|
||||
import zlib
|
||||
from struct import pack, unpack, unpack_from
|
||||
|
||||
|
||||
#Exception Handling
|
||||
class DrmException(Exception):
|
||||
pass
|
||||
|
||||
#
|
||||
# crypto digest routines
|
||||
#
|
||||
|
||||
import hashlib
|
||||
|
||||
def MD5(message):
|
||||
ctx = hashlib.md5()
|
||||
ctx.update(message)
|
||||
return ctx.digest()
|
||||
|
||||
def SHA1(message):
|
||||
ctx = hashlib.sha1()
|
||||
ctx.update(message)
|
||||
return ctx.digest()
|
||||
|
||||
# determine if we are running as a calibre plugin
|
||||
if 'calibre' in sys.modules:
|
||||
inCalibre = True
|
||||
global openKindleInfo, CryptUnprotectData, GetUserName, GetVolumeSerialNumber, charMap1, charMap2, charMap3, charMap4
|
||||
else:
|
||||
inCalibre = False
|
||||
|
||||
#
|
||||
# start of Kindle specific routines
|
||||
#
|
||||
|
||||
if not inCalibre:
|
||||
import mobidedrm
|
||||
if sys.platform.startswith('win'):
|
||||
from k4pcutils import openKindleInfo, CryptUnprotectData, GetUserName, GetVolumeSerialNumber, charMap1, charMap2, charMap3, charMap4
|
||||
if sys.platform.startswith('darwin'):
|
||||
from k4mutils import openKindleInfo, CryptUnprotectData, GetUserName, GetVolumeSerialNumber, charMap1, charMap2, charMap3, charMap4
|
||||
|
||||
global kindleDatabase
|
||||
|
||||
# Encode the bytes in data with the characters in map
|
||||
def encode(data, map):
|
||||
result = ""
|
||||
for char in data:
|
||||
value = ord(char)
|
||||
Q = (value ^ 0x80) // len(map)
|
||||
R = value % len(map)
|
||||
result += map[Q]
|
||||
result += map[R]
|
||||
return result
|
||||
|
||||
# Hash the bytes in data and then encode the digest with the characters in map
|
||||
def encodeHash(data,map):
|
||||
return encode(MD5(data),map)
|
||||
|
||||
# Decode the string in data with the characters in map. Returns the decoded bytes
|
||||
def decode(data,map):
|
||||
result = ""
|
||||
for i in range (0,len(data)-1,2):
|
||||
high = map.find(data[i])
|
||||
low = map.find(data[i+1])
|
||||
if (high == -1) or (low == -1) :
|
||||
break
|
||||
value = (((high * len(map)) ^ 0x80) & 0xFF) + low
|
||||
result += pack("B",value)
|
||||
return result
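#
# Sanity-check sketch (not in the original source): encode() and decode() are
# designed as a pair, so a round trip over the same character map should give
# back the original bytes, e.g.:
#
#   assert decode(encode(data, charMap2), charMap2) == data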
|
||||
|
||||
|
||||
# Parse the Kindle.info file and return the records as a dictionary of key-value pairs
|
||||
def parseKindleInfo():
|
||||
DB = {}
|
||||
infoReader = openKindleInfo()
|
||||
infoReader.read(1)
|
||||
data = infoReader.read()
|
||||
if sys.platform.startswith('win'):
|
||||
items = data.split('{')
|
||||
else :
|
||||
items = data.split('[')
|
||||
for item in items:
|
||||
splito = item.split(':')
|
||||
DB[splito[0]] =splito[1]
|
||||
return DB
|
||||
|
||||
# Get a record from the Kindle.info file for the key "hashedKey" (already hashed and encoded). Return the decoded and decrypted record
|
||||
def getKindleInfoValueForHash(hashedKey):
|
||||
global kindleDatabase
|
||||
encryptedValue = decode(kindleDatabase[hashedKey],charMap2)
|
||||
if sys.platform.startswith('win'):
|
||||
return CryptUnprotectData(encryptedValue,"")
|
||||
else:
|
||||
cleartext = CryptUnprotectData(encryptedValue)
|
||||
return decode(cleartext, charMap1)
|
||||
|
||||
# Get a record from the Kindle.info file for the string in "key" (plaintext). Return the decoded and decrypted record
|
||||
def getKindleInfoValueForKey(key):
|
||||
return getKindleInfoValueForHash(encodeHash(key,charMap2))
|
||||
|
||||
# Determine whether the original string for a hashed/encoded string is known. If so, return the original string; otherwise return an empty string.
|
||||
def findNameForHash(hash):
|
||||
names = ["kindle.account.tokens","kindle.cookie.item","eulaVersionAccepted","login_date","kindle.token.item","login","kindle.key.item","kindle.name.info","kindle.device.info", "MazamaRandomNumber"]
|
||||
result = ""
|
||||
for name in names:
|
||||
if hash == encodeHash(name, charMap2):
|
||||
result = name
|
||||
break
|
||||
return result
|
||||
|
||||
# Print all the records from the kindle.info file (option -i)
|
||||
def printKindleInfo():
|
||||
for record in kindleDatabase:
|
||||
name = findNameForHash(record)
|
||||
if name != "" :
|
||||
print (name)
|
||||
print ("--------------------------")
|
||||
else :
|
||||
print ("Unknown Record")
|
||||
print getKindleInfoValueForHash(record)
|
||||
print "\n"
|
||||
|
||||
#
|
||||
# PID generation routines
|
||||
#
|
||||
|
||||
# Returns the two bits at offset from a bit field
|
||||
def getTwoBitsFromBitField(bitField,offset):
|
||||
byteNumber = offset // 4
|
||||
bitPosition = 6 - 2*(offset % 4)
|
||||
return ord(bitField[byteNumber]) >> bitPosition & 3
|
||||
|
||||
# Returns the six bits at offset from a bit field
|
||||
def getSixBitsFromBitField(bitField,offset):
|
||||
offset *= 3
|
||||
value = (getTwoBitsFromBitField(bitField,offset) <<4) + (getTwoBitsFromBitField(bitField,offset+1) << 2) +getTwoBitsFromBitField(bitField,offset+2)
|
||||
return value
|
||||
|
||||
# Encode the hash six bits at a time to generate the PID string
|
||||
def encodePID(hash):
|
||||
global charMap3
|
||||
PID = ""
|
||||
for position in range (0,8):
|
||||
PID += charMap3[getSixBitsFromBitField(hash,position)]
|
||||
return PID
|
||||
|
||||
# Encryption table (a standard CRC-32 lookup table) used to generate the device PID
|
||||
def generatePidEncryptionTable() :
|
||||
table = []
|
||||
for counter1 in range (0,0x100):
|
||||
value = counter1
|
||||
for counter2 in range (0,8):
|
||||
if (value & 1 == 0) :
|
||||
value = value >> 1
|
||||
else :
|
||||
value = value >> 1
|
||||
value = value ^ 0xEDB88320
|
||||
table.append(value)
|
||||
return table
|
||||
|
||||
# Seed value used to generate the device PID
|
||||
def generatePidSeed(table,dsn) :
|
||||
value = 0
|
||||
for counter in range (0,4) :
|
||||
index = (ord(dsn[counter]) ^ value) &0xFF
|
||||
value = (value >> 8) ^ table[index]
|
||||
return value
|
||||
|
||||
# Generate the device PID
|
||||
def generateDevicePID(table,dsn,nbRoll):
|
||||
seed = generatePidSeed(table,dsn)
|
||||
pidAscii = ""
|
||||
pid = [(seed >>24) &0xFF,(seed >> 16) &0xff,(seed >> 8) &0xFF,(seed) & 0xFF,(seed>>24) & 0xFF,(seed >> 16) &0xff,(seed >> 8) &0xFF,(seed) & 0xFF]
|
||||
index = 0
|
||||
for counter in range (0,nbRoll):
|
||||
pid[index] = pid[index] ^ ord(dsn[counter])
|
||||
index = (index+1) %8
|
||||
for counter in range (0,8):
|
||||
index = ((((pid[counter] >>5) & 3) ^ pid[counter]) & 0x1f) + (pid[counter] >> 7)
|
||||
pidAscii += charMap4[index]
|
||||
return pidAscii
|
||||
|
||||
# convert from 8 digit PID to 10 digit PID with checksum
|
||||
def checksumPid(s):
|
||||
letters = "ABCDEFGHIJKLMNPQRSTUVWXYZ123456789"
|
||||
crc = (~binascii.crc32(s,-1))&0xFFFFFFFF
|
||||
crc = crc ^ (crc >> 16)
|
||||
res = s
|
||||
l = len(letters)
|
||||
for i in (0,1):
|
||||
b = crc & 0xff
|
||||
pos = (b // l) ^ (b % l)
|
||||
res += letters[pos%l]
|
||||
crc >>= 8
|
||||
return res
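#
# Usage note (not in the original source): the loop above appends two checksum
# characters, so an 8-character device PID becomes the 10-character form, e.g.:
#
#   pid10 = checksumPid(devicePID)   # len(pid10) == len(devicePID) + 2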
|
||||
|
||||
|
||||
class MobiPeek:
|
||||
def loadSection(self, section):
|
||||
before, after = self.sections[section:section+2]
|
||||
self.f.seek(before)
|
||||
return self.f.read(after - before)
|
||||
def __init__(self, filename):
|
||||
self.f = file(filename, 'rb')
|
||||
self.header = self.f.read(78)
|
||||
self.ident = self.header[0x3C:0x3C+8]
|
||||
if self.ident != 'BOOKMOBI' and self.ident != 'TEXtREAd':
|
||||
raise DrmException('invalid file format')
|
||||
self.num_sections, = unpack_from('>H', self.header, 76)
|
||||
sections = self.f.read(self.num_sections*8)
|
||||
self.sections = unpack_from('>%dL' % (self.num_sections*2), sections, 0)[::2] + (0xfffffff, )
|
||||
self.sect0 = self.loadSection(0)
|
||||
self.f.close()
|
||||
def getBookTitle(self):
|
||||
# get book title
|
||||
toff, tlen = unpack('>II', self.sect0[0x54:0x5c])
|
||||
tend = toff + tlen
|
||||
title = self.sect0[toff:tend]
|
||||
return title
|
||||
def getexthData(self):
|
||||
# if exth region exists then grab it
|
||||
# get length of this header
|
||||
length, type, codepage, unique_id, version = unpack('>LLLLL', self.sect0[20:40])
|
||||
exth_flag, = unpack('>L', self.sect0[0x80:0x84])
|
||||
exth = ''
|
||||
if exth_flag & 0x40:
|
||||
exth = self.sect0[16 + length:]
|
||||
return exth
|
||||
def isNotEncrypted(self):
|
||||
lock_type, = unpack('>H', self.sect0[0xC:0xC+2])
|
||||
if lock_type == 0:
|
||||
return True
|
||||
return False
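#
# Usage sketch (not in the original source; it mirrors what main() does further
# down, and the file name here is only an example):
#
#   peek = MobiPeek('book.azw')
#   if not peek.isNotEncrypted():
#       pid = getK4Pids(peek.getexthData(), peek.getBookTitle())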
|
||||
|
||||
# DiapDealer's stuff: Parse the EXTH header records and parse the Kindleinfo
|
||||
# file to calculate the book pid.
|
||||
def getK4Pids(exth, title):
|
||||
global kindleDatabase
|
||||
try:
|
||||
kindleDatabase = parseKindleInfo()
|
||||
except Exception as message:
|
||||
print(message)
|
||||
|
||||
if kindleDatabase != None :
|
||||
# Get the Mazama Random number
|
||||
MazamaRandomNumber = getKindleInfoValueForKey("MazamaRandomNumber")
|
||||
|
||||
# Get the HDD serial
|
||||
encodedSystemVolumeSerialNumber = encodeHash(GetVolumeSerialNumber(),charMap1)
|
||||
|
||||
# Get the current user name
|
||||
encodedUsername = encodeHash(GetUserName(),charMap1)
|
||||
|
||||
# concat, hash and encode to calculate the DSN
|
||||
DSN = encode(SHA1(MazamaRandomNumber+encodedSystemVolumeSerialNumber+encodedUsername),charMap1)
|
||||
|
||||
print("\nDSN: " + DSN)
|
||||
|
||||
# Compute the device PID (which, as far as I can tell, is used for nothing).
|
||||
# But hey, stuff being printed out is apparently cool.
|
||||
table = generatePidEncryptionTable()
|
||||
devicePID = generateDevicePID(table,DSN,4)
|
||||
|
||||
print("Device PID: " + checksumPid(devicePID))
|
||||
|
||||
# Compute book PID
|
||||
exth_records = {}
|
||||
nitems, = unpack('>I', exth[8:12])
|
||||
pos = 12
|
||||
# Parse the exth records, storing data indexed by type
|
||||
for i in xrange(nitems):
|
||||
type, size = unpack('>II', exth[pos: pos + 8])
|
||||
content = exth[pos + 8: pos + size]
|
||||
|
||||
exth_records[type] = content
|
||||
pos += size
|
||||
|
||||
# Grab the contents of the type 209 exth record
|
||||
if exth_records[209] != None:
|
||||
data = exth_records[209]
|
||||
else:
|
||||
raise DrmException("\nNo EXTH record type 209 - Perhaps not a K4 file?")
|
||||
|
||||
# Parse the 209 data to find the exth record with the token data.
|
||||
# The last character of the 209 data points to the record with the token.
|
||||
# Always 208 from my experience, but I'll leave the logic in case that changes.
|
||||
for i in xrange(len(data)):
|
||||
if ord(data[i]) != 0:
|
||||
if exth_records[ord(data[i])] != None:
|
||||
token = exth_records[ord(data[i])]
|
||||
|
||||
# Get the kindle account token
|
||||
kindleAccountToken = getKindleInfoValueForKey("kindle.account.tokens")
|
||||
|
||||
print("Account Token: " + kindleAccountToken)
|
||||
|
||||
pidHash = SHA1(DSN+kindleAccountToken+exth_records[209]+token)
|
||||
|
||||
bookPID = encodePID(pidHash)
|
||||
bookPID = checksumPid(bookPID)
|
||||
|
||||
if exth_records[503] != None:
|
||||
print "Pid for " + exth_records[503] + ": " + bookPID
|
||||
else:
|
||||
print "Pid for " + title + ":" + bookPID
|
||||
return bookPID
|
||||
|
||||
raise DrmException("\nCould not access K4 data - Perhaps K4 is not installed/configured?")
|
||||
return None
|
||||
|
||||
#
|
||||
# Main
|
||||
#
|
||||
def main(argv=sys.argv):
|
||||
global kindleDatabase
|
||||
import mobidedrm
|
||||
print ('K4MobiDeDrm v%(__version__)s '
|
||||
'provided by the work of many including DiapDealer, SomeUpdates, IHeartCabbages, CMBDTC, Skindle, DarkReverser, ApprenticeAlf, etc .' % globals())
|
||||
|
||||
if len(argv)<3:
|
||||
print "Removes DRM protection from K4PC, K4M, and Mobi ebooks"
|
||||
print "Usage:"
|
||||
print " %s <infile> <outfile> [<pidnums>]" % argv[0]
|
||||
return 1
|
||||
|
||||
if len(argv) == 4:
|
||||
pidnums = argv[3]
|
||||
|
||||
if len(argv) == 3:
|
||||
pidnums = ""
|
||||
|
||||
kindleDatabase = None
|
||||
infile = argv[1]
|
||||
outfile = argv[2]
|
||||
try:
|
||||
# first try with K4PC/K4M
|
||||
ex = MobiPeek(infile)
|
||||
if ex.isNotEncrypted():
|
||||
print "File was Not Encrypted"
|
||||
return 2
|
||||
title = ex.getBookTitle()
|
||||
exth = ex.getexthData()
|
||||
pid = getK4Pids(exth, title)
|
||||
unlocked_file = mobidedrm.getUnencryptedBook(infile, pid)
|
||||
except DrmException:
|
||||
pass
|
||||
except mobidedrm.DrmException:
|
||||
pass
|
||||
else:
|
||||
file(outfile, 'wb').write(unlocked_file)
|
||||
return 0
|
||||
|
||||
# now try from the pid list
|
||||
pids = pidnums.split(',')
|
||||
for pid in pids:
|
||||
try:
|
||||
print 'Trying: "'+ pid + '"'
|
||||
unlocked_file = mobidedrm.getUnencryptedBook(infile, pid)
|
||||
except mobidedrm.DrmException:
|
||||
pass
|
||||
else:
|
||||
file(outfile, 'wb').write(unlocked_file)
|
||||
return 0
|
||||
|
||||
# we could not decrypt the book
|
||||
print "Error: Could Not Unencrypt Book"
|
||||
return 1
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.stdout=Unbuffered(sys.stdout)
|
||||
sys.exit(main())
|
||||
|
||||
|
||||
if not __name__ == "__main__" and inCalibre:
|
||||
from calibre.customize import FileTypePlugin
|
||||
|
||||
class K4DeDRM(FileTypePlugin):
|
||||
name = 'K4PC, K4Mac, Mobi DeDRM' # Name of the plugin
|
||||
description = 'Removes DRM from K4PC, K4Mac, and Mobi files. \
|
||||
Provided by the work of many including DiapDealer, SomeUpdates, IHeartCabbages, CMBDTC, Skindle, DarkReverser, ApprenticeAlf, etc.'
|
||||
supported_platforms = ['osx', 'windows', 'linux'] # Platforms this plugin will run on
|
||||
author = 'DiapDealer, SomeUpdates' # The author of this plugin
|
||||
version = (0, 0, 1) # The version number of this plugin
|
||||
file_types = set(['prc','mobi','azw']) # The file types that this plugin will be applied to
|
||||
on_import = True # Run this plugin during the import
|
||||
priority = 200 # run this plugin before mobidedrm, k4pcdedrm, k4dedrm
|
||||
|
||||
def run(self, path_to_ebook):
|
||||
from calibre.gui2 import is_ok_to_use_qt
|
||||
from PyQt4.Qt import QMessageBox
|
||||
global kindleDatabase
|
||||
global openKindleInfo, CryptUnprotectData, GetUserName, GetVolumeSerialNumber, charMap1, charMap2, charMap3, charMap4
|
||||
if sys.platform.startswith('win'):
|
||||
from k4pcutils import openKindleInfo, CryptUnprotectData, GetUserName, GetVolumeSerialNumber, charMap1, charMap2, charMap3, charMap4
|
||||
if sys.platform.startswith('darwin'):
|
||||
from k4mutils import openKindleInfo, CryptUnprotectData, GetUserName, GetVolumeSerialNumber, charMap1, charMap2, charMap3, charMap4
|
||||
import mobidedrm
|
||||
|
||||
pidnums = self.site_customization
|
||||
|
||||
# first try with the book-specific pid from K4PC or K4M
|
||||
try:
|
||||
kindleDatabase = None
|
||||
ex = MobiPeek(path_to_ebook)
|
||||
if ex.isNotEncrypted():
|
||||
return path_to_ebook
|
||||
title = ex.getBookTitle()
|
||||
exth = ex.getexthData()
|
||||
pid = getK4Pids(exth, title)
|
||||
unlocked_file = mobidedrm.getUnencryptedBook(path_to_ebook,pid)
|
||||
except DrmException:
|
||||
pass
|
||||
except mobidedrm.DrmException:
|
||||
pass
|
||||
else:
|
||||
of = self.temporary_file('.mobi')
|
||||
of.write(unlocked_file)
|
||||
of.close()
|
||||
return of.name
|
||||
|
||||
# now try from the pid list
|
||||
pids = pidnums.split(',')
|
||||
for pid in pids:
|
||||
try:
|
||||
unlocked_file = mobidedrm.getUnencryptedBook(path_to_ebook, pid)
|
||||
except mobidedrm.DrmException:
|
||||
pass
|
||||
else:
|
||||
of = self.temporary_file('.mobi')
|
||||
of.write(unlocked_file)
|
||||
of.close()
|
||||
return of.name
|
||||
|
||||
# if you reached here then no luck, raise an exception
|
||||
if is_ok_to_use_qt():
|
||||
d = QMessageBox(QMessageBox.Warning, "K4MobiDeDRM Plugin", "Error decoding: %s\n" % path_to_ebook)
|
||||
d.show()
|
||||
d.raise_()
|
||||
d.exec_()
|
||||
raise Exception("K4MobiDeDRM plugin could not decode the file")
|
||||
return ""
|
||||
|
||||
def customization_help(self, gui=False):
|
||||
return 'Enter each 10 character PID separated by a comma (no spaces).'
|
||||
319
Calibre_Plugins/k4mobidedrm_plugin/k4mutils.py
Normal file
@@ -0,0 +1,319 @@
|
||||
# standalone set of Mac OS X specific routines needed for K4DeDRM
|
||||
|
||||
from __future__ import with_statement
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
#Exception Handling
|
||||
class K4MDrmException(Exception):
|
||||
pass
|
||||
|
||||
import signal
|
||||
import threading
|
||||
import subprocess
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
|
||||
# **heavily** chopped up and modified version of asyncproc.py
|
||||
# to make it actually work on Windows as well as Mac/Linux
|
||||
# For the original see:
|
||||
# "http://www.lysator.liu.se/~bellman/download/"
|
||||
# author is "Thomas Bellman <bellman@lysator.liu.se>"
|
||||
# available under GPL version 3 or Later
|
||||
|
||||
# create an asynchronous subprocess whose output can be collected in
|
||||
# a non-blocking manner
|
||||
|
||||
# What a mess! Have to use threads just to get non-blocking io
|
||||
# in a cross-platform manner
|
||||
|
||||
# luckily all thread use is hidden within this class
|
||||
|
||||
class Process(object):
|
||||
def __init__(self, *params, **kwparams):
|
||||
if len(params) <= 3:
|
||||
kwparams.setdefault('stdin', subprocess.PIPE)
|
||||
if len(params) <= 4:
|
||||
kwparams.setdefault('stdout', subprocess.PIPE)
|
||||
if len(params) <= 5:
|
||||
kwparams.setdefault('stderr', subprocess.PIPE)
|
||||
self.__pending_input = []
|
||||
self.__collected_outdata = []
|
||||
self.__collected_errdata = []
|
||||
self.__exitstatus = None
|
||||
self.__lock = threading.Lock()
|
||||
self.__inputsem = threading.Semaphore(0)
|
||||
self.__quit = False
|
||||
|
||||
self.__process = subprocess.Popen(*params, **kwparams)
|
||||
|
||||
if self.__process.stdin:
|
||||
self.__stdin_thread = threading.Thread(
|
||||
name="stdin-thread",
|
||||
target=self.__feeder, args=(self.__pending_input,
|
||||
self.__process.stdin))
|
||||
self.__stdin_thread.setDaemon(True)
|
||||
self.__stdin_thread.start()
|
||||
|
||||
if self.__process.stdout:
|
||||
self.__stdout_thread = threading.Thread(
|
||||
name="stdout-thread",
|
||||
target=self.__reader, args=(self.__collected_outdata,
|
||||
self.__process.stdout))
|
||||
self.__stdout_thread.setDaemon(True)
|
||||
self.__stdout_thread.start()
|
||||
|
||||
if self.__process.stderr:
|
||||
self.__stderr_thread = threading.Thread(
|
||||
name="stderr-thread",
|
||||
target=self.__reader, args=(self.__collected_errdata,
|
||||
self.__process.stderr))
|
||||
self.__stderr_thread.setDaemon(True)
|
||||
self.__stderr_thread.start()
|
||||
|
||||
def pid(self):
|
||||
return self.__process.pid
|
||||
|
||||
def kill(self, signal):
|
||||
self.__process.send_signal(signal)
|
||||
|
||||
# check on subprocess (pass in 'nowait') to act like poll
|
||||
def wait(self, flag):
|
||||
if flag.lower() == 'nowait':
|
||||
rc = self.__process.poll()
|
||||
else:
|
||||
rc = self.__process.wait()
|
||||
if rc != None:
|
||||
if self.__process.stdin:
|
||||
self.closeinput()
|
||||
if self.__process.stdout:
|
||||
self.__stdout_thread.join()
|
||||
if self.__process.stderr:
|
||||
self.__stderr_thread.join()
|
||||
return self.__process.returncode
|
||||
|
||||
def terminate(self):
|
||||
if self.__process.stdin:
|
||||
self.closeinput()
|
||||
self.__process.terminate()
|
||||
|
||||
# thread gets data from subprocess stdout
|
||||
def __reader(self, collector, source):
|
||||
while True:
|
||||
data = os.read(source.fileno(), 65536)
|
||||
self.__lock.acquire()
|
||||
collector.append(data)
|
||||
self.__lock.release()
|
||||
if data == "":
|
||||
source.close()
|
||||
break
|
||||
return
|
||||
|
||||
# thread feeds data to subprocess stdin
|
||||
def __feeder(self, pending, drain):
|
||||
while True:
|
||||
self.__inputsem.acquire()
|
||||
self.__lock.acquire()
|
||||
if not pending and self.__quit:
|
||||
drain.close()
|
||||
self.__lock.release()
|
||||
break
|
||||
data = pending.pop(0)
|
||||
self.__lock.release()
|
||||
drain.write(data)
|
||||
|
||||
    # non-blocking read of data from subprocess stdout
    def read(self):
        self.__lock.acquire()
        outdata = "".join(self.__collected_outdata)
        del self.__collected_outdata[:]
        self.__lock.release()
        return outdata

    # non-blocking read of data from subprocess stderr
    def readerr(self):
        self.__lock.acquire()
        errdata = "".join(self.__collected_errdata)
        del self.__collected_errdata[:]
        self.__lock.release()
        return errdata

    # non-blocking write to stdin of subprocess
    def write(self, data):
        if self.__process.stdin is None:
            raise ValueError("Writing to process with stdin not a pipe")
        self.__lock.acquire()
        self.__pending_input.append(data)
        self.__inputsem.release()
        self.__lock.release()

    # close stdin of the subprocess
    def closeinput(self):
        self.__lock.acquire()
        self.__quit = True
        self.__inputsem.release()
        self.__lock.release()

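
# Illustrative usage sketch (not part of the original file): polling a child
# process for output without blocking. The command and sleep interval here are
# only examples.
def _example_process_usage():
    import time
    p = Process('/bin/ls -l', shell=True)
    collected = ''
    while p.wait('nowait') is None:     # poll; returns None while still running
        collected += p.read()           # read() returns '' when nothing is buffered
        time.sleep(0.05)
    collected += p.read()               # pick up anything left after exit
    return collected, p.readerr()
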
# interface to needed routines in openssl's libcrypto
def _load_crypto_libcrypto():
    from ctypes import CDLL, byref, POINTER, c_void_p, c_char_p, c_int, c_long, \
        Structure, c_ulong, create_string_buffer, addressof, string_at, cast
    from ctypes.util import find_library

    libcrypto = find_library('crypto')
    if libcrypto is None:
        raise K4MDrmException('libcrypto not found')
    libcrypto = CDLL(libcrypto)

    AES_MAXNR = 14
    c_char_pp = POINTER(c_char_p)
    c_int_p = POINTER(c_int)

    class AES_KEY(Structure):
        _fields_ = [('rd_key', c_long * (4 * (AES_MAXNR + 1))), ('rounds', c_int)]
    AES_KEY_p = POINTER(AES_KEY)

    def F(restype, name, argtypes):
        func = getattr(libcrypto, name)
        func.restype = restype
        func.argtypes = argtypes
        return func

    AES_cbc_encrypt = F(None, 'AES_cbc_encrypt',
                        [c_char_p, c_char_p, c_ulong, AES_KEY_p, c_char_p, c_int])

    AES_set_decrypt_key = F(c_int, 'AES_set_decrypt_key',
                            [c_char_p, c_int, AES_KEY_p])

    PKCS5_PBKDF2_HMAC_SHA1 = F(c_int, 'PKCS5_PBKDF2_HMAC_SHA1',
                               [c_char_p, c_ulong, c_char_p, c_ulong, c_ulong, c_ulong, c_char_p])

    class LibCrypto(object):
        def __init__(self):
            self._blocksize = 0
            self._keyctx = None
            self.iv = 0

        def set_decrypt_key(self, userkey, iv):
            self._blocksize = len(userkey)
            if (self._blocksize != 16) and (self._blocksize != 24) and (self._blocksize != 32):
                raise K4MDrmException('AES improper key used')
                return
            keyctx = self._keyctx = AES_KEY()
            self.iv = iv
            rv = AES_set_decrypt_key(userkey, len(userkey) * 8, keyctx)
            if rv < 0:
                raise K4MDrmException('Failed to initialize AES key')

        def decrypt(self, data):
            out = create_string_buffer(len(data))
            rv = AES_cbc_encrypt(data, out, len(data), self._keyctx, self.iv, 0)
            if rv == 0:
                raise K4MDrmException('AES decryption failed')
            return out.raw

        def keyivgen(self, passwd):
            salt = '16743'
            saltlen = 5
            passlen = len(passwd)
            iter = 0x3e8
            keylen = 80
            out = create_string_buffer(keylen)
            rv = PKCS5_PBKDF2_HMAC_SHA1(passwd, passlen, salt, saltlen, iter, keylen, out)
            return out.raw

    return LibCrypto


def _load_crypto():
    LibCrypto = None
    try:
        LibCrypto = _load_crypto_libcrypto()
    except (ImportError, K4MDrmException):
        pass
    return LibCrypto


LibCrypto = _load_crypto()

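
# Illustrative sketch (an assumption, not part of this commit): on Python 2.7.8
# or later the same key material could be derived without libcrypto by using
# hashlib.pbkdf2_hmac with the parameters that LibCrypto.keyivgen hard-codes.
def _pbkdf2_sketch(passwd, salt='16743', iterations=0x3e8, keylen=80):
    import hashlib
    # callers would slice the result as key = out[0:32], iv = out[32:48]
    return hashlib.pbkdf2_hmac('sha1', passwd, salt, iterations, keylen)
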
#
# Utility Routines
#

# uses a subprocess to get the hard drive serial number via ioreg;
# returns the first serial number found in that device class
def GetVolumeSerialNumber():
    cmdline = '/usr/sbin/ioreg -r -c AppleAHCIDiskDriver'
    cmdline = cmdline.encode(sys.getfilesystemencoding())
    p = Process(cmdline, shell=True, bufsize=1, stdin=None, stdout=PIPE, stderr=PIPE, close_fds=False)
    poll = p.wait('wait')
    results = p.read()
    reslst = results.split('\n')
    sernum = '9999999999'
    cnt = len(reslst)
    for j in xrange(cnt):
        resline = reslst[j]
        pp = resline.find('"Serial Number" = "')
        if pp >= 0:
            sernum = resline[pp+19:]
            sernum = sernum[:-1]
            sernum = sernum.lstrip()
            break
    return sernum
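
# Illustrative note (hypothetical serial value): a matching ioreg output line
# has the form
#
#     "Serial Number" = "WD-WXA1A80V1234"
#
# the slice above skips the 19-character '"Serial Number" = "' prefix and the
# trailing quote is dropped by [:-1].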

# uses unix env to get username instead of using sysctlbyname
def GetUserName():
    username = os.getenv('USER')
    return username


# Various character maps used to decrypt books. Probably supposed to act as obfuscation
charMap1 = "n5Pr6St7Uv8Wx9YzAb0Cd1Ef2Gh3Jk4M"
charMap2 = "ZB0bYyc1xDdW2wEV3Ff7KkPpL8UuGA4gz-Tme9Nn_tHh5SvXCsIiR6rJjQaqlOoM"
charMap3 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
charMap4 = "ABCDEFGHIJKLMNPQRSTUVWXYZ123456789"


def encode(data, map):
    result = ""
    for char in data:
        value = ord(char)
        Q = (value ^ 0x80) // len(map)
        R = value % len(map)
        result += map[Q]
        result += map[R]
    return result
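
# Illustrative sketch (not part of the original file): an inverse of encode()
# for maps whose length is a power of two (charMap1/charMap2/charMap3); the
# relation does not hold for charMap4, which has 34 entries.
def _decode_sketch(data, map):
    result = ''
    for i in range(0, len(data), 2):
        Q = map.find(data[i])
        R = map.find(data[i + 1])
        value = ((Q * len(map)) ^ 0x80) + R    # undo the ^ 0x80 applied in encode()
        result += chr(value)
    return result
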
import hashlib


def SHA256(message):
    ctx = hashlib.sha256()
    ctx.update(message)
    return ctx.digest()


# implements a pseudo-Mac version of the Windows built-in crypto routine
def CryptUnprotectData(encryptedData):
    sp = GetVolumeSerialNumber() + '!@#' + GetUserName()
    passwdData = encode(SHA256(sp),charMap1)
    crp = LibCrypto()
    key_iv = crp.keyivgen(passwdData)
    key = key_iv[0:32]
    iv = key_iv[32:48]
    crp.set_decrypt_key(key,iv)
    cleartext = crp.decrypt(encryptedData)
    return cleartext

# Locate and open the .kindle-info file
def openKindleInfo():
    home = os.getenv('HOME')
    cmdline = 'find "' + home + '/Library/Application Support" -name ".kindle-info"'
    cmdline = cmdline.encode(sys.getfilesystemencoding())
    p1 = Process(cmdline, shell=True, bufsize=1, stdin=None, stdout=PIPE, stderr=PIPE, close_fds=False)
    poll = p1.wait('wait')
    results = p1.read()
    reslst = results.split('\n')
    kinfopath = 'NONE'
    cnt = len(reslst)
    for j in xrange(cnt):
        resline = reslst[j]
        pp = resline.find('.kindle-info')
        if pp >= 0:
            kinfopath = resline
            break
    if not os.path.exists(kinfopath):
        raise K4MDrmException('Error: .kindle-info file can not be found')
    return open(kinfopath,'r')
107  Calibre_Plugins/k4mobidedrm_plugin/k4pcutils.py  Normal file
@@ -0,0 +1,107 @@
# K4PC Windows specific routines

from __future__ import with_statement

import sys, os

from ctypes import windll, c_char_p, c_wchar_p, c_uint, POINTER, byref, \
    create_unicode_buffer, create_string_buffer, CFUNCTYPE, addressof, \
    string_at, Structure, c_void_p, cast

import _winreg as winreg

import traceback

MAX_PATH = 255

kernel32 = windll.kernel32
advapi32 = windll.advapi32
crypt32 = windll.crypt32


#
# Various character maps used to decrypt books. Probably supposed to act as obfuscation
#
charMap1 = "n5Pr6St7Uv8Wx9YzAb0Cd1Ef2Gh3Jk4M"
charMap2 = "AaZzB0bYyCc1XxDdW2wEeVv3FfUuG4g-TtHh5SsIiR6rJjQq7KkPpL8lOoMm9Nn_"
charMap3 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
charMap4 = "ABCDEFGHIJKLMNPQRSTUVWXYZ123456789"

#
# Exceptions for all the problems that might happen during the script
#
class DrmException(Exception):
    pass


class DataBlob(Structure):
    _fields_ = [('cbData', c_uint),
                ('pbData', c_void_p)]
DataBlob_p = POINTER(DataBlob)


def GetSystemDirectory():
    GetSystemDirectoryW = kernel32.GetSystemDirectoryW
    GetSystemDirectoryW.argtypes = [c_wchar_p, c_uint]
    GetSystemDirectoryW.restype = c_uint
    def GetSystemDirectory():
        buffer = create_unicode_buffer(MAX_PATH + 1)
        GetSystemDirectoryW(buffer, len(buffer))
        return buffer.value
    return GetSystemDirectory
GetSystemDirectory = GetSystemDirectory()

def GetVolumeSerialNumber():
    GetVolumeInformationW = kernel32.GetVolumeInformationW
    GetVolumeInformationW.argtypes = [c_wchar_p, c_wchar_p, c_uint,
                                      POINTER(c_uint), POINTER(c_uint),
                                      POINTER(c_uint), c_wchar_p, c_uint]
    GetVolumeInformationW.restype = c_uint
    def GetVolumeSerialNumber(path = GetSystemDirectory().split('\\')[0] + '\\'):
        vsn = c_uint(0)
        GetVolumeInformationW(path, None, 0, byref(vsn), None, None, None, 0)
        return str(vsn.value)
    return GetVolumeSerialNumber
GetVolumeSerialNumber = GetVolumeSerialNumber()


def GetUserName():
    GetUserNameW = advapi32.GetUserNameW
    GetUserNameW.argtypes = [c_wchar_p, POINTER(c_uint)]
    GetUserNameW.restype = c_uint
    def GetUserName():
        buffer = create_unicode_buffer(32)
        size = c_uint(len(buffer))
        while not GetUserNameW(buffer, byref(size)):
            buffer = create_unicode_buffer(len(buffer) * 2)
            size.value = len(buffer)
        return buffer.value.encode('utf-16-le')[::2]
    return GetUserName
GetUserName = GetUserName()


def CryptUnprotectData():
    _CryptUnprotectData = crypt32.CryptUnprotectData
    _CryptUnprotectData.argtypes = [DataBlob_p, c_wchar_p, DataBlob_p,
                                    c_void_p, c_void_p, c_uint, DataBlob_p]
    _CryptUnprotectData.restype = c_uint
    def CryptUnprotectData(indata, entropy):
        indatab = create_string_buffer(indata)
        indata = DataBlob(len(indata), cast(indatab, c_void_p))
        entropyb = create_string_buffer(entropy)
        entropy = DataBlob(len(entropy), cast(entropyb, c_void_p))
        outdata = DataBlob()
        if not _CryptUnprotectData(byref(indata), None, byref(entropy),
                                   None, None, 0, byref(outdata)):
            raise DrmException("Failed to Unprotect Data")
        return string_at(outdata.pbData, outdata.cbData)
    return CryptUnprotectData
CryptUnprotectData = CryptUnprotectData()

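
# Illustrative usage sketch (hypothetical values): the wrapper above takes the
# raw encrypted record and the per-record entropy string and either returns
# the clear bytes or raises DrmException, e.g.
#
#     cleartext = CryptUnprotectData(encrypted_record, entropy_string)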

#
# Locate and open the Kindle.info file.
#
def openKindleInfo():
    regkey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders\\")
    path = winreg.QueryValueEx(regkey, 'Local AppData')[0]
    return open(path+'\\Amazon\\Kindle For PC\\{AMAwzsaPaaZAzmZzZQzgZCAkZ3AjA_AY}\\kindle.info','r')
@@ -39,8 +39,12 @@
# trailing data byte flags - version 5 and higher AND header size >= 0xE4.
# 0.15 - Now outputs 'heartbeat', and is also quicker for long files.
# 0.16 - And reverts to 'done' not 'done.' at the end for unswindle compatibility.
# 0.17 - added modifications to support its use as an imported python module
#        both inside calibre and also in other places (ie K4DeDRM tools)
# 0.17a - disabled the standalone plugin feature since a plugin cannot import
#         a plugin

__version__ = '0.16'
__version__ = '0.17'

import sys
import struct
@@ -248,7 +252,42 @@ class DrmStripper:
    def getResult(self):
        return self.data_file

if not __name__ == "__main__":
def getUnencryptedBook(infile,pid):
    sys.stdout=Unbuffered(sys.stdout)
    data_file = file(infile, 'rb').read()
    strippedFile = DrmStripper(data_file, pid)
    return strippedFile.getResult()

def main(argv=sys.argv):
    sys.stdout=Unbuffered(sys.stdout)
    print ('MobiDeDrm v%(__version__)s. '
           'Copyright 2008-2010 The Dark Reverser.' % globals())
    if len(argv)<4:
        print "Removes protection from Mobipocket books"
        print "Usage:"
        print " %s <infile> <outfile> <PID>" % sys.argv[0]
        return 1
    else:
        infile = argv[1]
        outfile = argv[2]
        pid = argv[3]
        try:
            stripped_file = getUnencryptedBook(infile, pid)
            file(outfile, 'wb').write(stripped_file)
        except DrmException, e:
            print "Error: %s" % e
            return 1
    return 0


if __name__ == "__main__":
    sys.exit(main())

#if not __name__ == "__main__":
if False:

    # note a calibre plugin cannot import code with another calibre plugin
    # in it as it ends up registering two different plugins
    from calibre.customize import FileTypePlugin

    class MobiDeDRM(FileTypePlugin):
@@ -256,7 +295,7 @@ if not __name__ == "__main__":
        description = 'Removes DRM from secure Mobi files'
        supported_platforms = ['linux', 'osx', 'windows'] # Platforms this plugin will run on
        author = 'The Dark Reverser' # The author of this plugin
        version = (0, 1, 6) # The version number of this plugin
        version = (0, 1, 7) # The version number of this plugin
        file_types = set(['prc','mobi','azw']) # The file types that this plugin will be applied to
        on_import = True # Run this plugin during the import
@@ -270,41 +309,17 @@ if not __name__ == "__main__":
                try:
                    unlocked_file = DrmStripper(data_file, i).getResult()
                except DrmException:
                    # ignore the error
                    pass
                    if is_ok_to_use_qt():
                        d = QMessageBox(QMessageBox.Warning, "MobiDeDRM Plugin", "Error decoding: %s\n" % path_to_ebook)
                        d.show()
                        d.raise_()
                        d.exec_()
                    raise Exception("MobiDeDRM Plugin: Error decoding ebook")
                else:
                    of = self.temporary_file('.mobi')
                    of.write(unlocked_file)
                    of.close()
                    return of.name
            if is_ok_to_use_qt():
                d = QMessageBox(QMessageBox.Warning, "MobiDeDRM Plugin", "Couldn't decode: %s\n\nImporting encrypted version." % path_to_ebook)
                d.show()
                d.raise_()
                d.exec_()
            return path_to_ebook

        def customization_help(self, gui=False):
            return 'Enter PID (separate multiple PIDs with comma)'

if __name__ == "__main__":
    sys.stdout=Unbuffered(sys.stdout)
    print ('MobiDeDrm v%(__version__)s. '
           'Copyright 2008-2010 The Dark Reverser.' % globals())
    if len(sys.argv)<4:
        print "Removes protection from Mobipocket books"
        print "Usage:"
        print " %s <infile> <outfile> <PID>" % sys.argv[0]
        sys.exit(1)
    else:
        infile = sys.argv[1]
        outfile = sys.argv[2]
        pid = sys.argv[3]
        data_file = file(infile, 'rb').read()
        try:
            strippedFile = DrmStripper(data_file, pid)
            file(outfile, 'wb').write(strippedFile.getResult())
        except DrmException, e:
            print "Error: %s" % e
            sys.exit(1)
    sys.exit(0)
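
# Illustrative usage (hypothetical file names and PID; assumes the script is
# saved as mobidedrm.py): the standalone invocation follows the usage string
# printed above, e.g.
#
#     python mobidedrm.py encrypted_book.azw decrypted_book.mobi ABCDEF1234
#
# where the last argument is the book-specific PID.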