The original code, which only supports Python 2, is here:
link to thinkgear.py
I'm trying to edit it to support Python 3. The edited code is here:
import sys
import serial
from io import BytesIO
import struct
from collections import namedtuple
import logging
import logging.handlers
import sys
import time
import datetime
global delta
delta = []
_log = logging.getLogger(__name__)
_bytelog = logging.getLogger(__name__+'.bytes')
_bytelog.propagate = False
fh = logging.FileHandler('spam.log')
fh.setLevel(logging.DEBUG)
_log.addHandler(fh)
class ThinkGearProtocol(object):
def __init__(self, port):
self.serial = serial.Serial(port, 57600)
self.preread = BytesIO()
self.io = self.serial
@staticmethod
def _chksum(packet):
return ~sum(c for c in packet ) & 0xff
def _read(self, n):
buf = self.io.read(n)
if len(buf) < n:
_log.debug('incomplete read, short %s bytes', n - len(buf))
if self.io == self.preread:
_log.debug('end of preread buffer')
# self.preread.reset()
# self.preread.truncate()
# two line comment out
self.io = self.serial
buf += self.io.read(n-len(buf))
if len(buf) < n:
_log.debug('incomplete read, short %s bytes', n - len(buf))
for o in range(0, len(buf), 16):
_bytelog.debug('%04X '+' '.join(('%02X',)*len(buf[o:o+16])), o, *(c for c in buf[o:o+16]))
return buf
def _deread(self, buf):
_log.debug('putting back %s bytes', len(buf))
pos = self.preread.tell()
self.preread.seek(0, 2)
self.preread.write(buf)
self.preread.seek(pos)
self.io = self.preread
def get_packets(self):
last_two = ()
while True:
last_two = last_two[-1:]+(self._read(1),)
# _log.debug('last_two: %r', last_two)
if last_two == (b'\xAA',b'\xAA'):
plen = self._read(1)
if plen >= b'\xAA':
# Bogosity
_log.debug('discarding %r while syncing', last_two[0])
last_two = last_two[-1:]+(plen,)
else:
last_two = ()
packet = self._read(int.from_bytes((plen), byteorder='big'))
# _log.debug(plen)
checksum = self._read(1)
if ord(checksum) == self._chksum(packet):
yield self._decode(packet)
else:
_log.debug('bad checksum')
self._deread(packet+checksum)
elif len(last_two) == 2:
_log.debug('discarding %r while syncing', last_two[0])
def _decode(self, packet):
decoded = []
while packet:
extended_code_level = 0
while len(packet) and packet[0] == '\x55':
extended_code_level += 1
packet = packet[1:]
if len(packet) < 2:
_log.debug('ran out of packet: %r', '\x55'*extended_code_level+packet)
break
code = packet[0]
if code < 0x80:
value = packet[1]
packet = packet[2:]
else:
vlen = packet[1]
if len(packet) < 2+vlen:
_log.debug('ran out of packet: %r', '\x55'*extended_code_level+chr(code)+chr(vlen)+packet)
break
value = packet[2:2+vlen]
packet = packet[2+vlen:]
# _log.debug('extended_code_level is '+str(extended_code_level))
# _log.debug('code is '+str(code))
# _log.debug('data_types is '+str(data_types))
# _log.debug(not extended_code_level and code in data_types)
# _log.debug(not bool(extended_code_level and code in data_types))
# _log.debug((extended_code_level,code) in data_types)
if not bool(extended_code_level and code in data_types):
data = data_types[code](extended_code_level, code, value)
# _log.debug('extended_code_level is '+str(extended_code_level))
# _log.debug('code is '+str(code))
# _log.debug('value is '+str(value))
# _log.debug('data_types is '+str(data_types))
elif (extended_code_level,code) in data_types:
data = data_types[(extended_code_level,code)](extended_code_level, code, value)
else:
data = ThinkGearUnknownData(extended_code_level, code, value)
decoded.append(data)
return decoded
data_types = {}
class ThinkGearMetaClass(type):
def __new__(mcls, name, bases, data):
cls = super(ThinkGearMetaClass, mcls).__new__(mcls, name, bases, data)
code = getattr(cls, 'code', None)
if code:
data_types[code] = cls
extended_code_level = getattr(cls, 'extended_code_level', None)
if extended_code_level:
data_types[(extended_code_level,code)] = cls
return cls
class ThinkGearData(object, metaclass=ThinkGearMetaClass):
def __init__(self, extended_code_level, code, value):
self.extended_code_level = extended_code_level
self.code = code
# self.value = self._decode(value)
self.value = value
# _log.debug('123')
if self._log:
_log.log(self._log, '%s', self)
@staticmethod
def _decode(v):
return v
def __str__(self):
return self._strfmt % vars(self)
# __metaclass__ = ThinkGearMetaClass
_log = logging.DEBUG
class ThinkGearUnknownData(ThinkGearData):
'''???'''
_strfmt = 'Unknown: code=%(code)02X extended_code_level=%(extended_code_level)s %(value)r'
class ThinkGearPoorSignalData(ThinkGearData):
'''POOR_SIGNAL Quality (0-255)'''
code = 0x02
_strfmt = 'POOR SIGNAL: %(value)s'
_decode = staticmethod(ord)
class ThinkGearAttentionData(ThinkGearData):
'''ATTENTION eSense (0 to 100)'''
code = 0x04
_strfmt = 'ATTENTION eSense: %(value)s'
_decode = staticmethod(ord)
class ThinkGearMeditationData(ThinkGearData):
'''MEDITATION eSense (0 to 100)'''
code = 0x05
_strfmt = 'MEDITATION eSense: %(value)s'
_decode = staticmethod(ord)
class ThinkGearRawWaveData(ThinkGearData):
'''RAW Wave Value (-32768 to 32767)'''
code = 0x80
_strfmt = 'Raw Wave: %(value)s'
_decode = staticmethod(lambda v: struct.unpack('>h', v)[0])
# There are lots of these, don't log them by default
_log = False
EEGPowerData = namedtuple('EEGPowerData', 'delta theta lowalpha highalpha lowbeta highbeta lowgamma midgamma')
delta_value = namedtuple('EEGPowerData', 'delta')
class ThinkGearEEGPowerData(ThinkGearData):
'''Eight EEG band power values (0 to 16777215).
delta, theta, low-alpha high-alpha, low-beta, high-beta, low-gamma, and
mid-gamma EEG band power values.
'''
code = 0x83
_strfmt = 'ASIC EEG Power: %(value)r'
_decode = staticmethod(lambda v: EEGPowerData(*struct.unpack('>8L', ''.join( '\x00'+v[o:o+3] for o in range(0, 24, 3)))))
#print(EEGPowerData.delta)
def main():
global packet_log
packet_log = []
logging.basicConfig(level=logging.DEBUG)
for pkt in ThinkGearProtocol('COM3').get_packets():
packet_log.append(pkt)
if __name__ == '__main__':
main()
When running in Python 2, I get a result like this:
DEBUG:__main__:ASIC EEG Power: EEGPowerData(delta=7784, theta=7734, lowalpha=2035, highalpha=1979, lowbeta=2914, highbeta=3996, lowgamma=1944, midgamma=1847
When running in Python 3, the result is like this:
DEBUG:__main__:ASIC EEG Power: b'\x00\xa9\xf1\x00t%\x00\rK\x00\x18"\x00\x16%\x00\x1d6\x00OT\x00\x17\x84'
Does anyone know how I should edit this line of code in order to make it work in Python 3? Thank you.
_decode = staticmethod(lambda v: EEGPowerData(*struct.unpack('>8L', ''.join( '\x00'+v[o:o+3] for o in range(0, 24, 3)))))
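My current (untested) guess is that the join needs to build bytes rather than str, and that self.value = self._decode(value) presumably also has to be restored in ThinkGearData.__init__ for the decode to run at all:
# Untested guess at a Python 3 version of the decoder: pad each 3-byte
# big-endian value to 4 bytes using a bytes join instead of a str join.
_decode = staticmethod(lambda v: EEGPowerData(
    *struct.unpack('>8L', b''.join(b'\x00' + v[o:o+3] for o in range(0, 24, 3)))))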
I'm trying to provide a remote server via Thrift, and below is my server-side attempt:
from test_thrift.test_server.TestService import Iface,Processor
from thrift.Thrift import TType,TException
from thrift.Thrift import TProcessor
from thrift.transport import TSocket
from thrift.protocol import TBinaryProtocol
from thrift.server import TNonblockingServer
port = int(config["server"]["port"])
handler = SmbService()
processor = Processor(handler)
socket = TSocket.TServerSocket(port=port)
server = TNonblockingServer.TNonblockingServer(processor, socket)
server.setNumThreads(100)
server.serve()
On my client side, for every request I create a new connection and close it after getting the response from the server.
from thrift.transport.TSocket import TTransportException
from thrift.Thrift import TException
from thrift.protocol import TBinaryProtocol, TCompactProtocol
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift import Thrift
from test_thrift.test_server import TestService
class SmbClient(object):
def __init__(self):
try:
self.tsocket = TSocket.TSocket(settings.THRIFT_HOST, settings.THRIFT_PORT_PRODUCT)
self.transportFactory = TTransport.TFramedTransportFactory()
self.transport = self.transportFactory.getTransport(self.tsocket)
self.protocol = TBinaryProtocol.TBinaryProtocol(self.transport)
self.client = TestService.Client(self.protocol)
self.transport.open()
except Thrift.TException as e:
self.logger.info(e)
class BaseService(object):
def __init__(self):
self.commonLogger = logging.getLogger('ads')
self.header = "header111"
def search_ads_list(self, request, account_id):
self.smbClient.client.getFBAdsByAccount(param1, param2)
def __enter__(self):
self.smbClient = SmbClient()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.smbClient.transport.close()
Each request call looks like this:
with BaseService() as BaseSingleService:
status, data, info = BaseSingleService.search_ads_list(
request, account_id)
The actual number of requests from the client is not that large, but after a period of time I get an error like:
Traceback (most recent call last):
File "/home/xxx/xxx/src/smb_thrift_server.py", line 2200, in <module>
server.serve()
File "/home/xxx/xxx/venv3/lib/python3.5/site-packages/thrift/server/TNonblockingServer.py", line 350, in serve
self.handle()
File "/home/xxx/xxxx/venv3/lib/python3.5/site-packages/thrift/server/TNonblockingServer.py", line 310, in handle
rset, wset, xset = self._select()
File "/home/luban/smb_thrift_server/venv3/lib/python3.5/site-packages/thrift/server/TNonblockingServer.py", line 302, in _select
return select.select(readable, writable, readable)
ValueError: filedescriptor out of range in select()
Why does this error happen, given that I close the client connection each time after receiving the response?
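To figure out whether descriptors really are piling up in the server process, I added this small Linux-only helper (my own code, not part of Thrift) and log its result periodically; the error suggests some descriptor has climbed past select()'s FD_SETSIZE limit of 1024.
import os

def open_fd_count():
    # /proc/self/fd holds one entry per file descriptor currently open
    # in this process (Linux only), so its length is the open-fd count.
    return len(os.listdir('/proc/self/fd'))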
Update: the TestService code is below:
#
# Autogenerated by Thrift Compiler (0.9.1)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface(object):
"""..."""
def getFBAdsByAccount(self, header, account_id, effective_status, access_token):
"""
Parameters:
- header
- account_id
- effective_status
- access_token
"""
pass
"""..."""
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def getFBAdsByAccount(self, header, account_id, effective_status, access_token):
"""
Parameters:
- header
- account_id
- effective_status
- access_token
"""
self.send_getFBAdsByAccount(header, account_id, effective_status, access_token)
return self.recv_getFBAdsByAccount()
def send_getFBAdsByAccount(self, header, account_id, effective_status, access_token):
self._oprot.writeMessageBegin('getFBAdsByAccount', TMessageType.CALL, self._seqid)
args = getFBAdsByAccount_args()
args.header = header
args.account_id = account_id
args.effective_status = effective_status
args.access_token = access_token
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getFBAdsByAccount(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getFBAdsByAccount_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getFBAdsByAccount failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["getFBAdsByAccount"] = Processor.process_getFBAdsByAccount
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_getFBAdsByAccount(self, seqid, iprot, oprot):
args = getFBAdsByAccount_args()
args.read(iprot)
iprot.readMessageEnd()
result = getFBAdsByAccount_result()
try:
result.success = self._handler.getFBAdsByAccount(args.header, args.account_id, args.effective_status, args.access_token)
except test_thrift.common.ttypes.ServerException as e:
result.e = e
oprot.writeMessageBegin("getFBAdsByAccount", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
class getFBAdsByAccount_args(object):
"""
Attributes:
- header
- account_id
- effective_status
- access_token
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'header', (test_thrift.common.ttypes.RequestHeader, test_thrift.common.ttypes.RequestHeader.thrift_spec), None, ), # 1
(2, TType.STRING, 'account_id', None, None, ), # 2
(3, TType.LIST, 'effective_status', (TType.STRING,None), None, ), # 3
(4, TType.STRING, 'access_token', None, None, ), # 4
)
def __init__(self, header=None, account_id=None, effective_status=None, access_token=None,):
self.header = header
self.account_id = account_id
self.effective_status = effective_status
self.access_token = access_token
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.header = test_thrift.common.ttypes.RequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.account_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.effective_status = []
(_etype24, _size21) = iprot.readListBegin()
for _i25 in range(_size21):
_elem26 = iprot.readString();
self.effective_status.append(_elem26)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.access_token = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getFBAdsByAccount_args')
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 1)
self.header.write(oprot)
oprot.writeFieldEnd()
if self.account_id is not None:
oprot.writeFieldBegin('account_id', TType.STRING, 2)
oprot.writeString(self.account_id)
oprot.writeFieldEnd()
if self.effective_status is not None:
oprot.writeFieldBegin('effective_status', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.effective_status))
for iter27 in self.effective_status:
oprot.writeString(iter27)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.access_token is not None:
oprot.writeFieldBegin('access_token', TType.STRING, 4)
oprot.writeString(self.access_token)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getFBAdsByAccount_result(object):
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (test_thrift.common.ttypes.ResponseListMap, test_thrift.common.ttypes.ResponseListMap.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (test_thrift.common.ttypes.ServerException, test_thrift.common.ttypes.ServerException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = test_thrift.common.ttypes.ResponseListMap()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = test_thrift.common.ttypes.ServerException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getFBAdsByAccount_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
Is there a sure-fire way to check that the class of an object is a subclass of the desired superclass?
For example, in a migration script that I'm writing, I have to convert objects of a given type to dictionaries in a given manner to ensure two-way compatibility of the data.
This is best summed up like so:
Serializable
    User
    Status
    Issue
        Test
        Set
    Step
    Cycle
However, when I'm recursively checking objects after depickling, I receive a Test object that yields the following results:
Testing data object type:
type(data)
{type} <class '__main__.Test'>
Testing Class type:
type(Test())
{type} <class '__main__.Test'>
Testing object type against class type:
type(Test()) == type(data)
{bool}False
Testing if object isinstance() of Class:
isinstance(data, Test)
{bool}False
Testing if Class isinstance() of Super Class:
isinstance(Test(), Serializable)
{bool}True
Testing isinstance() of Super Class:
isinstance(data, Serializable)
{bool}False
Interestingly, there doesn't appear to be any such problem prior to pickling, as it handles the creation of the dictionary and integrity hash just fine.
This only crops up with depickled objects in both Pickle and Dill.
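Stripped right down, the round trip that triggers it looks like this (a sketch of what my cache code below does; cache.bin is just a placeholder path):
import dill

obj = Test()                         # Test -> Issue -> Serializable, defined below
with open('cache.bin', 'wb') as f:
    dill.dump(obj, f)
with open('cache.bin', 'rb') as f:
    restored = dill.load(f)
print(isinstance(restored, Test))    # I expect True, but I get False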
For context, here's the code in its native environment: the DataCache object that is pickled.
class DataCache(object):
_hash=""
_data = None
@staticmethod
def genHash(data):
dataDict = DataCache.dictify(data)
datahash = json.dumps(dataDict, sort_keys=True)
return hashlib.sha256(datahash).digest()
@staticmethod
def dictify(data):
if isinstance(data,list):
datahash = []
for item in data:
datahash.append(DataCache.dictify(item))
elif isinstance(data,(dict, collections.OrderedDict)):
datahash = collections.OrderedDict()
for key,value in datahash.iteritems():
datahash[key]= DataCache.dictify(value)
elif isinstance(data, Serializable):
datahash = data.toDict()
else:
datahash = data
return datahash
def __init__(self, restoreDict = {}):
if restoreDict:
self.__dict__.update(restoreDict)
def __getinitargs__(self):
return (self.__dict__)
def set(self, data):
self._hash = DataCache.genHash(data)
self._data = data
def verify(self):
dataHash = DataCache.genHash(self._data)
return (self._hash == dataHash)
def get(self):
return self._data
Finally, I know there are arguments for using JSON for readability in storage, but I needed Pickle's ability to convert straight to and from objects without my specifying the object type (thanks to the nesting, that's not really feasible).
Am I going mad here or does pickling do something to the class definitions?
EDIT:
Minimal Implementation:
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from aenum import Enum
import json # _tricks
import base64
import argparse
import os
import sys
import datetime
import dill
import hashlib
import collections
class Serializable(object):
def __init__(self, initDict={}):
if initDict:
self.__dict__.update(initDict)
def __str__(self):
return str(self.sortSelf())
def sortSelf(self):
return collections.OrderedDict(sorted(self.__dict__.items()))
def toDict(self):
return self.__dict__
def fromDict(self, dict):
# Not using __dict__.update(...) to avoid polluting objects with the excess data
varMap = self.__dict__
if dict and varMap:
for key in varMap:
if (key in dict):
varMap[key] = dict[key]
self.__dict__.update(varMap)
return self
return None
class Issue(Serializable):
def __init__(self, initDict={}):
self.id = 0
self.key = ""
self.fields = {}
if initDict:
self.__dict__.update(initDict)
Serializable.__init__(self)
def fieldToDict(self, obj, key, type):
if key in obj:
result = obj[key]
else:
return None
if result is None:
return None
if isinstance(result, type):
return result.toDict()
return result
def fromDict(self, jsonDict):
super(Issue, self).fromDict(jsonDict)
self.fields["issuetype"] = IssueType().fromDict(self.fields["issuetype"])
self.fields["assignee"] = User().fromDict(self.fields["assignee"])
self.fields["creator"] = User().fromDict(self.fields["creator"])
self.fields["reporter"] = User().fromDict(self.fields["reporter"])
return self
def toDict(self):
result = super(Issue, self).toDict()
blankKeys = []
for fieldName, fieldValue in self.fields.iteritems():
if fieldValue is None:
blankKeys.append(fieldName)
if blankKeys:
for key in blankKeys:
self.fields.pop(key, None)
result["fields"]["issuetype"] = self.fieldToDict(result["fields"], "issuetype", IssueType)
result["fields"]["creator"] = self.fieldToDict(result["fields"], "creator", User)
result["fields"]["reporter"] = self.fieldToDict(result["fields"], "reporter", User)
result["fields"]["assignee"] = self.fieldToDict(result["fields"], "assignee", User)
return result
class IssueType(Serializable):
def __init__(self):
self.id = 0
self.name = ""
def toDict(self):
return {"id": str(self.id)}
class Project(Serializable):
def __init__(self):
Serializable.__init__(self)
self.id = 0
self.name = ""
self.key = ""
class Cycle(Serializable):
def __init__(self):
self.id = 0
self.name = ""
self.totalExecutions = 0
self.endDate = ""
self.description = ""
self.totalExecuted = 0
self.started = ""
self.versionName = ""
self.projectKey = ""
self.versionId = 0
self.environment = ""
self.totalCycleExecutions = 0
self.build = ""
self.ended = ""
self.name = ""
self.modifiedBy = ""
self.projectId = 0
self.startDate = ""
self.executionSummaries = {'executionSummary': []}
class Step(Serializable):
def __init__(self):
self.id = ""
self.orderId = 0
self.step = ""
self.data = ""
self.result = ""
self.attachmentsMap = {}
def toDict(self):
dict = {}
dict["step"] = self.step
dict["data"] = self.data
dict["result"] = self.result
dict["attachments"] = []
return dict
class Status(Serializable):
def __init__(self):
self.id = 0
self.name = ""
self.description = ""
self.isFinal = True
self.color = ""
self.isNative = True
self.statusCount = 0
self.statusPercent = 0.0
class User(Serializable):
def __init__(self):
self.displayName = ""
self.name = ""
self.emailAddress = ""
self.key = ""
self.active = False
self.timeZone = ""
class Execution(Serializable):
def __init__(self):
self.id = 0
self.orderId = 0
self.cycleId = -1
self.cycleName = ""
self.issueId = 0
self.issueKey = 0
self.projectKey = ""
self.comment = ""
self.versionId = 0,
self.versionName = "",
self.executedOn = ""
self.creationDate = ""
self.executedByUserName = ""
self.assigneeUserName = ""
self.status = {}
self.executionStatus = ""
def fromDict(self, jsonDict):
super(Execution, self).fromDict(jsonDict)
self.status = Status().fromDict(self.status)
# This is already listed as Execution Status, need to associate and convert!
return self
def toDict(self):
result = super(Execution, self).toDict()
result['status'] = result['status'].toDict()
return result
class ExecutionContainer(Serializable):
def __init__(self):
self.executions = []
def fromDict(self, jsonDict):
super(ExecutionContainer, self).fromDict(jsonDict)
self.executions = []
for executionDict in jsonDict["executions"]:
self.executions.append(Execution().fromDict(executionDict))
return self
class Test(Issue):
def __init__(self, initDict={}):
if initDict:
self.__dict__.update(initDict)
Issue.__init__(self)
def toDict(self):
result = super(Test, self).toDict()
stepField = "CustomField_0001"
if result["fields"][stepField]:
steps = []
for step in result["fields"][stepField]["steps"]:
steps.append(step.toDict())
result["fields"][stepField] = steps
return result
def fromDict(self, jsonDict):
super(Test, self).fromDict(jsonDict)
stepField = "CustomField_0001"
steps = []
if stepField in self.fields:
for step in self.fields[stepField]["steps"]:
steps.append(Step().fromDict(step))
self.fields[stepField] = {"steps": steps}
return self
class Set(Issue):
def __init__(self, initDict={}):
self.__dict__.update(initDict)
Issue.__init__(self)
class DataCache(object):
_hash = ""
_data = None
@staticmethod
def genHash(data):
dataDict = DataCache.dictify(data)
datahash = json.dumps(dataDict, sort_keys=True)
return hashlib.sha256(datahash).digest()
@staticmethod
def dictify(data):
if isinstance(data, list):
datahash = []
for item in data:
datahash.append(DataCache.dictify(item))
elif isinstance(data, (dict, collections.OrderedDict)):
datahash = collections.OrderedDict()
for key, value in datahash.iteritems():
datahash[key] = DataCache.dictify(value)
elif isinstance(data, Serializable):
datahash = data.toDict()
else:
datahash = data
return datahash
def __init__(self, restoreDict={}):
if restoreDict:
self.__dict__.update(restoreDict)
def __getinitargs__(self):
return (self.__dict__)
def set(self, data):
self._hash = DataCache.genHash(data)
self._data = data
def verify(self):
dataHash = DataCache.genHash(self._data)
return (self._hash == dataHash)
def get(self):
return self._data
def saveCache(name, projectKey, object):
filePath = "migration_caches/{projectKey}".format(projectKey=projectKey)
if not os.path.exists(path=filePath):
os.makedirs(filePath)
cache = DataCache()
cache.set(object)
targetFile = open("{path}/{name}".format(name=name, path=filePath), 'wb')
dill.dump(obj=cache, file=targetFile)
targetFile.close()
def loadCache(name, projectKey):
filePath = "migration_caches/{projectKey}/{name}".format(name=name, projectKey=projectKey)
result = False
try:
targetFile = open(filePath, 'rb')
try:
cache = dill.load(targetFile)
if isinstance(cache, DataCache):
if cache.verify():
result = cache.get()
except EOFError:
# except BaseException:
print ("Failed to load cache from file: {filePath}\n".format(filePath=filePath))
except IOError:
("Failed to load cache file at: {filePath}\n".format(filePath=filePath))
targetFile.close()
return result
testIssue = Test().fromDict({"id": 1000,
"key": "TEST",
"fields": {
"issuetype": {
"id": 1,
"name": "TestIssue"
},
"assignee": "Minothor",
"reporter": "Minothor",
"creator": "Minothor",
}
})
saveCache("Test", "TestProj", testIssue)
result = loadCache("Test", "TestProj")
EDIT 2
The script, in its current form, now seems to work correctly with vanilla Pickle (I initially switched to Dill due to a similar issue, which was solved by the switch).
However, if you are here with this issue and require Dill's features, then, as Mike noted in the comments, it's possible to change the settings in dill.settings so that Dill pickles referenced items only (as in joblib mode), effectively mirroring pickle's standard pickling behaviour.
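As far as I can tell (I've only verified the one option I needed), the switch lives in the dill.settings dictionary:
import dill

# Serialise classes/functions by reference, the way pickle does, instead of
# serialising their definitions; this is my reading of Mike's suggestion.
dill.settings['byref'] = True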
The code is supposed to decrypt and encrypt a given save file, but I can't figure out how to feed a file into it.
import array
import hashlib
import random
from Crypto.Cipher import Blowfish
MH3G_JP = 0
MH3G_NA = 1
MH3G_EU = 2
MH4_JP = 3
MH4_NA = 4
MH4_EU = 5
MH4G_JP = 6
MH4G_NA = 7
MH4G_EU = 8
class SavedataCipher:
def __init__(self, game):
if game in (MH4G_JP, MH4G_NA, MH4G_EU):
self._cipher = Blowfish.new(b'blowfish key iorajegqmrna4itjeangmb agmwgtobjteowhv9mope')
else:
raise ValueError('Invalid game selected.')
def _xor(self, buff, key):
buff = array.array('H', buff)
for i in range(len(buff)):
if key == 0:
key = 1
key = key * 0xb0 % 0xff53
buff[i] ^= key
return buff.tostring()
def encrypt(self, buff):
csum = sum(buff) & 0xffffffff
buff = array.array('I', buff)
buff.insert(0, csum)
seed = random.getrandbits(16)
buff = array.array('I', self._xor(buff.tostring(), seed))
buff.insert(0, (seed << 16) + 0x10)
buff.byteswap()
buff = array.array('I', self._cipher.encrypt(buff.tostring()))
buff.byteswap()
return buff.tostring()
def decrypt(self, buff):
buff = array.array('I', buff)
buff.byteswap()
buff = array.array('I', self._cipher.decrypt(buff.tostring()))
buff.byteswap()
seed = buff.pop(0) >> 16
buff = array.array('I', self._xor(buff.tostring(), seed))
csum = buff.pop(0)
buff = buff.tostring()
if csum != (sum(buff) & 0xffffffff):
raise ValueError('Invalid checksum in header.')
return buff
def encrypt_file(self, savedata_file, out_file):
savedata = open(savedata_file, 'rb').read()
savedata = self.encrypt(savedata)
open(out_file, 'wb').write(savedata)
def decrypt_file(self, savedata_file, out_file):
savedata = open(savedata_file, 'rb').read()
savedata = self.decrypt(savedata)
open(out_file, 'wb').write(savedata)
This code defines a class; you need to write code that instantiates it and calls its methods. You can do that in the same file, or in a different file that imports the class.
if __name__ == "__main__":
cipher = SavedataCipher(MH4G_JP) # or use different parameter
# do something else
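For example, to round-trip a save file using the methods the class already provides (the file names here are just placeholders):
cipher = SavedataCipher(MH4G_JP)
cipher.decrypt_file('savedata_encrypted.bin', 'savedata_decrypted.bin')    # encrypted -> plain
cipher.encrypt_file('savedata_decrypted.bin', 'savedata_reencrypted.bin')  # plain -> encrypted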
I'm attempting to create a source client for icecast2/shoutcast, but after compiling everything I've run into a segmentation fault. After further debugging with gdb I got a more detailed error.
I do not know C at all, so I'm not sure what to make of this error:
Program received signal SIGSEGV, Segmentation fault.
send_mp3 (self=0x988eb0, buff=0xa5c154 "") at mp3.c:175
175     mp3.c: No such file or directory.
I thought maybe the loop was using too many resources, but no matter how long a time.sleep() I added, I still got the same result.
import random
import shout
from pyModules import db
from pyModules import error
import ID3
import time
import sys
import glob
class Audio(object):
def __init__(self):
self.count = 0
self.nbuf = 0
self.buf = 0
self.shout = shout.Shout()
self.db = db.database()
self.songs = self.load()
def load(self):
return glob.glob("%s*%s" % (self.config('directory'), self.config('ext')))
def start(self):
self.running = True
self.shout.host = self.config('host')
self.shout.port = self.config('port')
self.shout.mount = self.config('mount')
self.shout.protocol = self.config('protocol')
self.shout.user = self.config('user')
self.shout.password = self.config('password')
self.shout.name = self.config('name')
self.shout.format = self.config('format')
self.shout.genre = self.config('genre')
self.shout.url = self.config('url')
self.shout.public = self.config('public')
self.shout.description = self.config('description')
self.songs = self.load()
self.shout.open()
def cShuffle(self):
sh = self.getSettings(1, key='shuffle')
if sh == 1:
random.shuffle(self.songs)
def cNext(self):
n = self.getSettings(1, key='setSong')
if n == 1:
self.stop()
self.db.setNext(0)
self.Change()
def cPrev(self):
p = self.getSettings(1, key='prevSong')
if p == 1:
self.stop()
if self.count == 0:
self.count -= 1
self.db.setPrev(0)
self.Change()
else:
self.count -= 2
self.Change()
def cReload(self):
r = self.getSettings(1, key='reload')
if r == 1:
self.songs = self.load()
def check(self):
self.cShuffle()
self.cNext()
self.cPrev()
self.cReload()
def getSettings(self, mode=0, key=None):
return self.db.getSettings(mode, key)
def config(self, value):
return self.db.config(value)
def getTitle(self, File, mode=0):
try:
song = ID3.ID3(File)
title = song["TITLE"]
except:
title = "unknown"
title = title.replace("'", "")
if mode == 0:
self.db.setSongTitle(title)
return title
elif mode == 1:
self.db.setNextSongTitle(title)
return title
elif mode == 2:
self.db.setPrevSongTitle(title)
def sendBlankFile(self):
File = open('/home/radio/AudioServer/bin/blank.mp3').read()
self.shout.send(File)
def stop(self):
self.buf = 0
self.nbuf = 0
self.running = 0
self.sendBlankFile()
def Change(self):
self.stop()
if len(self.songs) >= self.count: self.count = 0
else: self.count += 1
song = self.songs[self.count]
psong = self.songs[self.count - 1]
nsong = self.songs[self.count + 1]
self.getTitle(song, mode=0)
self.getTitle(nsong, mode=1)
self.getTitle(psong, mode=2)
self.play()
def play(self):
song = open(self.songs[self.count])
cs = self.songs[self.count]
self.shout.set_metadata({'song': self.getTitle(cs)})
total = 0
st = time.time()
self.nbuf = song.read(4096)
while self.running:
self.check()
self.buf = self.nbuf
self.nbuf = song.read(4096)
self.buf = self.nbuf
total = total + len(self.buf)
if len(self.buf) == 0:
self.running = False
self.Change()
self.shout.send(self.buf)
self.shout.sync()
if __name__ == "__main__":
Server = Audio()
default = Server.config('default')
Server.db.clear(default)
Server.start()
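For reference, the basic libshout send pattern that play() boils down to looks like this (my own stripped-down sketch: conn stands for an already-opened shout.Shout connection, and the file name is a placeholder):
def stream_one_file(conn, path='song.mp3'):
    with open(path, 'rb') as f:          # read the mp3 as raw bytes
        chunk = f.read(4096)
        while chunk:
            conn.send(chunk)             # push the chunk to the server
            conn.sync()                  # let libshout pace the stream
            chunk = f.read(4096)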
The issue was indeed a compile problem with libshout, as cox pointed out. But it only worked on Debian 7 and not Ubuntu 12. I think the reason is that I did not install libogg on Ubuntu; I only installed vorbis, which I thought was the same thing. I also installed the mp3 codecs just in case.
Dpkt is a Python packet creation and parsing library: https://code.google.com/p/dpkt/
The project lacks documentation for beginners. I am trying to document it and write example code for everything. Based on my knowledge of Python, I am having difficulty understanding some of the source code. Here, for example, is the RTP (Real-Time Transport Protocol) module:
https://code.google.com/p/dpkt/source/browse/trunk/dpkt/rtp.py #rtp.py source code
# $Id$
"""Real-Time Transport Protocol"""
from dpkt import Packet
# version 1100 0000 0000 0000 ! 0xC000 14
# p 0010 0000 0000 0000 ! 0x2000 13
# x 0001 0000 0000 0000 ! 0x1000 12
# cc 0000 1111 0000 0000 ! 0x0F00 8
# m 0000 0000 1000 0000 ! 0x0080 7
# pt 0000 0000 0111 1111 ! 0x007F 0
#
_VERSION_MASK= 0xC000
_P_MASK = 0x2000
_X_MASK = 0x1000
_CC_MASK = 0x0F00
_M_MASK = 0x0080
_PT_MASK = 0x007F
_VERSION_SHIFT=14
_P_SHIFT = 13
_X_SHIFT = 12
_CC_SHIFT = 8
_M_SHIFT = 7
_PT_SHIFT = 0
VERSION = 2
class RTP(Packet):
__hdr__ = (
('_type', 'H', 0x8000),
('seq', 'H', 0),
('ts', 'I', 0),
('ssrc', 'I', 0),
)
csrc = ''
def _get_version(self): return (self._type&_VERSION_MASK)>>_VERSION_SHIFT
def _set_version(self, ver):
self._type = (ver << _VERSION_SHIFT) | (self._type & ~_VERSION_MASK)
def _get_p(self): return (self._type & _P_MASK) >> _P_SHIFT
def _set_p(self, p): self._type = (p << _P_SHIFT) | (self._type & ~_P_MASK)
def _get_x(self): return (self._type & _X_MASK) >> _X_SHIFT
def _set_x(self, x): self._type = (x << _X_SHIFT) | (self._type & ~_X_MASK)
def _get_cc(self): return (self._type & _CC_MASK) >> _CC_SHIFT
def _set_cc(self, cc): self._type = (cc<<_CC_SHIFT)|(self._type&~_CC_MASK)
def _get_m(self): return (self._type & _M_MASK) >> _M_SHIFT
def _set_m(self, m): self._type = (m << _M_SHIFT) | (self._type & ~_M_MASK)
def _get_pt(self): return (self._type & _PT_MASK) >> _PT_SHIFT
def _set_pt(self, m): self._type = (m << _PT_SHIFT)|(self._type&~_PT_MASK)
version = property(_get_version, _set_version)
p = property(_get_p, _set_p)
x = property(_get_x, _set_x)
cc = property(_get_cc, _set_cc)
m = property(_get_m, _set_m)
pt = property(_get_pt, _set_pt)
def __len__(self):
return self.__hdr_len__ + len(self.csrc) + len(self.data)
def __str__(self):
return self.pack_hdr() + self.csrc + str(self.data)
def unpack(self, buf):
super(RTP, self).unpack(buf)
self.csrc = buf[self.__hdr_len__:self.__hdr_len__ + self.cc * 4]
self.data = buf[self.__hdr_len__ + self.cc * 4:]
With this code, I was able to do the following in an IPython shell:
In [37]: import dpkt
In [38]: rtp_pkt=dpkt.rtp.RTP()
In [39]: rtp_pkt.pack_hdr()
Out[39]: '\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
In [47]: rtp_pkt.data="HelloWorld"
In [48]: rtp_pkt.pack()
Out[48]: '\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00HelloWorld'
Based on my understanding of classes in Python, there should be an __init__ function in the class, but I don't see one in rtp.py, so I am wondering how the above IPython shell commands worked. Why does the __hdr__ variable in rtp.py start (and end) with double underscores, and why is every class method preceded by a single underscore? I know this could be to make them private or semi-private, but does it have to be that way?
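My working theory is that RTP simply inherits Packet.__init__, and the per-class header handling comes from the _MetaPacket metaclass; a toy example of that metaclass pattern (my own illustration, not dpkt code) would be:
# Toy illustration (mine, not dpkt): the metaclass inspects __hdr__ when each
# class is created and attaches derived attributes, so subclasses like RTP
# need no __init__ of their own. Python 2 syntax, to match dpkt.
class Meta(type):
    def __new__(mcls, name, bases, clsdict):
        cls = type.__new__(mcls, name, bases, clsdict)
        hdr = getattr(cls, '__hdr__', None)
        if hdr is not None:
            cls.__hdr_fields__ = [field[0] for field in hdr]
        return cls

class Base(object):
    __metaclass__ = Meta

class Child(Base):
    __hdr__ = (('seq', 'H', 0),)

print(Child.__hdr_fields__)   # prints ['seq']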
I know that the RTP class is derived from Packet, whose source code is also pasted here for convenience.
# $Id$
"""Simple packet creation and parsing."""
import copy, itertools, socket, struct
class Error(Exception): pass
class UnpackError(Error): pass
class NeedData(UnpackError): pass
class PackError(Error): pass
class _MetaPacket(type):
def __new__(cls, clsname, clsbases, clsdict):
t = type.__new__(cls, clsname, clsbases, clsdict)
st = getattr(t, '__hdr__', None)
if st is not None:
# XXX - __slots__ only created in __new__()
clsdict['__slots__'] = [ x[0] for x in st ] + [ 'data' ]
t = type.__new__(cls, clsname, clsbases, clsdict)
t.__hdr_fields__ = [ x[0] for x in st ]
t.__hdr_fmt__ = getattr(t, '__byte_order__', '>') + \
''.join([ x[1] for x in st ])
t.__hdr_len__ = struct.calcsize(t.__hdr_fmt__)
t.__hdr_defaults__ = dict(zip(
t.__hdr_fields__, [ x[2] for x in st ]))
return t
class Packet(object):
"""Base packet class, with metaclass magic to generate members from
self.__hdr__.
__hdr__ should be defined as a list of (name, structfmt, default) tuples
__byte_order__ can be set to override the default ('>')
Example::
>>> class Foo(Packet):
... __hdr__ = (('foo', 'I', 1), ('bar', 'H', 2), ('baz', '4s', 'quux'))
...
>>> foo = Foo(bar=3)
>>> foo
Foo(bar=3)
>>> str(foo)
'\x00\x00\x00\x01\x00\x03quux'
>>> foo.bar
3
>>> foo.baz
'quux'
>>> foo.foo = 7
>>> foo.baz = 'whee'
>>> foo
Foo(baz='whee', foo=7, bar=3)
>>> Foo('hello, world!')
Foo(baz=' wor', foo=1751477356L, bar=28460, data='ld!')
"""
__metaclass__ = _MetaPacket
def __init__(self, *args, **kwargs):
"""Packet constructor with ([buf], [field=val,...]) prototype.
Arguments:
buf -- optional packet buffer to unpack
Optional keyword arguments correspond to members to set
(matching fields in self.__hdr__, or 'data').
"""
self.data = ''
if args:
try:
self.unpack(args[0])
except struct.error:
if len(args[0]) < self.__hdr_len__:
raise NeedData
raise UnpackError('invalid %s: %r' %
(self.__class__.__name__, args[0]))
else:
for k in self.__hdr_fields__:
setattr(self, k, copy.copy(self.__hdr_defaults__[k]))
for k, v in kwargs.iteritems():
setattr(self, k, v)
def __len__(self):
return self.__hdr_len__ + len(self.data)
def __getitem__(self, k):
try: return getattr(self, k)
except AttributeError: raise KeyError
def __repr__(self):
l = [ '%s=%r' % (k, getattr(self, k))
for k in self.__hdr_defaults__
if getattr(self, k) != self.__hdr_defaults__[k] ]
if self.data:
l.append('data=%r' % self.data)
return '%s(%s)' % (self.__class__.__name__, ', '.join(l))
def __str__(self):
return self.pack_hdr() + str(self.data)
def pack_hdr(self):
"""Return packed header string."""
try:
return struct.pack(self.__hdr_fmt__,
*[ getattr(self, k) for k in self.__hdr_fields__ ])
except struct.error:
vals = []
for k in self.__hdr_fields__:
v = getattr(self, k)
if isinstance(v, tuple):
vals.extend(v)
else:
vals.append(v)
try:
return struct.pack(self.__hdr_fmt__, *vals)
except struct.error, e:
raise PackError(str(e))
def pack(self):
"""Return packed header + self.data string."""
return str(self)
def unpack(self, buf):
"""Unpack packet header fields from buf, and set self.data."""
for k, v in itertools.izip(self.__hdr_fields__,
struct.unpack(self.__hdr_fmt__, buf[:self.__hdr_len__])):
setattr(self, k, v)
self.data = buf[self.__hdr_len__:]
# XXX - ''.join([(len(`chr(x)`)==3) and chr(x) or '.' for x in range(256)])
__vis_filter = """................................ !"#$%&\'()*+,-./0123456789:;<=>?#ABCDEFGHIJKLMNOPQRSTUVWXYZ[.]^_`abcdefghijklmnopqrstuvwxyz{|}~................................................................................................................................."""
def hexdump(buf, length=16):
"""Return a hexdump output string of the given buffer."""
n = 0
res = []
while buf:
line, buf = buf[:length], buf[length:]
hexa = ' '.join(['%02x' % ord(x) for x in line])
line = line.translate(__vis_filter)
res.append(' %04d: %-*s %s' % (n, length * 3, hexa, line))
n += length
return '\n'.join(res)
try:
import dnet
def in_cksum_add(s, buf):
return dnet.ip_cksum_add(buf, s)
def in_cksum_done(s):
return socket.ntohs(dnet.ip_cksum_carry(s))
except ImportError:
import array
def in_cksum_add(s, buf):
n = len(buf)
cnt = (n / 2) * 2
a = array.array('H', buf[:cnt])
if cnt != n:
a.append(struct.unpack('H', buf[-1] + '\x00')[0])
return s + sum(a)
def in_cksum_done(s):
s = (s >> 16) + (s & 0xffff)
s += (s >> 16)
return socket.ntohs(~s & 0xffff)
def in_cksum(buf):
"""Return computed Internet checksum."""
return in_cksum_done(in_cksum_add(0, buf))
The question is: how can I really understand this source code so that its documentation is done correctly?
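For the documentation, I'm planning to include a small round-trip example based on the shell session above (Python 2, which is what this version of dpkt targets):
import dpkt

pkt = dpkt.rtp.RTP()
pkt.data = 'HelloWorld'
wire = pkt.pack()              # packed header followed by the payload

parsed = dpkt.rtp.RTP(wire)    # Packet.__init__ calls unpack(buf) on the buffer
assert parsed.version == 2     # default _type 0x8000 -> version bits == 2
assert parsed.data == 'HelloWorld'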