I'm trying to provide a remote service with Thrift, and below is my server-side attempt:
from test_thrift.test_server.TestService import Iface,Processor
from thrift.Thrift import TType,TException
from thrift.Thrift import TProcessor
from thrift.transport import TSocket
from thrift.protocol import TBinaryProtocol
from thrift.server import TNonblockingServer
port = int(config["server"]["port"])
handler = SmbService()  # my Iface implementation (code not shown here)
processor = Processor(handler)
socket = TSocket.TServerSocket(port=port)
server = TNonblockingServer.TNonblockingServer(processor, socket)
server.setNumThreads(100)
server.serve()
On the client side, I create a new connection for every request and close it after getting the response from the server.
from thrift.transport.TSocket import TTransportException
from thrift.Thrift import TException
from thrift.protocol import TBinaryProtocol, TCompactProtocol
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift import Thrift
from test_thrift.test_server import TestService
class SmbClient(object):
def __init__(self):
try:
self.tsocket = TSocket.TSocket(settings.THRIFT_HOST, settings.THRIFT_PORT_PRODUCT)
self.transportFactory = TTransport.TFramedTransportFactory()
self.transport = self.transportFactory.getTransport(self.tsocket)
self.protocol = TBinaryProtocol.TBinaryProtocol(self.transport)
self.client = TestService.Client(self.protocol)
self.transport.open()
except Thrift.TException as e:
self.logger.info(e)
class BaseService(object):
def __init__(self):
self.commonLogger = logging.getLogger('ads')
self.header = "header111"
def search_ads_list(self, request, account_id):
self.smbClient.client.getFBAdsByAccount(param1, param2)
def __enter__(self):
self.smbClient = SmbClient()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.smbClient.transport.close()
Each request call looks like this:
with BaseService() as BaseSingleService:
status, data, info = BaseSingleService.search_ads_list(
request, account_id)
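One more detail worth noting: SmbClient.__init__ swallows TException after logging it, so transport.open() may have failed without the caller knowing, and __exit__ would then close an unopened transport. A more defensive __exit__ (just a sketch, relying on TTransport's isOpen()) could look like this:

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the transport only if it was actually opened; a failed
        # open() in SmbClient.__init__ is caught and logged there, so it
        # would otherwise only surface here.
        transport = getattr(self.smbClient, 'transport', None)
        if transport is not None and transport.isOpen():
            transport.close()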
The actual volume of requests from the client is not that large, but after a period of time I get an error like this:
Traceback (most recent call last):
File "/home/xxx/xxx/src/smb_thrift_server.py", line 2200, in <module>
server.serve()
File "/home/xxx/xxx/venv3/lib/python3.5/site-packages/thrift/server/TNonblockingServer.py", line 350, in serve
self.handle()
File "/home/xxx/xxxx/venv3/lib/python3.5/site-packages/thrift/server/TNonblockingServer.py", line 310, in handle
rset, wset, xset = self._select()
File "/home/luban/smb_thrift_server/venv3/lib/python3.5/site-packages/thrift/server/TNonblockingServer.py", line 302, in _select
return select.select(readable, writable, readable)
ValueError: filedescriptor out of range in select()
Why does this error happen, given that I close the client connection each time after receiving the response?
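For what it's worth, select() can only watch file descriptors below FD_SETSIZE (usually 1024), so the traceback suggests the server process is accumulating descriptors until one exceeds that limit. A quick way to check whether they really pile up (a sketch using psutil, which is not part of the code above; the PID is a placeholder) would be:

import psutil  # diagnostic only, not used by the server itself

server_pid = 12345  # placeholder: the PID of the running smb_thrift_server.py
proc = psutil.Process(server_pid)
print('open file descriptors:', proc.num_fds())             # Unix only
print('open sockets:', len(proc.connections(kind='inet')))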
Update: the generated TestService code is below:
#
# Autogenerated by Thrift Compiler (0.9.1)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface(object):
"""..."""
def getFBAdsByAccount(self, header, account_id, effective_status, access_token):
"""
Parameters:
- header
- account_id
- effective_status
- access_token
"""
pass
"""..."""
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def getFBAdsByAccount(self, header, account_id, effective_status, access_token):
"""
Parameters:
- header
- account_id
- effective_status
- access_token
"""
self.send_getFBAdsByAccount(header, account_id, effective_status, access_token)
return self.recv_getFBAdsByAccount()
def send_getFBAdsByAccount(self, header, account_id, effective_status, access_token):
self._oprot.writeMessageBegin('getFBAdsByAccount', TMessageType.CALL, self._seqid)
args = getFBAdsByAccount_args()
args.header = header
args.account_id = account_id
args.effective_status = effective_status
args.access_token = access_token
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getFBAdsByAccount(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getFBAdsByAccount_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getFBAdsByAccount failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["getFBAdsByAccount"] = Processor.process_getFBAdsByAccount
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_getFBAdsByAccount(self, seqid, iprot, oprot):
args = getFBAdsByAccount_args()
args.read(iprot)
iprot.readMessageEnd()
result = getFBAdsByAccount_result()
try:
result.success = self._handler.getFBAdsByAccount(args.header, args.account_id, args.effective_status, args.access_token)
except test_thrift.common.ttypes.ServerException as e:
result.e = e
oprot.writeMessageBegin("getFBAdsByAccount", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
class getFBAdsByAccount_args(object):
"""
Attributes:
- header
- account_id
- effective_status
- access_token
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'header', (test_thrift.common.ttypes.RequestHeader, test_thrift.common.ttypes.RequestHeader.thrift_spec), None, ), # 1
(2, TType.STRING, 'account_id', None, None, ), # 2
(3, TType.LIST, 'effective_status', (TType.STRING,None), None, ), # 3
(4, TType.STRING, 'access_token', None, None, ), # 4
)
def __init__(self, header=None, account_id=None, effective_status=None, access_token=None,):
self.header = header
self.account_id = account_id
self.effective_status = effective_status
self.access_token = access_token
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.header = test_thrift.common.ttypes.RequestHeader()
self.header.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.account_id = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.effective_status = []
(_etype24, _size21) = iprot.readListBegin()
for _i25 in range(_size21):
_elem26 = iprot.readString();
self.effective_status.append(_elem26)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.access_token = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getFBAdsByAccount_args')
if self.header is not None:
oprot.writeFieldBegin('header', TType.STRUCT, 1)
self.header.write(oprot)
oprot.writeFieldEnd()
if self.account_id is not None:
oprot.writeFieldBegin('account_id', TType.STRING, 2)
oprot.writeString(self.account_id)
oprot.writeFieldEnd()
if self.effective_status is not None:
oprot.writeFieldBegin('effective_status', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.effective_status))
for iter27 in self.effective_status:
oprot.writeString(iter27)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.access_token is not None:
oprot.writeFieldBegin('access_token', TType.STRING, 4)
oprot.writeString(self.access_token)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getFBAdsByAccount_result(object):
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (test_thrift.common.ttypes.ResponseListMap, test_thrift.common.ttypes.ResponseListMap.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (test_thrift.common.ttypes.ServerException, test_thrift.common.ttypes.ServerException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = test_thrift.common.ttypes.ResponseListMap()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = test_thrift.common.ttypes.ServerException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getFBAdsByAccount_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
The original code, which only supports Python 2, is here:
link to thinkgear.py
I'm trying to edit it to support Python 3. The edited code is here:
import sys
import serial
from io import BytesIO
import struct
from collections import namedtuple
import logging
import logging.handlers
import sys
import time
import datetime
global delta
delta = []
_log = logging.getLogger(__name__)
_bytelog = logging.getLogger(__name__+'.bytes')
_bytelog.propagate = False
fh = logging.FileHandler('spam.log')
fh.setLevel(logging.DEBUG)
_log.addHandler(fh)
class ThinkGearProtocol(object):
def __init__(self, port):
self.serial = serial.Serial(port, 57600)
self.preread = BytesIO()
self.io = self.serial
@staticmethod
def _chksum(packet):
return ~sum(c for c in packet ) & 0xff
def _read(self, n):
buf = self.io.read(n)
if len(buf) < n:
_log.debug('incomplete read, short %s bytes', n - len(buf))
if self.io == self.preread:
_log.debug('end of preread buffer')
# self.preread.reset()
# self.preread.truncate()
# two line comment out
self.io = self.serial
buf += self.io.read(n-len(buf))
if len(buf) < n:
_log.debug('incomplete read, short %s bytes', n - len(buf))
for o in range(0, len(buf), 16):
_bytelog.debug('%04X '+' '.join(('%02X',)*len(buf[o:o+16])), o, *(c for c in buf[o:o+16]))
return buf
def _deread(self, buf):
_log.debug('putting back %s bytes', len(buf))
pos = self.preread.tell()
self.preread.seek(0, 2)
self.preread.write(buf)
self.preread.seek(pos)
self.io = self.preread
def get_packets(self):
last_two = ()
while True:
last_two = last_two[-1:]+(self._read(1),)
# _log.debug('last_two: %r', last_two)
if last_two == (b'\xAA',b'\xAA'):
plen = self._read(1)
if plen >= b'\xAA':
# Bogosity
_log.debug('discarding %r while syncing', last_two[0])
last_two = last_two[-1:]+(plen,)
else:
last_two = ()
packet = self._read(int.from_bytes((plen), byteorder='big'))
# _log.debug(plen)
checksum = self._read(1)
if ord(checksum) == self._chksum(packet):
yield self._decode(packet)
else:
_log.debug('bad checksum')
self._deread(packet+checksum)
elif len(last_two) == 2:
_log.debug('discarding %r while syncing', last_two[0])
def _decode(self, packet):
decoded = []
while packet:
extended_code_level = 0
while len(packet) and packet[0] == '\x55':
extended_code_level += 1
packet = packet[1:]
if len(packet) < 2:
_log.debug('ran out of packet: %r', '\x55'*extended_code_level+packet)
break
code = packet[0]
if code < 0x80:
value = packet[1]
packet = packet[2:]
else:
vlen = packet[1]
if len(packet) < 2+vlen:
_log.debug('ran out of packet: %r', '\x55'*extended_code_level+chr(code)+chr(vlen)+packet)
break
value = packet[2:2+vlen]
packet = packet[2+vlen:]
# _log.debug('extended_code_level is '+str(extended_code_level))
# _log.debug('code is '+str(code))
# _log.debug('data_types is '+str(data_types))
# _log.debug(not extended_code_level and code in data_types)
# _log.debug(not bool(extended_code_level and code in data_types))
# _log.debug((extended_code_level,code) in data_types)
if not bool(extended_code_level and code in data_types):
data = data_types[code](extended_code_level, code, value)
# _log.debug('extended_code_level is '+str(extended_code_level))
# _log.debug('code is '+str(code))
# _log.debug('value is '+str(value))
# _log.debug('data_types is '+str(data_types))
elif (extended_code_level,code) in data_types:
data = data_types[(extended_code_level,code)](extended_code_level, code, value)
else:
data = ThinkGearUnknownData(extended_code_level, code, value)
decoded.append(data)
return decoded
data_types = {}
class ThinkGearMetaClass(type):
def __new__(mcls, name, bases, data):
cls = super(ThinkGearMetaClass, mcls).__new__(mcls, name, bases, data)
code = getattr(cls, 'code', None)
if code:
data_types[code] = cls
extended_code_level = getattr(cls, 'extended_code_level', None)
if extended_code_level:
data_types[(extended_code_level,code)] = cls
return cls
class ThinkGearData(object, metaclass=ThinkGearMetaClass):
def __init__(self, extended_code_level, code, value):
self.extended_code_level = extended_code_level
self.code = code
# self.value = self._decode(value)
self.value = value
# _log.debug('123')
if self._log:
_log.log(self._log, '%s', self)
@staticmethod
def _decode(v):
return v
def __str__(self):
return self._strfmt % vars(self)
# __metaclass__ = ThinkGearMetaClass
_log = logging.DEBUG
class ThinkGearUnknownData(ThinkGearData):
'''???'''
_strfmt = 'Unknown: code=%(code)02X extended_code_level=%(extended_code_level)s %(value)r'
class ThinkGearPoorSignalData(ThinkGearData):
'''POOR_SIGNAL Quality (0-255)'''
code = 0x02
_strfmt = 'POOR SIGNAL: %(value)s'
_decode = staticmethod(ord)
class ThinkGearAttentionData(ThinkGearData):
'''ATTENTION eSense (0 to 100)'''
code = 0x04
_strfmt = 'ATTENTION eSense: %(value)s'
_decode = staticmethod(ord)
class ThinkGearMeditationData(ThinkGearData):
'''MEDITATION eSense (0 to 100)'''
code = 0x05
_strfmt = 'MEDITATION eSense: %(value)s'
_decode = staticmethod(ord)
class ThinkGearRawWaveData(ThinkGearData):
'''RAW Wave Value (-32768 to 32767)'''
code = 0x80
_strfmt = 'Raw Wave: %(value)s'
_decode = staticmethod(lambda v: struct.unpack('>h', v)[0])
# There are lots of these, don't log them by default
_log = False
EEGPowerData = namedtuple('EEGPowerData', 'delta theta lowalpha highalpha lowbeta highbeta lowgamma midgamma')
delta_value = namedtuple('EEGPowerData', 'delta')
class ThinkGearEEGPowerData(ThinkGearData):
'''Eight EEG band power values (0 to 16777215).
delta, theta, low-alpha high-alpha, low-beta, high-beta, low-gamma, and
mid-gamma EEG band power values.
'''
code = 0x83
_strfmt = 'ASIC EEG Power: %(value)r'
_decode = staticmethod(lambda v: EEGPowerData(*struct.unpack('>8L', ''.join( '\x00'+v[o:o+3] for o in range(0, 24, 3)))))
#print(EEGPowerData.delta)
def main():
global packet_log
packet_log = []
logging.basicConfig(level=logging.DEBUG)
for pkt in ThinkGearProtocol('COM3').get_packets():
packet_log.append(pkt)
if __name__ == '__main__':
main()
When running it under Python 2, I get a result like this:
DEBUG:__main__:ASIC EEG Power: EEGPowerData(delta=7784, theta=7734, lowalpha=2035, highalpha=1979, lowbeta=2914, highbeta=3996, lowgamma=1944, midgamma=1847
When running it under Python 3, the result is like this:
DEBUG:__main__:ASIC EEG Power: b'\x00\xa9\xf1\x00t%\x00\rK\x00\x18"\x00\x16%\x00\x1d6\x00OT\x00\x17\x84'
Does anyone know how I should edit this line of code to make it work in Python 3? Thank you.
_decode = staticmethod(lambda v: EEGPowerData(*struct.unpack('>8L', ''.join( '\x00'+v[o:o+3] for o in range(0, 24, 3)))))
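A sketch of a possible Python 3 version of that line (untested): under Python 3 the payload v is bytes, so the zero padding and the join have to use bytes literals as well, and the commented-out self.value = self._decode(value) line in ThinkGearData.__init__ would also need to be restored for the decode to run at all:

_decode = staticmethod(lambda v: EEGPowerData(
    *struct.unpack('>8L', b''.join(b'\x00' + v[o:o + 3] for o in range(0, 24, 3)))))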
Is there a sure-fire way to check that the class of an object is a subclass of the desired superclass?
For example, in a migration script that I'm writing, I have to convert objects of a given type to dictionaries in a given manner to ensure two-way compatibility of the data.
This is best summed up like so:
Serializable
    User
    Status
    Issue
        Test
        Set
    Step
    Cycle
However, when I recursively check objects after unpickling, I receive a Test object that yields the following results:
Testing data object type:
type(data)
{type} <class '__main__.Test'>
Testing Class type:
type(Test())
{type} <class '__main__.Test'>
Testing object type against class type:
type(Test()) == type(data)
{bool}False
Testing if object isinstance() of Class:
isinstance(data, Test)
{bool}False
Testing if Class isinstance() of Super Class:
isinstance(Test(), Serializable)
{bool}True
Testing isinstance() of Super Class:
isinstance(data, Serializable)
{bool}False
Interestingly, it doesn't appear to have any such problem prior to pickling, as it handles the creation of the dictionary and integrity hash just fine.
This only crops up with unpickled objects, in both Pickle and Dill.
For context, here's the code in its native environment: the DataCache object that is pickled:
class DataCache(object):
_hash=""
_data = None
@staticmethod
def genHash(data):
dataDict = DataCache.dictify(data)
datahash = json.dumps(dataDict, sort_keys=True)
return hashlib.sha256(datahash).digest()
@staticmethod
def dictify(data):
if isinstance(data,list):
datahash = []
for item in data:
datahash.append(DataCache.dictify(item))
elif isinstance(data,(dict, collections.OrderedDict)):
datahash = collections.OrderedDict()
for key,value in datahash.iteritems():
datahash[key]= DataCache.dictify(value)
elif isinstance(data, Serializable):
datahash = data.toDict()
else:
datahash = data
return datahash
def __init__(self, restoreDict = {}):
if restoreDict:
self.__dict__.update(restoreDict)
def __getinitargs__(self):
return (self.__dict__)
def set(self, data):
self._hash = DataCache.genHash(data)
self._data = data
def verify(self):
dataHash = DataCache.genHash(self._data)
return (self._hash == dataHash)
def get(self):
return self._data
Finally, I know there are arguments for using JSON for readability in storage, but I needed Pickle's ability to convert straight to and from objects without specifying the object type myself (thanks to the nesting, that's not really feasible).
Am I going mad here or does pickling do something to the class definitions?
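One way to pin down whether the two Test classes are literally the same class object (a sketch; data stands for the unpickled object from the transcript above):

# If the classes live in different modules (e.g. __main__ versus an
# imported module) they are distinct objects, and isinstance() returns
# False even though their names match.
print("unpickled object's class: %s.%s" % (type(data).__module__, type(data).__name__))
print("expected class:           %s.%s" % (Test.__module__, Test.__name__))
print(type(data) is Test)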
EDIT:
Minimal Implementation:
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from aenum import Enum
import json # _tricks
import base64
import argparse
import os
import sys
import datetime
import dill
import hashlib
import collections
class Serializable(object):
def __init__(self, initDict={}):
if initDict:
self.__dict__.update(initDict)
def __str__(self):
return str(self.sortSelf())
def sortSelf(self):
return collections.OrderedDict(sorted(self.__dict__.items()))
def toDict(self):
return self.__dict__
def fromDict(self, dict):
# Not using __dict__.update(...) to avoid polluting objects with the excess data
varMap = self.__dict__
if dict and varMap:
for key in varMap:
if (key in dict):
varMap[key] = dict[key]
self.__dict__.update(varMap)
return self
return None
class Issue(Serializable):
def __init__(self, initDict={}):
self.id = 0
self.key = ""
self.fields = {}
if initDict:
self.__dict__.update(initDict)
Serializable.__init__(self)
def fieldToDict(self, obj, key, type):
if key in obj:
result = obj[key]
else:
return None
if result is None:
return None
if isinstance(result, type):
return result.toDict()
return result
def fromDict(self, jsonDict):
super(Issue, self).fromDict(jsonDict)
self.fields["issuetype"] = IssueType().fromDict(self.fields["issuetype"])
self.fields["assignee"] = User().fromDict(self.fields["assignee"])
self.fields["creator"] = User().fromDict(self.fields["creator"])
self.fields["reporter"] = User().fromDict(self.fields["reporter"])
return self
def toDict(self):
result = super(Issue, self).toDict()
blankKeys = []
for fieldName, fieldValue in self.fields.iteritems():
if fieldValue is None:
blankKeys.append(fieldName)
if blankKeys:
for key in blankKeys:
self.fields.pop(key, None)
result["fields"]["issuetype"] = self.fieldToDict(result["fields"], "issuetype", IssueType)
result["fields"]["creator"] = self.fieldToDict(result["fields"], "creator", User)
result["fields"]["reporter"] = self.fieldToDict(result["fields"], "reporter", User)
result["fields"]["assignee"] = self.fieldToDict(result["fields"], "assignee", User)
return result
class IssueType(Serializable):
def __init__(self):
self.id = 0
self.name = ""
def toDict(self):
return {"id": str(self.id)}
class Project(Serializable):
def __init__(self):
Serializable.__init__(self)
self.id = 0
self.name = ""
self.key = ""
class Cycle(Serializable):
def __init__(self):
self.id = 0
self.name = ""
self.totalExecutions = 0
self.endDate = ""
self.description = ""
self.totalExecuted = 0
self.started = ""
self.versionName = ""
self.projectKey = ""
self.versionId = 0
self.environment = ""
self.totalCycleExecutions = 0
self.build = ""
self.ended = ""
self.name = ""
self.modifiedBy = ""
self.projectId = 0
self.startDate = ""
self.executionSummaries = {'executionSummary': []}
class Step(Serializable):
def __init__(self):
self.id = ""
self.orderId = 0
self.step = ""
self.data = ""
self.result = ""
self.attachmentsMap = {}
def toDict(self):
dict = {}
dict["step"] = self.step
dict["data"] = self.data
dict["result"] = self.result
dict["attachments"] = []
return dict
class Status(Serializable):
def __init__(self):
self.id = 0
self.name = ""
self.description = ""
self.isFinal = True
self.color = ""
self.isNative = True
self.statusCount = 0
self.statusPercent = 0.0
class User(Serializable):
def __init__(self):
self.displayName = ""
self.name = ""
self.emailAddress = ""
self.key = ""
self.active = False
self.timeZone = ""
class Execution(Serializable):
def __init__(self):
self.id = 0
self.orderId = 0
self.cycleId = -1
self.cycleName = ""
self.issueId = 0
self.issueKey = 0
self.projectKey = ""
self.comment = ""
self.versionId = 0,
self.versionName = "",
self.executedOn = ""
self.creationDate = ""
self.executedByUserName = ""
self.assigneeUserName = ""
self.status = {}
self.executionStatus = ""
def fromDict(self, jsonDict):
super(Execution, self).fromDict(jsonDict)
self.status = Status().fromDict(self.status)
# This is already listed as Execution Status, need to associate and convert!
return self
def toDict(self):
result = super(Execution, self).toDict()
result['status'] = result['status'].toDict()
return result
class ExecutionContainer(Serializable):
def __init__(self):
self.executions = []
def fromDict(self, jsonDict):
super(ExecutionContainer, self).fromDict(jsonDict)
self.executions = []
for executionDict in jsonDict["executions"]:
self.executions.append(Execution().fromDict(executionDict))
return self
class Test(Issue):
def __init__(self, initDict={}):
if initDict:
self.__dict__.update(initDict)
Issue.__init__(self)
def toDict(self):
result = super(Test, self).toDict()
stepField = "CustomField_0001"
if result["fields"][stepField]:
steps = []
for step in result["fields"][stepField]["steps"]:
steps.append(step.toDict())
result["fields"][stepField] = steps
return result
def fromDict(self, jsonDict):
super(Test, self).fromDict(jsonDict)
stepField = "CustomField_0001"
steps = []
if stepField in self.fields:
for step in self.fields[stepField]["steps"]:
steps.append(Step().fromDict(step))
self.fields[stepField] = {"steps": steps}
return self
class Set(Issue):
def __init__(self, initDict={}):
self.__dict__.update(initDict)
Issue.__init__(self)
class DataCache(object):
_hash = ""
_data = None
@staticmethod
def genHash(data):
dataDict = DataCache.dictify(data)
datahash = json.dumps(dataDict, sort_keys=True)
return hashlib.sha256(datahash).digest()
@staticmethod
def dictify(data):
if isinstance(data, list):
datahash = []
for item in data:
datahash.append(DataCache.dictify(item))
elif isinstance(data, (dict, collections.OrderedDict)):
datahash = collections.OrderedDict()
for key, value in datahash.iteritems():
datahash[key] = DataCache.dictify(value)
elif isinstance(data, Serializable):
datahash = data.toDict()
else:
datahash = data
return datahash
def __init__(self, restoreDict={}):
if restoreDict:
self.__dict__.update(restoreDict)
def __getinitargs__(self):
return (self.__dict__)
def set(self, data):
self._hash = DataCache.genHash(data)
self._data = data
def verify(self):
dataHash = DataCache.genHash(self._data)
return (self._hash == dataHash)
def get(self):
return self._data
def saveCache(name, projectKey, object):
filePath = "migration_caches/{projectKey}".format(projectKey=projectKey)
if not os.path.exists(path=filePath):
os.makedirs(filePath)
cache = DataCache()
cache.set(object)
targetFile = open("{path}/{name}".format(name=name, path=filePath), 'wb')
dill.dump(obj=cache, file=targetFile)
targetFile.close()
def loadCache(name, projectKey):
filePath = "migration_caches/{projectKey}/{name}".format(name=name, projectKey=projectKey)
result = False
try:
targetFile = open(filePath, 'rb')
try:
cache = dill.load(targetFile)
if isinstance(cache, DataCache):
if cache.verify():
result = cache.get()
except EOFError:
# except BaseException:
print ("Failed to load cache from file: {filePath}\n".format(filePath=filePath))
except IOError:
print ("Failed to load cache file at: {filePath}\n".format(filePath=filePath))
targetFile.close()
return result
testIssue = Test().fromDict({"id": 1000,
"key": "TEST",
"fields": {
"issuetype": {
"id": 1,
"name": "TestIssue"
},
"assignee": "Minothor",
"reporter": "Minothor",
"creator": "Minothor",
}
})
saveCache("Test", "TestProj", testIssue)
result = loadCache("Test", "TestProj")
EDIT 2
The script in its current form now seems to work correctly with vanilla Pickle (I initially switched to Dill due to a similar issue, which was solved by the switch).
However, if you are here with this issue and require Dill's features, then, as Mike noted in the comments, it's possible to change the settings in dill.settings so that Dill pickles referenced items by reference only (joblib-style), effectively mirroring Pickle's standard pickling behaviour.
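If I've read the dill documentation correctly, that change is a one-liner (a sketch; 'byref' is the setting I believe controls this in recent dill releases):

import dill

# Pickle classes and functions by reference instead of serialising
# their definitions, mirroring standard pickle behaviour.
dill.settings['byref'] = True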
class account(object):
__duser_id = ''
__duser_name =''
__duser_no = ''
def __init__(self, default, entry_name, password, user_id='', user_name='', user_no=''):
if type(default) != bool:
raise Exception("Error 0x1: type(default) is boolean ")
if default == False:
self.__user_id = user_id
self.__user_name = user_name
self.__user_no = user_no
else:
self.__user_id = __duser_id
self.__user_name = __duser_name
self.__user_no = __duser_no
self.__entry_name = entry_name
self.__password = password
def dset(self, duser_id=__duser_id, duser_name=__duser_name, duser_no=__duser_no):
__duser_id = duser_id
__duser_name = duser_name
__duser_no = duser_no
return (__duser_id, __duser_name, __duser_no)
def dget(self):
return (__duser_id, __duser_name, __duser_no)
def set(self, user_name=self.__user_name, user_id=self.__user_id, user_no=self.__user_no, password=self.__password):
self.__user_id = user_id
self.__user_name = user_name
self.__user_no = user_no
self.__password = password
return (self.__user_id, self.__user_name, self.__user_no, self.password)
def get(self):
return (self.__user_id, self.__user_name, self.__user_no, self.password)
if __name__ == '__main__':
gmail = account(default=True, entry_name='gmail', password='pass***')
print(gmail.dget())
print(gmail.get())
The output is:
Traceback (most recent call last):
File "interface.py", line 1, in <module>
class account(object):
File "interface.py", line 30, in account
def set(self, user_name=self.__user_name, user_id=self.__user_id, user_no=self.__user_no, password=self.__password):
NameError: name 'self' is not defined
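Default argument values are evaluated once, when the def statement runs while the class body is being executed, and self does not exist at that point. The usual workaround (a sketch, trimmed to the one method) is to default to None and fall back to the current values inside the body:

    def set(self, user_name=None, user_id=None, user_no=None, password=None):
        # Fall back to the existing attribute values when no argument is given.
        if user_name is not None:
            self.__user_name = user_name
        if user_id is not None:
            self.__user_id = user_id
        if user_no is not None:
            self.__user_no = user_no
        if password is not None:
            self.__password = password
        return (self.__user_id, self.__user_name, self.__user_no, self.__password)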
OK, I got that one.
But there is another problem with the code I just changed.
This is a decorator with an arbitrary number of positional and keyword arguments:
def user_no_is_number(func):
def wrapper(*args, **kargs):
if 'user_no' in kargs:
if type(kargs['user_no']) != int:
raise Exception('Error 1x0: user_no must contains only numbers.')
else:
return func(*args, **kargs)
return wrapper
@staticmethod
@user_no_is_number
def dset(user_id=None, user_name=None, user_no=None):
if user_id:
account.__duser_id = user_id
if user_name:
account.__duser_name = user_name
if user_no:
account.__duser_no = user_no
return (account.__duser_id, account.__duser_name, account.__duser_no)
But the dset() function always returns None.
*I think there is a problem with the arbitrary keyword parameters: using **kargs in the decorator turns them into a dictionary, and passing **kargs on again just unpacks the values of that dictionary.*
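For reference, a variant of the wrapper that always forwards the call (a sketch; in the version above, any call where 'user_no' is not in kargs falls off the end of wrapper and so implicitly returns None):

def user_no_is_number(func):
    def wrapper(*args, **kargs):
        # Validate only when user_no was actually passed, then always
        # delegate to the wrapped function and return its result.
        if 'user_no' in kargs and not isinstance(kargs['user_no'], int):
            raise Exception('Error 1x0: user_no must contain only numbers.')
        return func(*args, **kargs)
    return wrapper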
I've written a basic storage class that holds data in a list of lists, writes it to a file, and then allows it to be read back for persistence. It was working on the system it was originally written on (Windows 7); I moved it to a second system (Windows 8) and now it seems to be malfunctioning.
The error I'm receiving is:
Traceback (most recent call last):
File "D:\Code Vault\pydb.py", line 69, in <module>
print Users.ReadData()
File "D:\Code Vault\pydb.py", line 23, in ReadData
self.data = cPickle.load(self.Inputstream)
EOFError
The full code for the class is as follows:
class table():
#TODO Insert Key Field to facilitate ID Search
def __init__(self,*args):
self.tablename = args[0]
self.data = []
self.colnames = {}
self.filename = str(self.tablename)+ ".p"
for e in args[1:]:
self.colnames.update({e:len(self.colnames)})
self.Filestorage = open(self.filename, "wb" )
self.Filestorage.close()
def ReadData(self):
self.Inputstream = open(self.filename, "rb")
self.Inputstream.seek(0)
self.data = cPickle.load(self.Inputstream)
self.Inputstream.close()
return self.data
def AddData (self, *args):
self.tempdata = []
for e in args:
self.tempdata.append(e)
if len(args) == len(self.colnames):
self.data.append(self.tempdata)
else:
return "Incorrect argument quantities"
return self.tempdata
def Commit(self):
self.Outputstream = open(self.filename, "ab" )
cPickle.dump(self.data, self.Outputstream)
self.Outputstream.close()
return self.ReadData()
def Search(self,query):
for e in self.ReadData():
if query in e:
return self.ReadData().index(e)
else:
return "Value " + str(query) + " does not exist"
def Update(self,query, edit):
for e in self.data:
if query in e:
e[e.index(query)] = edit
return self.data
def DumpTable(self):
self.data = []
self.Outputstream = open(self.filename+".p", "wb" )
cPickle.dump(self.data, self.Outputstream)
self.Outputstream.close()
self.ReadData()
Users = table("Users","Username","Password")
Users.AddData("bob","123456")
# Users.Commit()
# print Users.data
# print Users.filename
# print Users.data
print Users.ReadData()
It's probably a case of code blindness, as it was working previously. Any ideas would be appreciated.
The amended code that answers the question:
import pickle
class table():
#TODO Insert Key Field to facilitate ID Search
def __init__(self,*args):
self.tablename = args[0]
self.data = []
self.colnames = {}
self.filename = str(self.tablename)+ ".p"
for e in args[1:]:
self.colnames.update({e:len(self.colnames)})
try: #creating this conditional prevented the contents being erased upon instantiating the class
f = open(self.filename,"rb")
except IOError:
self.createfile = open(self.filename, "wb" )
self.createfile.close()
def ReadData(self):
self.Inputstream = open(self.filename, "rb")
self.data = pickle.load(self.Inputstream)
self.Inputstream.close()
return self.data
def AddData (self, *args):
self.tempdata = []
for e in args:
self.tempdata.append(e)
if len(args) == len(self.colnames):
self.data.append(self.tempdata)
else:
return "Incorrect argument quantities"
return self.tempdata
def Commit(self):
self.Outputstream = open(self.filename, "wb" )
pickle.dump(self.data, self.Outputstream)
self.Outputstream.close()
def createfile(self):
self.createfile = open(self.filename, "wb" )
self.createfile.close()
def Search(self,query):
for e in self.ReadData():
if query in e:
return self.ReadData().index(e)
else:
return "Value " + str(query) + " does not exist"
def Update(self,query, edit):
for e in self.data:
if query in e:
e[e.index(query)] = edit
return self.data
def DumpTable(self):
self.data = []
self.Outputstream = open(self.filename+".p", "wb" )
pickle.dump(self.data, self.Outputstream)
self.Outputstream.close()
self.ReadData()
The solution seems to be here: pickle.load() raising EOFError in Windows.
That is, the file needs to be opened for reading in binary mode with the "rb" flag.
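A minimal reproduction of the original failure (a sketch; the file name is only an example): re-opening the file with "wb" in __init__ truncates it, so loading from the now-empty file raises EOFError.

import pickle

open("Users.p", "wb").close()        # what the original __init__ did: truncate the file
with open("Users.p", "rb") as stream:
    pickle.load(stream)              # raises EOFError because the file is empty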
I have a problem with my current script, where I'm working with the keyboard event for XBMC. When I press the Enter key, the code keeps firing: it re-fetches the same values from the XML tags over and over and keeps storing the same values in the database, even though there are only a few values in the 'display-name' XML tags.
Here's the code:
import xbmc
import xbmcgui
import xbmcaddon
import os
import urllib2
import StringIO
import sqlite3
from sqlite3 import dbapi2 as database
from xml.etree import ElementTree
class Channel:
def __init__(self):
self.__display_name = None
self.__icon = None
self.__programs = []
def get_display_name(self):
return self.__display_name
def get_icon(self):
return self.__icon
def get_programs(self):
return self.__programs
def set_display_name(self, value):
self.__display_name = value
def set_icon(self, value):
self.__icon = value
def set_programs(self, value):
self.__programs = value
def del_display_name(self):
del self.__display_name
def del_icon(self):
del self.__icon
def del_programs(self):
del self.__programs
display_name = property(get_display_name, set_display_name, del_display_name, "display_name's docstring")
icon = property(get_icon, set_icon, del_icon, "icon's docstring")
programs = property(get_programs, set_programs, del_programs, "programs's docstring")
class Programme:
def __init__(self):
self.__start = None
self.__stop = None
self.__title = None
self.__sub_title = None
self.__desc = None
self.__category = []
self.__credits = []
self.__icon = None
self.__episode_num = None
def get_episode_num(self):
return self.__episode_num
def set_episode_num(self, value):
self.__episode_num = value
def del_episode_num(self):
del self.__episode_num
def get_start(self):
return self.__start
def get_stop(self):
return self.__stop
def get_title(self):
return self.__title
def get_sub_title(self):
return self.__sub_title
def get_desc(self):
return self.__desc
def get_category(self):
return self.__category
def get_credits(self):
return self.__credits
def get_icon(self):
return self.__icon
def set_start(self, value):
self.__start = value
def set_stop(self, value):
self.__stop = value
def set_title(self, value):
self.__title = value
def set_sub_title(self, value):
self.__sub_title = value
def set_desc(self, value):
self.__desc = value
def set_category(self, value):
self.__category = value
def set_credits(self, value):
self.__credits = value
def set_icon(self, value):
self.__icon = value
def del_start(self):
del self.__start
def del_stop(self):
del self.__stop
def del_title(self):
del self.__title
def del_sub_title(self):
del self.__sub_title
def del_desc(self):
del self.__desc
def del_category(self):
del self.__category
def del_credits(self):
del self.__credits
def del_icon(self):
del self.__icon
start = property(get_start, set_start, del_start, "start's docstring")
stop = property(get_stop, set_stop, del_stop, "stop's docstring")
title = property(get_title, set_title, del_title, "title's docstring")
sub_title = property(get_sub_title, set_sub_title, del_sub_title, "sub_title's docstring")
desc = property(get_desc, set_desc, del_desc, "desc's docstring")
category = property(get_category, set_category, del_category, "category's docstring")
creditss = property(get_credits, set_credits, del_credits, "credits's docstring")
icon = property(get_icon, set_icon, del_icon, "icon's docstring")
episode_num = property(get_episode_num, set_episode_num, del_episode_num, "episode_num's docstring")
class Credits:
def __init__(self):
self.__type = None
self.__role = None
self.__name = None
def get_type(self):
return self.__type
def get_role(self):
return self.__role
def get_name(self):
return self.__name
def set_type(self, value):
self.__type = value
def set_role(self, value):
self.__role = value
def set_name(self, value):
self.__name = value
def del_type(self):
del self.__type
def del_role(self):
del self.__role
def del_name(self):
del self.__name
type = property(get_type, set_type, del_type, "type's docstring")
role = property(get_role, set_role, del_role, "role's docstring")
name = property(get_name, set_name, del_name, "name's docstring")
class MyClass(xbmcgui.WindowXML):
def __new__(cls):
return super(MyClass, cls).__new__(cls, 'script-tvguide-mainmenu.xml', ADDON.getAddonInfo('path'))
def onInit(self):
pass
def load_channel(self, elem):
channel = Channel()
for elem in elem.getchildren():
if elem.tag == 'display-name':
channel.set_display_name(elem.text)
elif elem.tag == 'icon':
channel.set_icon(elem.attrib['src'])
return channel
def load_programme(self, elem):
programme = Programme()
programme.set_start(elem.attrib['start'])
programme.set_stop(elem.attrib['stop'])
for elem in elem.getchildren():
if elem.tag == 'title':
programme.set_title(elem.text)
elif elem.tag == 'sub-title':
programme.set_sub_title(elem.text)
elif elem.tag == 'desc':
programme.set_desc(elem.text)
elif elem.tag == 'category':
categories = programme.get_category()
categories.append(elem.text)
elif elem.tag == 'episode-num':
programme.set_episode_num(elem.text)
elif elem.tag == 'credits':
creditss = programme.get_credits()
creditss.append(self.load_credits(elem))
elif elem.tag == 'icon':
programme.set_icon(elem.attrib['src'])
return programme
def load_credits(self, elem):
creditss = Credits()
for elem in elem.getchildren():
if elem.tag == 'actor':
creditss.set_name(elem.text)
creditss.set_type('actor')
elif elem.tag == 'presenter':
creditss.set_name(elem.text)
creditss.set_type('presenter')
elif elem.tag == 'director':
creditss.set_name(elem.text)
creditss.set_type('director')
return creditss
def onAction(self, action):
if action == ACTION_ENTER:
#OPEN THE XML SOURCE
url = ADDON.getSetting('ontv.url')
req = urllib2.Request(url)
response = urllib2.urlopen(req)
data = response.read()
response.close()
profilePath = xbmc.translatePath(os.path.join('special://userdata/addon_data/script.tvguide', ''))
if os.path.exists(profilePath):
profilePath = profilePath + 'source.db'
con = database.connect(profilePath)
cur = con.cursor()
cur.execute('CREATE TABLE programs(id TEXT, channel TEXT, title TEXT, start_date TIMESTAMP, end_date TIMESTAMP, description TEXT)')
con.commit()
con.close()
tv_elem = ElementTree.parse(StringIO.StringIO(data)).getroot()
channels = {}
for elem in tv_elem.getchildren():
if elem.tag == 'channel':
channels[elem.attrib['id']] = self.load_channel(elem)
elif elem.tag == 'programme':
# get channel
channel = channels[elem.attrib['channel']]
# load program in channel
channel.get_programs().append(self.load_programme(elem))
# Print the loaded data
for channel_key in channels:
channel = channels[channel_key]
print channel.get_display_name() + ' :: ' + channel.get_icon() + ' :: Total programs = ' + str(len(channel.get_programs()))
display_name = channel.get_display_name()
profilePath = xbmc.translatePath(os.path.join('special://userdata/addon_data/script.tvguide', ''))
profilePath = profilePath + 'source.db'
con = sqlite3.connect(profilePath)
cur = con.cursor()
if not display_name in cur:
cur.execute("INSERT INTO programs(id, channel)" + " VALUES(?, ?)", [display_name, 0])
con.commit()
cur.close()
print 'Channels store into database are now successfully!'
Here's the xml logs: http://xbmclogs.com/show.php?id=143587
Here's the database: http://testbox.elementfx.com/source.db
And here's the xml file: http://ontv.dk/xmltv/c81e728d9d4c2f636f067f89cc14862c
I only want to write the values to the database once, without re-fetching the same values over and over each time I read them from the XML tags.
Do you know how I can stop re-fetching and re-storing the same values, so the code doesn't keep running in a loop?
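For reference, a standalone sketch of the de-duplication idea on the SQLite side (table and column names are taken from the code above; the database path and channel name are placeholders, and this is not wired into the XBMC handler):

import sqlite3

con = sqlite3.connect('source.db')   # placeholder path
cur = con.cursor()
# Creating the table unconditionally would fail on the second key press.
cur.execute('CREATE TABLE IF NOT EXISTS programs(id TEXT, channel TEXT, title TEXT, '
            'start_date TIMESTAMP, end_date TIMESTAMP, description TEXT)')

display_name = 'Example Channel'     # placeholder for a value read from the XML
# Insert the channel only if it has not been stored already.
cur.execute('SELECT 1 FROM programs WHERE id = ?', [display_name])
if cur.fetchone() is None:
    cur.execute('INSERT INTO programs(id, channel) VALUES(?, ?)', [display_name, 0])
con.commit()
con.close()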