Calling an async function from a class - python

My prof gave us this code to play with, but I'm having trouble calling an async function from a class:
import asyncio
import aiohttp
import time
from pathlib import Path
from typing import List, Any, Dict, Union, Awaitable, Optional
import json
import toml
from mypy_extensions import TypedDict
# apikey = …
_coin_list: Dict[str, Any] = {}
async def fetch(url: str) -> Dict[str, Any]:
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
text = await resp.text()
return json.loads(text)
async def fetch_price_data(from_currencies: List[str], to_currencies: List[str], full: bool=False) -> Dict[str, Any]:
if full:
endpoint = 'pricemultifull'
from_parameter = 'fsyms'
if 'USD' not in to_currencies:
to_currencies.append('USD')
else:
endpoint = 'pricemulti'
from_parameter = 'fsyms'
price_url = f'https://min-api.cryptocompare.com/data/{endpoint}?' \
f'{from_parameter}={",".join(from_currencies)}&' \
f'tsyms={",".join(to_currencies)}'
resp = await fetch(price_url)
if full:
resp = resp['RAW']
return resp
async def fetch_coin_list() -> Dict[str, Any]:
global _coin_list
async with asyncio.Lock():
if not _coin_list:
coin_list_url = 'https://min-api.cryptocompare.com/data/all/coinlist'
_coin_list = await fetch(coin_list_url)
return _coin_list['Data']
# warnings
class CryptoPriceException(Exception):
pass
class CurrencyNotFound(CryptoPriceException):
pass
class UnfetchedInformation(CryptoPriceException):
pass
CurrencyKwargs = TypedDict('CurrencyKwargs', {
'cache': int,
'target_currencies': List[str],
'full': bool,
'historical': Optional[str],
'human': bool
})
class Prices:
"""Prices object"""
def __init__(self, currency: 'Currency') -> None:
self._prices: Dict[str, float] = {}
self._currency = currency
async def get(self, target_currency: str, default: float=0.0) -> float:
"""
Gets the price for a specified currency, if currency is not in target currencies,
it's added there for the specific currency
:param target_currency: Currency to get converted price for
:param default: What to return if the desired currency is not found in fetched prices
"""
target_currency = target_currency.upper()
if target_currency not in self._currency.target_currencies:
self._currency.target_currencies.append(target_currency)
await self._currency.load()
# TODO float should be in the dict from when it's put there -> stop using .update() with data from api
return float(self._prices.get(target_currency, default))
def __getitem__(self, item: str) -> float:
try:
return float(self._prices[item.upper()])
except KeyError:
raise CurrencyNotFound("Desired target currency not found, make sure it's in desired_currencies "
"and that cryptocompare.com supports it.")
def __setitem__(self, key: str, value: float) -> None:
self._prices[key.upper()] = value
def __getattr__(self, item: str) -> float:
return self[item]
class Currency:
"""
Currency object
"""
def __init__(self, symbol: str, cache: int=60, target_currencies: List[str] = None,
full: bool = False, historical: Optional[str] = None, human: bool = False) -> None:
"""
:param symbol: Symbol of the currency (e.g. ZEC) - will be converted to uppercase automatically
:param cache: Seconds to keep prices in cache
:param target_currencies: Which currencies to convert prices to
:param full: Whether to fetch full data, like change, market cap, volume etc.
:param historical: Whether to fetch movement data, either None, or 'minute', 'hour', 'day'
:param human: Whether to fetch information that concern humans (logo, full name)
"""
self.symbol = symbol.upper()
self.cache = cache
self.target_currencies = target_currencies or ['USD', 'BTC']
self.last_loaded: Union[bool, float] = False
self.prices = Prices(self)
self.full = full
self.historical = historical
self.human = human
self.human_data: Dict[str, Any] = {}
self.full_data: Dict[str, Any] = {}
# @property
# def image_url(self) -> str:
# """Available only if human is True - url to a image of currency's logo"""
# if not self.human:
# raise UnfetchedInformation('human must be True to fetch image_url')
# return f'https://www.cryptocompare.com{self.human_data.get("ImageUrl")}'
# @property
# def name(self) -> str:
# """Available only if human is True - name of the currency (e.g. Bitcoin from BTC)"""
# if not self.human:
# raise UnfetchedInformation('human must be True to fetch name')
# return self.human_data.get('CoinName', '')
@property
def supply(self) -> float:
if not self.full:
raise UnfetchedInformation('full must be True to fetch supply')
return float(self.full_data['USD']['SUPPLY'])
@property
def market_cap(self) -> float:
# TODO should be in self.prices
if not self.full:
raise UnfetchedInformation('full must be True to fetch market_cap')
return float(self.full_data['USD']['MKTCAP'])
def volume(self):
raise NotImplementedError
async def load(self) -> None:
"""Loads the data if they are not cached"""
tasks: List[Awaitable[Any]] = []
if not self.last_loaded:
if self.human:
tasks.append(fetch_coin_list())
if not self.last_loaded or time.time() < self.last_loaded + self.cache:
tasks.append(self.__load())
await asyncio.gather(*tasks)
if self.human and not self.human_data:
extra_data = await fetch_coin_list()
self.human_data = extra_data.get(self.symbol, {})
self.last_loaded = time.time()
async def __load(self) -> None:
try:
json_data = await fetch_price_data([self.symbol], self.target_currencies, full=self.full)
except Exception as _:
fallback = self.__load_fallback()
for tsym, price in self.prices._prices.items():
self.prices._prices[tsym] = fallback[tsym]
else:
if self.full:
self.full_data = json_data.get(self.symbol, {})
for tsym, price in self.prices._prices.items():
if self.full_data.get(tsym):
self.prices._prices[tsym] = self.full_data.get(tsym, {}).get('PRICE')
else:
self.prices._prices.update(json_data.get(self.symbol, {}))
def __load_fallback(self):
fallback_toml = (Path(__file__).resolve().parent / 'fallbacks.tml')
with fallback_toml.open(mode='r') as f:
return toml.load(f)
class Currencies:
"""
Wrapper around currencies.
Parameters will propagate to all currencies gotten through this wrapper.
If you want to share state across modules, you should import currencies with lowercase
and set their parameters manually.
"""
def __init__(self, cache: int=60, target_currencies: List[str]=None,
full: bool=False, historical: Optional[str]=None, human: bool=False) -> None:
"""
:param cache: Seconds to keep prices in cache
:param target_currencies: Which currencies to convert prices to
:param full: Whether to fetch full data, like change, market cap, volume etc.
:param historical: Whether to fetch movement data, either None, or 'minute', 'hour', 'day'
TODO https://min-api.cryptocompare.com/data/histominute?fsym=BTC&tsym=USD&limit=60&aggregate=3&e=CCCAGG
TODO aggregate -> after how many minutes; the time is in timestamps
Bonus arguments in the method, and return only via the method?
:param human: Whether to fetch information that concern humans (logo, full name)
Will not work with classic currencies like USD or EUR
"""
self.currencies: Dict[str, Currency] = dict()
self.cache = cache
self.target_currencies = target_currencies or ['USD', 'BTC', 'ETH']
self.full = full
self.historical = historical
self.human = human
async def load_all(self) -> None:
"""Loads data for all currencies"""
symbols = []
for _, currency in self.currencies.items():
symbols.append(currency.symbol)
if self.human:
# This is done just once, as the contents don't change
await fetch_coin_list()
# TODO fetch only if at least one isn't cached
price_data = await fetch_price_data(symbols, self.target_currencies, full=self.full)
for symbol, currency in self.currencies.items():
if self.full:
currency.full_data = price_data.get(symbol, {})
currency.prices._prices.update(price_data.get(symbol, {}))
currency.last_loaded = time.time()
if self.human:
# Update the currency with already fetched extra information
await currency.load()
def add(self, *symbols: str) -> None:
"""Add to the list of symbols for which to load prices"""
for symbol in symbols:
if symbol not in self.currencies:
self.currencies[symbol] = Currency(symbol, **self.__currency_kwargs)
@property
def __currency_kwargs(self) -> CurrencyKwargs:
"""All kwargs that are propagated to individual currencies"""
return {
'cache': self.cache,
'target_currencies': self.target_currencies,
'full': self.full,
'historical': self.historical,
'human': self.human
}
def __getitem__(self, item: str) -> Currency:
"""Gets a currency, if not present, will create one"""
item = item.upper()
if item not in self.currencies:
self.currencies[item] = Currency(item, **self.__currency_kwargs)
return self.currencies[item]
def __getattr__(self, item: str) -> Currency:
"""Same as getitem, but accessible with dots"""
return self[item]
currencies = Currencies()
For example, I'm trying to call the fetch_coin_list function, but it gives an error, and any method I try to call gives an error.
I'm pretty sure I'm calling it the wrong way, but I have no idea how to fix it. It's my first time working with async functions, so please bear with me; I'd be incredibly thankful for any help.

A simple usage example is provided in the asyncio documentation:
import asyncio
async def main():
print('Hello ...')
await asyncio.sleep(1)
print('... World!')
# Python 3.7+
asyncio.run(main())
If you want to run multiple asynchronous tasks concurrently, refer to the load method of the Currency class in the code you provided, which uses asyncio.gather.
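For instance, with the code from the question, the entry point could look something like the sketch below. This is my own illustration rather than part of the original code, and it assumes everything lives in one module and the network calls succeed:

async def main():
    # await a single coroutine directly
    coins = await fetch_coin_list()

    # or run several coroutines concurrently with asyncio.gather, just like Currency.load does
    btc_prices, eth_prices = await asyncio.gather(
        fetch_price_data(['BTC'], ['USD']),
        fetch_price_data(['ETH'], ['USD']),
    )
    print(len(coins), btc_prices, eth_prices)

# Python 3.7+: asyncio.run creates an event loop, runs main() until it completes, then closes the loop
asyncio.run(main())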

Related

Optapy domain definitions help. Getting error: java.lang.VerifyError: Bad type on operand stack

I am trying to write a pilot rostering project, where pilots and copilots need to be assigned to flights ("Duty"). I'm using Python because there are existing Python investments at the client.
I'm basing this POC on the optapy Employee Scheduling quickstart.
The code is crashing in my domain.py while trying to init my Availability class, specifically in the @optapy.problem_fact decorator wrapping it (optapy\annotations.py, line 585).
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\PythonClassTranslator.java", line 292, in org.optaplanner.jpyinterpreter.PythonClassTranslator.translatePythonClass
Exception: Java Exception
The above exception was the direct cause of the following exception:
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\jpyinterpreter\python_to_java_bytecode_translator.py", line 1220, in translate_python_class_to_java_class
out = PythonClassTranslator.translatePythonClass(python_compiled_class)
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\jpyinterpreter\python_to_java_bytecode_translator.py", line 422, in convert_to_java_python_like_object
out = translate_python_class_to_java_class(raw_type)
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\jpyinterpreter\python_to_java_bytecode_translator.py", line 399, in convert_to_java_python_like_object
convert_to_java_python_like_object(map_value, instance_map))
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\jpyinterpreter\python_to_java_bytecode_translator.py", line 1193, in translate_python_class_to_java_class
static_attributes_map.put(attribute[0], convert_to_java_python_like_object(attribute[1]))
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\optapy\optaplanner_java_interop.py", line 1050, in compile_and_get_class
parent_class = translate_python_class_to_java_class(python_class).getJavaClass()
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\optapy\optaplanner_java_interop.py", line 1063, in _generate_problem_fact_class
parent_class = compile_and_get_class(python_class)
File "C:\Users\andre\AppData\Local\Programs\Python\Python39\Lib\site-packages\optapy\annotations.py", line 585, in problem_fact
out.__optapy_java_class = _generate_problem_fact_class(fact_class)
File "C:\Users\andre\Desktop\Pilot Rosterer 3000\domain.py", line 131, in <module>
class Availability:
The error is as follows:
Exception has occurred: java.lang.VerifyError (note: full exception trace is shown but execution is paused at: _run_module_as_main)
java.lang.VerifyError: Bad type on operand stack
Exception Details:
Location:
org/jpyinterpreter/user/enum/Enum/__new__$$2.invoke(Lorg/jpyinterpreter/user/domain/AvailabilityType;Lorg/optaplanner/jpyinterpreter/PythonLikeObject;)Lorg/optaplanner/jpyinterpreter/PythonLikeObject; #897: invokestatic
Reason:
Type 'org/optaplanner/jpyinterpreter/PythonLikeObject' (current frame, stack[1]) is not assignable to 'org/optaplanner/jpyinterpreter/types/PythonLikeType'
Current Frame:
bci: #897
flags: { }
locals: { 'org/jpyinterpreter/user/enum/Enum/__new__$$2', 'org/jpyinterpreter/user/domain/AvailabilityType', 'org/optaplanner/jpyinterpreter/PythonLikeObject', 'org/jpyinterpreter/user/domain/AvailabilityType', 'org/optaplanner/jpyinterpreter/PythonLikeObject', top, 'org/optaplanner/jpyinterpreter/types/PythonNone', top, top, top, null, 'org/optaplanner/jpyinterpreter/types/collections/PythonLikeTuple', top, top, top, top, top, 'java/lang/Object', 'org/optaplanner/jpyinterpreter/PythonLikeObject' }
stack: { 'org/optaplanner/jpyinterpreter/PythonLikeObject', 'org/optaplanner/jpyinterpreter/PythonLikeObject', 'org/optaplanner/jpyinterpreter/PythonLikeObject' }
Bytecode:
0000000: b200 5a3a 0b2b 4e2c 3a04 2ac0 0002 b400
... ... ...
domain.py
import optapy
import optapy.types
import optapy.score
import datetime
import enum
class Route:
name: str
duration: int
base: str
def __init__(self, name: str = None, duration: int = None, base: str = None):
self.name = name
self.duration = duration
self.base = base
class AvailabilityType(enum.Enum):
DESIRED = 'DESIRED'
UNDESIRED = 'UNDESIRED'
UNAVAILABLE = 'UNAVAILABLE'
@staticmethod
def list():
return list(map(lambda at: at, AvailabilityType))
@optapy.problem_fact
class Pilot:
name: str
skill_set: list[str]
home_base: str
duties: list # list[Duty]
def __init__(self, name: str = None, skill_set: list[str] = None, home_base: str = None, duties: list = None):
self.name = name
self.skill_set = skill_set
self.home_base = home_base
if duties is None:
self.duties = []
else:
self.duties = duties
def get_total_hours(self): # flight hours
dt = datetime.timedelta()
for duty in self.duties:
if duty.work_type != "Flying": continue # skip non-flight duties
dt += duty.end - duty.start
return dt.seconds/3600 + dt.days*24
def get_todays_hours(self, date: datetime.date):
dt = datetime.timedelta()
for duty in self.duties:
if duty.work_type != "Flying": continue # skip non-flight duties
if duty.start.date() != date: continue # skip duties from other days
dt += duty.end - duty.start # not taking into account flights over midnight. TODO: fix
return dt.seconds/3600 + dt.days*24
def get_total_overtime_hours(self):
days = dict()
for duty in self.duties:
if duty.work_type != "Flying": continue # skip non-flight duties
day = duty.start.date()
days[day] = days.get(day, datetime.timedelta()) + duty.end - duty.start
total_overtime = datetime.timedelta()
MAX_FLIGHT_HOURS_ALLOWED = datetime.timedelta(hours=10)
for day in days:
if day > MAX_FLIGHT_HOURS_ALLOWED:
total_overtime += day - MAX_FLIGHT_HOURS_ALLOWED
return total_overtime.seconds/3600 + total_overtime.days*24
def __str__(self):
return f'Pilot(name={self.name})'
def to_dict(self):
return {
'name': self.name,
'skill_set': self.skill_set,
'home_base': self.home_base,
'duties': self.duties
}
def duty_pinning_filter(solution, duty):
return not solution.schedule_state.is_draft(duty)
@optapy.planning_entity(pinning_filter=duty_pinning_filter)
@optapy.planning_pin
class Duty:
id: int
start: datetime.datetime
end: datetime.datetime
work_type: str
detail: str
required_skill: str
pilot: Pilot
# coPilot: Pilot # REMOVED: No longer complicating duties with 2 pilots, rather just creating seperate pilot duties and copilot duties for each route. This allows us to model duties other than flights
bases: list[str] # location(s) from which the duty can be performed
def __init__(self, id: int = None, start: datetime.datetime = None, end: datetime.datetime = None, work_type: str = None, detail: str = None, required_skill: str = None, pilot: Pilot = None, bases: list[str] = None):
self.id = id
self.start = start
self.end = end
self.work_type = work_type
self.detail = detail
self.required_skill = required_skill
self.pilot = pilot
self.bases = bases
@optapy.planning_id
def get_id(self):
return self.id
def __str__(self):
return f'Duty:{self.id}\t | {self.work_type}, detail={self.detail}, pilot={self.pilot}, start={self.start}, end={self.end}, required_skill={self.required_skill}, bases={self.bases})'
def to_dict(self):
return {
'id': self.id,
'work_type': self.work_type,
'start': self.start.isoformat(),
'end': self.end.isoformat(),
'detail': self.detail,
'required_skill': self.required_skill,
'pilot': self.pilot.to_dict() if self.pilot is not None else None,
'bases': self.bases
}
# ============================================================================================
# This is to get around the circular reference problem.
# The optapy decorators cannot reference Duty/Pilot before the classes are instantiated,
# but the classes rely on each other being instantiated first before they can instantiate
# themselves. Hence the circular reference issue.
@optapy.planning_list_variable(Duty, ['duties_list'])
def get_duties(self):
return self.duties
def set_duties(self, duties):
self.duties = duties
@optapy.planning_variable(Pilot, value_range_provider_refs=['pilot_range'])
def get_pilot(self):
return self.pilot
def set_pilot(self, pilot):
self.pilot = pilot
Pilot.get_duties = get_duties
Pilot.set_duties = set_duties
Duty.get_pilot = get_pilot
Duty.set_pilot = set_pilot
# ============================================================================================
@optapy.problem_fact
class Availability:
pilot: Pilot
date: datetime.date
availability_type: AvailabilityType
def __init__(self, pilot: Pilot = None, date: datetime.date = None, availability_type: AvailabilityType = None):
self.pilot = pilot
self.date = date
self.availability_type = availability_type
def __str__(self):
return f'Availability(pilot={self.pilot}, date={self.date}, availability_type={self.availability_type})'
def to_dict(self):
return {
'pilot': self.pilot.to_dict(),
'date': self.date.isoformat(),
'availability_type': self.availability_type.value
}
class ScheduleState:
publish_length: int
draft_length: int
first_draft_date: datetime.date
last_historic_date: datetime.date
def __init__(self, publish_length: int = None, draft_length: int = None, first_draft_date: datetime.date = None, last_historic_date: datetime.date = None):
self.publish_length = publish_length
self.draft_length = draft_length
self.first_draft_date = first_draft_date
self.last_historic_date = last_historic_date
def is_draft(self, duty):
return duty.start >= datetime.datetime.combine(self.first_draft_date, datetime.time.min)
def to_dict(self):
return {
'publish_length': self.publish_length,
'draft_length': self.draft_length,
'first_draft_date': self.first_draft_date.isoformat(),
'last_historic_date': self.last_historic_date.isoformat()
}
@optapy.planning_solution
class PilotSchedule:
schedule_state: ScheduleState
availability_list: list[Availability]
pilot_list: list[Pilot]
duty_list: list[Duty]
solver_status: optapy.types.SolverStatus
score: optapy.score.SimpleScore
def __init__(self, schedule_state, availability_list, pilot_list, duty_list, solver_status, score=None):
self.pilot_list = pilot_list
self.availability_list = availability_list
self.schedule_state = schedule_state
self.duty_list = duty_list
self.solver_status = solver_status
self.score = score
@optapy.problem_fact_collection_property(Pilot)
@optapy.value_range_provider('pilot_range')
def get_pilot_list(self):
return self.pilot_list
@optapy.problem_fact_collection_property(Availability)
def get_availability_list(self):
return self.availability_list
@optapy.planning_entity_collection_property(Duty)
def get_duty_list(self):
return self.duty_list
@optapy.planning_score(optapy.score.HardSoftScore)
def get_score(self):
return self.score
def set_score(self, score):
self.score = score
def to_dict(self):
return {
'pilot_list': list(map(lambda pilot: pilot.to_dict(), self.pilot_list)),
'availability_list': list(map(lambda availability: availability.to_dict(), self.availability_list)),
'schedule_state': self.schedule_state.to_dict(),
'duty_list': list(map(lambda duty: duty.to_dict(), self.duty_list)),
'solver_status': self.solver_status.toString(),
'score': self.score.toString(),
}
This is a bug in OptaPy (see https://github.com/optapy/optapy/issues/146). In particular, it seems to be triggered when translating an enum.Enum class, which happens here because Availability has a type annotation for AvailabilityType, an enum. Removing the type annotation might work around the issue: AvailabilityType will then not be translated until solving, which is more forgiving (if the translation fails there, the class is kept as a pointer to a CPython class instead). I will update this answer when the issue is fixed.
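As a sketch of that workaround (my own illustration, not verified against the reporter's project), the class-level annotation for the enum field is simply dropped while the constructor still accepts it:

@optapy.problem_fact
class Availability:
    pilot: Pilot
    date: datetime.date
    # no class-level "availability_type: AvailabilityType" annotation here, so OptaPy
    # does not try to translate the AvailabilityType enum when compiling this class

    def __init__(self, pilot: Pilot = None, date: datetime.date = None, availability_type=None):
        self.pilot = pilot
        self.date = date
        self.availability_type = availability_type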
Updating from Python 3.9.0 to 3.11.1 solved the issue for me.

Mock function that is inherited while using pytest.mark.parametrize

I am new to unit testing. I created some classes to get data from an API, deserialize it to JSON, and insert its values into a DB. All the classes are working and I'm writing the unit tests right now. I have the following classes:
import requests
class GetResponse():
def getDeserialize(self, url: str):
ApiResponse = requests.get(f'{url}')
toJson = ApiResponse.json()
return toJson
class MercadoBitcoin(GetResponse):
def __init__(self) -> None:
#super().__init__()
self.beginningOfUrl = 'https://www.mercadobitcoin.net/api'
# This method is to get generally from the API
def standardGet(self, coin: str, method: str):
self.URL = f'{self.beginningOfUrl}/{coin}/{method}/'
urlGet = super().getDeserialize(self.URL)
return urlGet
# This method is to get specifically from the API Day Summary
def daySummary(self, year: int, month: int, day: int, coin: str):
method = 'day-summary'
self.URL = f'{self.beginningOfUrl}/{coin}/{method}/{year}/{month}/{day}'
urlGet = super().getDeserialize(self.URL)
return urlGet
I wrote this test:
class TestMercadoBitcoin():
@pytest.mark.parametrize(
"coin, method, expected",
[
("BTC", "ticker", "https://www.mercadobitcoin.net/api/BTC/ticker/"),
("ETH", "ticker", "https://www.mercadobitcoin.net/api/ETH/ticker/")
]
)
def test_standardGet(self, coin, method, expected):
actual = MercadoBitcoin()
actual.standardGet(coin=coin, method=method)
assert actual.URL == expected
I only want to test whether the URL is the same as expected, not actually send the request to the API. I tried these two approaches for mocking and neither worked:
@patch(requests.get)
@pytest.mark.parametrize(
"coin, method, expected",
[
("BTC", "ticker", "https://www.mercadobitcoin.net/api/BTC/ticker/"),
("ETH", "ticker", "https://www.mercadobitcoin.net/api/ETH/ticker/")
]
)
def test_standardGet(self, coin, method, expected, mock_requests):
actual = MercadoBitcoin()
actual.standardGet(coin=coin, method=method)
assert actual.URL == expected
@patch(super.getDeserialize)
@pytest.mark.parametrize(
"coin, method, expected",
[
("BTC", "ticker", "https://www.mercadobitcoin.net/api/BTC/ticker/"),
("ETH", "ticker", "https://www.mercadobitcoin.net/api/ETH/ticker/")
]
)
def test_standardGet(self, coin, method, expected, mock_requests):
actual = MercadoBitcoin()
actual.standardGet(coin=coin, method=method)
assert actual.URL == expected
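For what it's worth, one common way to make such a test pass without hitting the API is to patch the inherited method on the base class. The sketch below is my own and assumes the classes above are importable from a module named mercado_bitcoin (a hypothetical name); patch.object avoids having to spell out a string import path:

from unittest.mock import patch

import pytest

from mercado_bitcoin import GetResponse, MercadoBitcoin  # hypothetical module name


class TestMercadoBitcoin:
    @pytest.mark.parametrize(
        "coin, method, expected",
        [
            ("BTC", "ticker", "https://www.mercadobitcoin.net/api/BTC/ticker/"),
            ("ETH", "ticker", "https://www.mercadobitcoin.net/api/ETH/ticker/"),
        ],
    )
    def test_standardGet(self, coin, method, expected):
        # replace the inherited method so no real HTTP request is sent
        with patch.object(GetResponse, "getDeserialize", return_value={}):
            actual = MercadoBitcoin()
            actual.standardGet(coin=coin, method=method)
        assert actual.URL == expected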

are defaultdict(set) operations thread safe in python?

I am pretty new to Python, and I'm in a situation where I have to deal with multi-threading in my production code. I'm cutting and pasting part of the code, which replicates similar functionality, below:
class DataProcessor(object):
def __init__(self, config: DataProcessorConfig) -> None:
self.config = config
self.lock = defaultdict(set)
self.function_thread_lock = threading.Lock()
self.config_singleton = ConfigSingleton.create_instance()
def process_data(self, data_record: Dict[str, str]) -> None:
self._logger.debug(f"record: {data_record}")
try:
self.acquire_locks(data_record)
self.process_record(data_record)
finally:
self.release_locks(data_record)
def release_locks(self, data_record: Dict[str, str]) -> None:
with self.function_thread_lock:
for obj in self.config_singleton.get_ids(parameter):
id_value = obj.get_id_value(data_record)
if id_value:
self.lock[obj.id_key].remove(obj.get_id_value(data_record))
def acquire_locks(self, data_record: Dict[str, str], threshold: int = 3) -> None:
for obj in self._config_singleton.get_ids(parameter):
try_count = 1
id_value = obj.get_id_value(data_record)
if id_value:
while try_count <= threshold:
try:
self.function_thread_lock.acquire()
if id_value not in self.lock[id_obj.id_key]:
self.lock[id_obj.id_key].add(id_value)
break
finally:
self.function_thread_lock.release()
sleep_amount = 2**try_count
time.sleep(sleep_amount)
try_count += 1
else:
raise Exception("blah blah")
def async_data_processing(self, data_record: Dict) -> Future:
future = self.executor_pool.submit(self.process_data, data_record)
return future
Now, in another class, one of the functions calls async_data_processing to perform batch processing/multithreading.
But it seems that the defaultdict(set) is creating problems for the multithreaded processing: every now and then it raises a KeyError, or fails to acquire the locks. The KeyError doesn't really make sense to me, since avoiding it is the whole point of using defaultdict(set) instead of a plain dictionary in Python.
I have been struggling with this issue for a few days and haven't been able to find a proper solution or direction.
Reaching out here in hope of some help!
Would appreciate any help, thank you ☺️
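As a side note on the KeyError reasoning (my own illustration, not from the post): defaultdict only guarantees that looking up a missing dictionary key creates a fresh set. set.remove itself still raises KeyError when the value is not in the set, which is one way release_locks can fail if the matching acquire never added the value:

from collections import defaultdict

lock = defaultdict(set)

lock["order_id"].add("A1")     # dict lookup never fails: a new empty set is created on demand
lock["order_id"].remove("A1")  # fine, "A1" is present in the set

try:
    lock["order_id"].remove("A1")   # the dict lookup still succeeds...
except KeyError as exc:
    # ...but set.remove raises KeyError because "A1" is no longer in the set
    print("KeyError came from the set, not the defaultdict:", exc)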

Templated object generation in python

What is a good design pattern to implement templated object generation (not sure that's the name) in Python?
By that, I mean having a function such as:
from typing import Callable, TypeVar
T = TypeVar('T')
def mk_templated_obj_factory(template: T) -> Callable[..., T]:
"""Returns a f(**kwargs) function that returns an object of type T created by a template of the same type."""
Python has templated strings; something like `"this {is} a {template}".format` would be one way to achieve the above. If we want a "proper" function that has a signature (useful for a user so they know what arguments they need to provide!), we could do this:
import string
from functools import partial
from inspect import signature, Signature, Parameter
from operator import itemgetter
from typing import Callable
f = "hello {name} how are you {verb}?".format
def templated_string_func(template: str) -> Callable:
"""A function making templated strings. Like template.format, but with a signature"""
f = partial(str.format, template)
names = filter(None, map(itemgetter(1), string.Formatter().parse(template)))
params = [Parameter(name=name, kind=Parameter.KEYWORD_ONLY) for name in names]
f.__signature__ = Signature(params)
return f
f = templated_string_func("hello {name} how are you {verb}?")
assert f(name='Christian', verb='doing') == 'hello Christian how are you doing?'
assert str(signature(f)) == '(*, name, verb)'
But what if we want to make dict factories? Something with this behavior:
g = templated_dict_func(template={'hello': '$name', 'how are you': ['$verb', 2]})
assert g(name='Christian', verb='doing') == {'hello': 'Christian', 'how are you': ['doing', 2]}
What about other types of objects?
It seems like something that would have a solid design pattern...
I would recommend using decorators to register your template function generating functions in a dictionary that maps from types to the functions that handle them. The dictionary is needed in order to be able to template objects of any type in an extensible way, without writing all the templating logic in a single big function, but instead adding handling logic for new types as needed.
The core code is in the Templater class, just grouped here for organisation:
class Templater:
templater_registry: dict[type, Callable[[Any], TemplateFunc]] = {}
@classmethod
def register(cls, handles_type: type):
def decorator(f):
cls.templater_registry[handles_type] = f
return f
return decorator
...
Where TemplateFunc is defined as Generator[str, None, Callable[..., T]], a generator that yields strs and returns a function that returns some type T. This is chosen so that the template handlers can yield the names of their keyword arguments and then return their template function. The Templater.template_func method uses something of type TemplateFunc to generate a function with the correct signature.
The register decorator presented above is written such that:
@Templater.register(dict)
def templated_dict_func(template: dict[K, V]):
pass
is equivalent to:
def templated_dict_func(template: dict[K, V]):
pass
Templater.templater_registry[dict] = templated_dict_func
The code for templating any type is fairly self-explanatory:
class Templater:
...
@classmethod
def template_func_generator(cls, template: T) -> TemplateFunc[T]:
# if it is a type that can be a template
if type(template) in cls.templater_registry:
# then return the template handler
template_factory = cls.templater_registry[type(template)]
return template_factory(template)
else:
# else: an empty generator that returns a function that returns the template unchanged,
# since we don't know how to handle it
def just_return():
return lambda: template
yield # this yield is needed to tell python that this is a generator
return just_return()
The code for templating strings is fairly unchanged, except that the argument names are yielded instead of put in the function signature:
@Templater.register(str)
def templated_string_func(template: str) -> TemplateFunc[str]:
"""A function making templated strings. Like template.format, but with a signature"""
f = partial(str.format, template)
yield from filter(None, map(itemgetter(1), string.Formatter().parse(template)))
return f
The list template function could look like this:
@Templater.register(list)
def templated_list_func(template: list[T]) -> TemplateFunc[list[T]]:
entries = []
for item in template:
item_template_func = yield from Templater.template_func_generator(item)
entries.append(item_template_func)
def template_func(**kwargs):
return [
item_template_func(**kwargs)
for item_template_func in entries
]
return template_func
However, if you cannot guarantee that every template function can handle extra arguments, you need to track which arguments belong to which elements and only pass the necessary ones. I use the get_generator_return utility function (defined later on) to capture both the yielded values and the return value of the recursive calls.
@Templater.register(list)
def templated_list_func(template: list[T]) -> TemplateFunc[list[T]]:
entries = []
for item in template:
params, item_template_func = get_generator_return(Templater.template_func_generator(item))
params = tuple(params)
yield from params
entries.append((item_template_func, params))
def template_func(**kwargs):
return [
item_template_func(**{arg: kwargs[arg] for arg in args})
for item_template_func, args in entries
]
return template_func
The dict handler is implemented similarly. This system could be extended to support all kinds of different objects, including arbitrary dataclasses and more, but I leave that as an exercise for the reader!
Here is the entire working example:
import string
from functools import partial
from inspect import Signature, Parameter
from operator import itemgetter
from typing import Callable, Any, TypeVar, Generator, Tuple, Dict, List
from collections import namedtuple
T = TypeVar('T')
U = TypeVar('U')
def get_generator_return(gen: Generator[T, Any, U]) -> Tuple[Generator[T, Any, U], U]:
return_value = None
def inner():
nonlocal return_value
return_value = yield from gen
gen_items = list(inner())
def new_gen():
yield from gen_items
return return_value
return new_gen(), return_value
# TemplateFunc: TypeAlias = Generator[str, None, Callable[..., T]]
TemplateFunc = Generator[str, None, Callable[..., T]]
class Templater:
templater_registry: Dict[type, Callable[[Any], TemplateFunc]] = {}
@classmethod
def register(cls, handles_type: type):
def decorator(f):
cls.templater_registry[handles_type] = f
return f
return decorator
@classmethod
def template_func_generator(cls, template: T) -> TemplateFunc[T]:
if type(template) in cls.templater_registry:
template_factory = cls.templater_registry[type(template)]
return template_factory(template)
else:
# an empty generator that returns a function that returns the template unchanged,
# since we don't know how to handle it
def just_return():
return lambda: template
yield # this yield is needed to tell python that this is a generator
return just_return()
@classmethod
def template_func(cls, template: T) -> Callable[..., T]:
gen = cls.template_func_generator(template)
params, f = get_generator_return(gen)
f.__signature__ = Signature(Parameter(name=param, kind=Parameter.KEYWORD_ONLY) for param in params)
return f
@Templater.register(str)
def templated_string_func(template: str) -> TemplateFunc[str]:
"""A function making templated strings. Like template.format, but with a signature"""
f = partial(str.format, template)
yield from filter(None, map(itemgetter(1), string.Formatter().parse(template)))
return f
K = TypeVar('K')
V = TypeVar('V')
@Templater.register(dict)
def templated_dict_func(template: Dict[K, V]) -> TemplateFunc[Dict[K, V]]:
DictEntryInfo = namedtuple('DictEntryInfo', ['key_func', 'value_func', 'key_args', 'value_args'])
entries: list[DictEntryInfo] = []
for key, value in template.items():
key_params, key_template_func = get_generator_return(Templater.template_func_generator(key))
value_params, value_template_func = get_generator_return(Templater.template_func_generator(value))
key_params = tuple(key_params)
value_params = tuple(value_params)
yield from key_params
yield from value_params
entries.append(DictEntryInfo(key_template_func, value_template_func, key_params, value_params))
def template_func(**kwargs):
return {
entry_info.key_func(**{arg: kwargs[arg] for arg in entry_info.key_args}):
entry_info.value_func(**{arg: kwargs[arg] for arg in entry_info.value_args})
for entry_info in entries
}
return template_func
@Templater.register(list)
def templated_list_func(template: List[T]) -> TemplateFunc[List[T]]:
entries = []
for item in template:
params, item_template_func = get_generator_return(Templater.template_func_generator(item))
params = tuple(params)
yield from params
entries.append((item_template_func, params))
def template_func(**kwargs):
return [
item_template_func(**{arg: kwargs[arg] for arg in args})
for item_template_func, args in entries
]
return template_func
g = Templater.template_func(template={'hello': '{name}', 'how are you': ['{verb}', 2]})
assert g(name='Christian', verb='doing') == {'hello': 'Christian', 'how are you': ['doing', 2]}
print(g.__signature__)
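Picking up the "exercise for the reader": building on the registry from the full example above, a handler for a specific dataclass could look roughly like this. This is only a sketch of mine; each dataclass is registered explicitly, since the registry is keyed by the exact type of the template object:

import dataclasses

@dataclasses.dataclass
class Greeting:
    text: str
    times: int

@Templater.register(Greeting)
def templated_greeting_func(template: Greeting) -> TemplateFunc[Greeting]:
    field_entries = []
    for field in dataclasses.fields(template):
        # recurse into each field value, reusing the handlers registered above
        params, value_func = get_generator_return(Templater.template_func_generator(getattr(template, field.name)))
        params = tuple(params)
        yield from params
        field_entries.append((field.name, value_func, params))
    def template_func(**kwargs):
        return Greeting(**{
            name: value_func(**{arg: kwargs[arg] for arg in args})
            for name, value_func, args in field_entries
        })
    return template_func

h = Templater.template_func(Greeting(text='hello {name}', times=3))
assert h(name='Christian') == Greeting(text='hello Christian', times=3)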

Changing python script TBA SHA1 to SHA256

I was recently hired as a junior dev (my first job) at a bigger company which uses NetSuite. A previous dev wrote a Python script which handles pictures made by designers and uploads them to NetSuite when they are placed in a specific folder.
Since the script uses SHA1, I need to change the TBA (token-based authentication) signature to SHA256, because NetSuite no longer supports SHA1.
I have a hard time understanding the old dev's code and finding documentation on how to change the TBA from SHA1 to SHA256.
These are snippets from the code.
import datetime
import requests
import os
import oauth2 as oauth
import json
import time
import base64
import sys
import hashlib
import hmac
url = "https://xxxxx=1"
token = oauth.Token(key="xxxxxxxxxxx",secret="xxxxxxxxxx")
consumer = oauth.Consumer(key="xxxxxxxxxxxxxxxx",secret="xxxxxxxxxxxxxxxx")
realm="xxxxxxxxxxxxxx"
signature_method = oauth.SignatureMethod_HMAC_SHA1()
In this part, I understand that he instantiates oauth.SignatureMethod_HMAC_SHA1().
Then, when I go into the oauth2 library file, I find this:
class SignatureMethod_HMAC_SHA1(SignatureMethod):
name = 'HMAC-SHA1'
def signing_base(self, request, consumer, token):
if (not hasattr(request, 'normalized_url') or request.normalized_url is None):
raise ValueError("Base URL for request is not set.")
sig = (
escape(request.method),
escape(request.normalized_url),
escape(request.get_normalized_parameters()),
)
key = '%s&' % escape(consumer.secret)
if token:
key += escape(token.secret)
raw = '&'.join(sig)
return key.encode('ascii'), raw.encode('ascii')
def sign(self, request, consumer, token):
"""Builds the base signature string."""
key, raw = self.signing_base(request, consumer, token)
hashed = hmac.new(key, raw, sha1)
# Calculate the digest base 64.
return binascii.b2a_base64(hashed.digest())[:-1]
I looked through this file, and it does not contain any signature methods for SHA256, only SHA1 and PLAINTEXT.
I tried to change the values to SHA256, but of course that did not work.
I tried to look up documentation on oauth2, but I only found very little, and it seems the library only supports SHA1 and PLAINTEXT.
So how do I change the script to work with SHA256 instead of SHA1?
EDIT (to answer a comment): hashlib contains this:
class _Hash(object):
digest_size: int
block_size: int
# [Python documentation note] Changed in version 3.4: The name attribute has
# been present in CPython since its inception, but until Python 3.4 was not
# formally specified, so may not exist on some platforms
name: str
def __init__(self, data: _DataType = ...) -> None: ...
def copy(self) -> _Hash: ...
def digest(self) -> bytes: ...
def hexdigest(self) -> str: ...
def update(self, arg: _DataType) -> None: ...
def md5(arg: _DataType = ...) -> _Hash: ...
def sha1(arg: _DataType = ...) -> _Hash: ...
def sha224(arg: _DataType = ...) -> _Hash: ...
def sha256(arg: _DataType = ...) -> _Hash: ...
def sha384(arg: _DataType = ...) -> _Hash: ...
def sha512(arg: _DataType = ...) -> _Hash: ...
def new(name: str, data: _DataType = ...) -> _Hash: ...
algorithms_guaranteed: AbstractSet[str]
algorithms_available: AbstractSet[str]
def pbkdf2_hmac(hash_name: str, password: _DataType, salt: _DataType, iterations: int, dklen: Optional[int] = ...) -> bytes: ...
if sys.version_info >= (3, 6):
class _VarLenHash(object):
digest_size: int
block_size: int
name: str
def __init__(self, data: _DataType = ...) -> None: ...
def copy(self) -> _VarLenHash: ...
def digest(self, length: int) -> bytes: ...
def hexdigest(self, length: int) -> str: ...
def update(self, arg: _DataType) -> None: ...
sha3_224 = _Hash
sha3_256 = _Hash
sha3_384 = _Hash
sha3_512 = _Hash
shake_128 = _VarLenHash
shake_256 = _VarLenHash
def scrypt(password: _DataType, *, salt: _DataType, n: int, r: int, p: int, maxmem: int = ..., dklen: int = ...) -> bytes: ...
class _BlakeHash(_Hash):
MAX_DIGEST_SIZE: int
MAX_KEY_SIZE: int
PERSON_SIZE: int
SALT_SIZE: int
def __init__(self, data: _DataType = ..., digest_size: int = ..., key: _DataType = ..., salt: _DataType = ..., person: _DataType = ..., fanout: int = ..., depth: int = ..., leaf_size: int = ..., node_offset: int = ..., node_depth: int = ..., inner_size: int = ..., last_node: bool = ...) -> None: ...
blake2b = _BlakeHash
blake2s = _BlakeHash
There is already a sha256() function in the hashlib module,
so you can try adding a new class SignatureMethod_HMAC_SHA256 to the oauth2 file, similar to the SHA1 one.
Just change the hash argument of the hmac.new() call like this:
hashed = hmac.new(key, raw, sha256)
The whole class can look like this:
class SignatureMethod_HMAC_SHA256(SignatureMethod):
name = 'HMAC-SHA256'
def signing_base(self, request, consumer, token):
if (not hasattr(request, 'normalized_url') or request.normalized_url is None):
raise ValueError("Base URL for request is not set.")
sig = (
escape(request.method),
escape(request.normalized_url),
escape(request.get_normalized_parameters()),
)
key = '%s&' % escape(consumer.secret)
if token:
key += escape(token.secret)
raw = '&'.join(sig)
return key.encode('ascii'), raw.encode('ascii')
def sign(self, request, consumer, token):
"""Builds the base signature string."""
key, raw = self.signing_base(request, consumer, token)
hashed = hmac.new(key, raw, sha256)
# Calculate the digest base 64.
return binascii.b2a_base64(hashed.digest())[:-1]
Then, in your script, you can simply use the new SHA256 method instead of the deprecated SHA1 one:
signature_method = oauth.SignatureMethod_HMAC_SHA256()
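One detail to double-check (my assumption, since the snippet above relies on the name sha256 being available inside the oauth2 module): the module imports sha1 from hashlib for the SHA1 class, so the corresponding import is needed for the new class as well, for example:

# near the top of the oauth2 module, alongside its existing hashlib import
from hashlib import sha256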
