How to access a class attribute (a variable declared inside a class) from outside the class in Python?

i have enums python file which has :
class ClassificationType(object):
    """Enum-style container mapping classification type names to int codes
    (as found in the asker's enums.py)."""
    CLASSIFICATION_TYPE_UNSPECIFIED = 0
    MULTICLASS = 1
    MULTILABEL = 2
i am writing another python file to get the value of the variable declared inside the enums class.
def dataset(model_typ):
    """Build a dataset spec; `model_typ` is currently unused — the question
    below asks how to pass it through instead of hard-coding MULTICLASS."""
    dataset_spec = {
        "classification": enums.ClassificationType.MULTICLASS
    }
as per above code, i am able to get the value of MULTICLASS as 1.
now i need to pass the MULTICLASS/MULTILABEL/CLASSIFICATION_TYPE_UNSPECIFIED as argument(model_type) and pass it to dataset_spec.
how to do it?
thanks in advance
NOTE: I don't want to change the enums.py file.

class ClassificationType(object):
    """Local copy of the enum-style class from enums.py."""
    CLASSIFICATION_TYPE_UNSPECIFIED = 0
    MULTICLASS = 1
    MULTILABEL = 2


def dataset(model_typ_multiclass):
    """Build the dataset spec from the classification code passed in.

    Returns the spec dict so callers can actually use it (the original
    built the dict and silently discarded it).
    """
    dataset_spec = {
        "classification": model_typ_multiclass
    }
    return dataset_spec


obj = ClassificationType()
model_typ_multiclass = obj.MULTICLASS
dataset(model_typ_multiclass)

Try using the below code in the other file (not the enums.py file):
from enums import ClassificationType as ct
import random


def dataset(model_typ):
    """Print the dataset spec built from the classification code passed in."""
    dataset_spec = {
        "classification": model_typ
    }
    print(dataset_spec)


# Pick one of the enum values at random just to demonstrate that any of
# them can be passed through as the argument.
dataset(random.choice([ct.MULTILABEL, ct.MULTICLASS]))
Output (varies from run to run, since the model type is chosen at random; here MULTILABEL was picked):
{'classification': 2}
I simply changed the value of dataset_spec's "classification" key to the argument model_typ. Then, at the end of the code, I call the dataset function and pass in enums.ClassificationType.MULTICLASS as the argument to get the MULTICLASS variable from the enums.py file.

Related

Customize JSON representation of Pydantic model

I have a Pydantic model defined as follows:
class IntOrString(BaseModel):
    """Holds either an int or a string; exactly one field is expected to be
    set at a time (the question asks how to serialize it as a bare value)."""
    int_value: Optional[StrictInt] = None
    string_value: Optional[StrictStr] = None
Is there a way I can customize json() to make the output as follows:
p = IntOrString(int_value=123)
print(p.json())
#> 123
p = IntOrString(string_value="Hello World")
print(p.json())
#> "Hello World"
Note: IntOrString can be a nested attribute of another Pydantic model.
In addition to object (e.g. {"id": 123}), string, number boolean are also valid JSON type. In other words, the question is can a pydantic model be serialized to string, number or boolean instead of object?
I know it's a weird requirement. Just want to know if that's possible.
Thank you.
For such a simple thing as excluding None-valued fields in the JSON representation, you can simply use the built-in exclude_none parameter:
from typing import Optional

from pydantic import BaseModel, StrictInt, StrictStr


class Dummy(BaseModel):
    """Model whose None-valued fields should be dropped from JSON output."""
    id: Optional[StrictInt] = None
    name: Optional[StrictStr] = None


class Other(BaseModel):
    """Wrapper model showing that exclude_none also applies when nested."""
    dummy: Dummy


if __name__ == '__main__':
    p = Dummy(id=123)
    print(p.json(exclude_none=True))
    p = Dummy(name="Hello World")
    print(p.json(exclude_none=True))
    o = Other(dummy=Dummy(id=123))
    print(o.json(exclude_none=True))
Output:
{"id": 123}
{"name": "Hello World"}
{"dummy": {"id": 123}}
If you want more complex stuff, you may want to provide your own custom JSON encoder either via the encoder parameter on a call-by-call basis or in the model config via json_dumps or json_encoders.

How to handle missing JSON nested keys from an API response in python?

Here is the JSON response I get from an API request:
{
"associates": [
{
"name":"DOE",
"fname":"John",
"direct_shares":50,
"direct_shares_details":{
"shares_PP":25,
"shares_NP":25
},
"indirect_shares":50,
"indirect_shares_details": {
"first_type": {
"shares_PP": 25,
"shares_NP": 0
},
"second_type": {
"shares_PP": 25,
"shares_NP": 0
}
}
}
]
}
However, in some occasions, some values will be equal to None. In that case, I handle it in my function for all the values that I know will be integers. But it doesn't work in this scenario for the nested keys inside indirect_shares_details:
{
"associates": [
{
"name":"DOE",
"fname":"John",
"direct_shares":50,
"direct_shares_details":{
"shares_PP":25,
"shares_NP":25
},
"indirect_shares":None,
"indirect_shares_details": None
}
]
}
So when I run my function to get the API values and put them in a custom dict, I get an error because the keys are simply nonexistent in the response.
def get_shares_data(response, company=None):
    """Flatten the associates in an API response into a list of dicts.

    Missing or None-valued nested keys (e.g. "indirect_shares_details")
    no longer raise KeyError/TypeError — they are treated as absent and
    the corresponding computed share ends up as 0.0.

    Args:
        response: decoded JSON payload containing an "associates" list.
        company: optional dict holding the overall "Shares" figure.  The
            original code read an (undefined here) global `company`; it is
            now an explicit, backward-compatible keyword parameter.

    Returns:
        List of per-associate dicts with "PM_shares", "full_name" and
        "details" keys.
    """
    associate_from_api = []
    # Hoisted out of the loop: the company-level share is loop-invariant.
    pm_shares = round((company or {}).get("Shares", 0.0), 2)
    for assoc in response["associates"]:
        # `or {}` covers both missing keys and keys explicitly set to None.
        direct = assoc.get("direct_shares_details") or {}
        indirect = assoc.get("indirect_shares_details") or {}
        first = indirect.get("first_type") or {}
        second = indirect.get("second_type") or {}
        associate_data = {
            "PM_shares": pm_shares,
            # Original had `+ ["fname"]` (a bare list literal) — clearly
            # meant the associate's first name.
            "full_name": assoc.get("name", "") + " " + assoc.get("fname", ""),
            "details": {
                "shares_in_PM": assoc.get("direct_shares"),
                "shares_PP_in_PM": direct.get("shares_PP"),
                "shares_NP_in_PM": direct.get("shares_NP"),
                "shares_directe": assoc.get("indirect_shares"),
                "shares_indir_PP_1": first.get("shares_PP"),
                "shares_indir_NP_1": first.get("shares_NP"),
                "shares_indir_PP_2": second.get("shares_PP"),
                "shares_indir_NP_2": second.get("shares_NP"),
            },
        }
        for key, value in associate_data["details"].items():
            if value is not None:
                associate_data["details"][key] = value * pm_shares / 100
            else:
                # Original wrote to a non-existent "calculs" dict here;
                # the intent is clearly to zero out the missing detail.
                associate_data["details"][key] = 0.0
        associate_from_api.append(associate_data)
    return associate_from_api
I've tried conditioning the access of the nested keys only if the parent key wasn't equal to None but I ended up declaring 3 different dictionaries inside if/else conditions and it turned into a mess, is there an efficient way to achieve this?
You can try accessing the values using dict.get('key') instead of accessing them directly, as in dict['key'].
Using the first approach, you will get None instead of KeyError if the key is not there.
EDIT: tested using the dictionary from the question:
You can try pydantic
Install pydantic
pip install pydantic
# OR
conda install pydantic -c conda-forge
Define some models based on your response structure
from pydantic import BaseModel
from typing import List, Optional


# "shares_PP"/"shares_NP" appear several times in the response,
# so they are factored into one shared model.
class ShareDetail(BaseModel):
    shares_PP: int
    shares_NP: int


class IndirectSharesDetails(BaseModel):
    first_type: ShareDetail
    second_type: ShareDetail


class Associate(BaseModel):
    name: str
    fname: str
    direct_shares: int
    direct_shares_details: ShareDetail
    indirect_shares: int = 0  # Sets a default value for this field.
    indirect_shares_details: Optional[IndirectSharesDetails] = None


class ResponseModel(BaseModel):
    associates: List[Associate]
use ResponseModel.parse_xxx functions to parse response.
Here I use the parse_file function; you can also use the parse_json function.
See: https://pydantic-docs.helpmanual.io/usage/models/#helper-functions
def main():
    """Parse and validate the saved API response in one step."""
    res = ResponseModel.parse_file("./NullResponse.json",
                                   content_type="application/json")
    print(res.dict())


if __name__ == "__main__":
    main()
Then the response can be successfully parsed. And it automatically validates the input.

Trying to use Embedded Documents Fields in MongoDB

I'm following freecodecamp's video on MongoDB using mongoengine (as db). I'm trying to use the embedded document list field to add information to my main document. Also using a Streamlit webapp as my input source
My classes are:
# Format must be defined before Contest references it (or the field must
# use the string 'Format'); the original order raised NameError.
class Format(db.EmbeddedDocument):
    """Embedded sub-document holding a contest's format info."""
    contest_id = db.ObjectIdField()
    name = db.StringField()


class Contest(db.Document):
    """Top-level document; stores its Format inline as an embedded doc."""
    date_created = db.DateTimeField(default=datetime.today)
    name = db.StringField(required=True)
    format = db.EmbeddedDocumentField(Format)
Then I've tried a few different ways to to add the format to a specific contest instance.
Try #1
def set_format(active_contest):
    """Reproduction of 'Try #1' from the question.

    `format` starts out as None, so the attribute assignments below raise
    AttributeError — this is exactly the reported bug (kept as-is here).
    """
    format: Format = None
    name = st.text_input('Name of Format:')
    submit = st.button('Set Format Name')
    if submit == True:
        format.contest_id = active_contest.id
        format.name = name
        active_contest.save()
setting Format to None is the way the freecodecamp video shows... but i get this error: AttributeError: 'NoneType' object has no attribute 'contest_id'.
So I tried switching it to: format = Format()... this way it doesn't give me an error, but also doesn't update the Contest document to include the format information.
I also tried switching active_contest.save() to format.save() but then i get a: AttributeError: 'Format' object has no attribute 'save'
I've also tried the update function instead of save... but i get similar errors every-which way.
New to mongoDB and programming in general. Thanks in advance!
First of all, if you want to store Format as embedded document, the contest_id is not necessary in Format class. With this approach you will end with something like this in your MongoDB collection:
{
"date_created":ISODate(...),
"name": "...",
"format": {
"name": "..."
}
}
Another approach could be something like:
class Contest(db.Document):
date_created = db.DateTimeField(default=datetime.today)
name = db.StringField(required=True)
format = db.ReferenceField('Format') # <- Replaced by ReferenceField
class Format(db.Document): # <- EmbeddedDocument replaced by Document
name = db.StringField()
In that case each instance of "Format" will be stored in a separate collection. So you will end with something like this in MongoDB:
Collection Contest:
{
"date_created":ISODate(...),
"name": "...",
"format": :ObjectId("...") // <-- here's the relation field
}
Collection Format:
{
"_id":"...",
"name":"..",
}
Both approaches shares the same code:
def set_format(active_contest):  # <-- here's the instance of 'Contest'
    """Create a Format, attach it to the contest, and save the parent."""
    format: Format = Format()  # <-- create a new Format instance
    name = st.text_input('Name of Format:')
    submit = st.button('Set Format Name')
    if submit == True:
        format.name = name
        active_contest.format = format  # <-- assigns the format to contest
        # Stores both, because you are saving the 'parent' object.
        # (The original line had this remark after `save()` without a `#`,
        # which made it a SyntaxError.)
        active_contest.save()

Dynamic entries in a settings module

I'm writing a package that imports audio files, processes them, plots them etc., for research purposes.
At each stage of the pipeline, settings are pulled from a settings module as shown below.
I want to be able to update a global setting like MODEL_NAME and have it update in any dicts containing it too.
settings.py
MODEL_NAME = 'Test1'
DAT_DIR = 'dir1/dir2/'

# NOTE: MODEL_NAME is read exactly once, when this dict literal is
# evaluated at import time, so rebinding s.MODEL_NAME afterwards is not
# reflected inside PROCESSING.
PROCESSING = {
    "key1": {
        "subkey2": 0,
        "subkey3": 1,
    },
    "key2": {
        "subkey3": MODEL_NAME,
    },
}
run.py
import settings as s
wavs = import_wavs(s.DAT_DIR)
# NOTE(review): this rebinds the name `proc_wavs` to the function's result,
# shadowing the function itself — a second call would then fail.
proc_wavs = proc_wavs(wavs,s.PROCESSING)
Some of the settings dicts I would like to contain MODEL_NAME, which works fine. The problem arises when I want to change MODEL_NAME during runtime. So if I do:
import settings as s
wavs = import_wavs(s.DAT_DIR)
s.MODEL_NAME='test1'
# PROCESSING was built once at import time, so rebinding s.MODEL_NAME
# above does NOT change s.PROCESSING — both calls see the original value.
proc_wavs1 = proc_wavs(wavs,s.PROCESSING)
s.MODEL_NAME='test2'
proc_wavs2 = proc_wavs(wavs,s.PROCESSING)
But obviously both of the calls to s.PROCESSING will contain the MODEL_NAME originally assigned in the settings file.
What is the best way to have it update?
Possible solutions I've thought of:
Store the variables as a mutable type, then update it e.g.:
# NOTE(review): these store a *list* at index 0; presumably either
# `s.MODEL_NAME = ["test1"]` or `s.MODEL_NAME[0] = "test1"` was intended.
s.MODEL_NAME[0] = ["test1"]
# do processing things
s.MODEL_NAME[0] = ["test2"]
Define each setting category as a function instead, so it is rerun on
each call e.g.:
MODEL_NAME = 'test1'


def PROCESSING():
    """Rebuild the settings dict on every call so it always reflects the
    *current* value of the module-level MODEL_NAME."""
    return {
        "key1": {
            "subkey2": 0,
            "subkey3": 1
        },
        "key2": {
            "subkey3": MODEL_NAME
        }
    }
Then
s.MODEL_NAME = 'test1'
proc_wavs1 = proc_wavs(wavs, s.PROCESSING())
s.MODEL_NAME = 'test2'
# fixed: the original assigned to proc_wavs1 twice (copy-paste slip),
# losing the first result.
proc_wavs2 = proc_wavs(wavs, s.PROCESSING())
I thought this would work great, but then it's very difficult to
change any entries of the functions during runtime eg if I wanted to
update the value of subkey2 and run something else.
Other thoughts maybe a class with an update method or something, does anyone have any better ideas?
You want to configure generic and specific settings structured in dictionaries for functions that perform waves analysis.
Start by defining a settings class, like :
class Settings:
    """Per-run analysis settings.

    Generic settings are class-level; model-specific parameters live on
    each instance so several configurations can coexist.
    """

    # shared across all Settings instances
    data_directory = 'path/to/waves'

    def __init__(self, model):
        self.parameters = {
            "key1": {
                "subkey1": 0,
                "subkey2": 0
            },
            "key2": {
                "subkey1": model
            }
        }
# create a new settings *instance* based on model1
s1 = Settings('model1')
# attribute values to specific keys
s1.parameters["key1"]["subkey1"] = 3.1415926
s1.parameters["key1"]["subkey2"] = 42

# another one based on model2
s2 = Settings('model2')
s2.parameters["key1"]["subkey1"] = 360
# fixed: the original wrote `1,618033989`, which builds the tuple
# (1, 618033989); the golden ratio with a decimal point was meant.
s2.parameters["key1"]["subkey2"] = 1.618033989

# load the audio
wavs = openWaves(Settings.data_directory)
# process with the given parameters
results1 = processWaves(wavs, s1)
results2 = processWaves(wavs, s2)

Reading a dictionary from an imported module at runtime

I have following module root_file.py. This file contains number of blocks like.
# NOTE(review): as written this is a SyntaxError — adjacent string
# literals concatenate, so the entries need commas between them (the
# second answer below shows the corrected dict).
Name = {
'1':'a'
'2':'b'
'3':'c'
}
In other file I am using
f1= __import__('root_file')
Now the requirement is that I have to read values a,b,c at runtime using variables like
for reading a
id=1
# NOTE(review): `Name` is undefined here — the *string* 'Name' was likely
# intended (see the getattr-based answers below).
app=Name
# Module objects are not subscriptable, which produces the TypeError
# quoted below.  (This is also Python 2 print syntax.)
print f1[app][id]
but getting error that
TypeError: unsubscriptable object
How about
import root_file as f1

# The keys of root_file.Name are the strings '1'..'3', so look up with a
# string, not the int 1.  Renamed from `id` to avoid shadowing the builtin.
entry_id = '1'
app = 'Name'
# getattr looks the module attribute up by *name*, so `app` can be any
# string decided at runtime.  (Converted from Python 2 `print x` to the
# print() call used everywhere else on this page.)
print(getattr(f1, app)[entry_id])  # or f1.Name[entry_id]
Uh, well, if I understand what you are trying to do:
In root_file.py
# Corrected root_file.py dict — every entry is separated by a comma
# (the comma after the final entry is optional but conventional).
Name = {
    '1': 'a',
    '2': 'b',
    '3': 'c',
}
Then, in the other file:
import root_file as mymodule
mydict = getattr(mymodule, "Name")
# "Name" could very well be stored in a variable
# now mydict equals Name from root_file
# and you can access its properties, e.g.
mydict['2'] == 'b' # is a True statement

Categories