So I'm creating a basic TUI for a script I created. The goal is to collect several variables that include paths to directories and files. What I'm not sure about is once I've created the visual aspect of it, how to get those pieces of information to interact with other parts of the code.
Below is what I have so far in terms of the visual portion (the other part is about 500 lines), and honestly I'm at a loss on how to even print any of the variables set under the class and any pointers would be greatly appreciated.
#!/usr/bin/env python
# encoding: utf-8
import npyscreen

class PlistCreator(npyscreen.NPSApp):
    def main(self):
        screen = npyscreen.Form(name="Welcome to Nicks Playlist Creator!")
        plistName = screen.add(npyscreen.TitleText, name="Playlist Name:")
        csvPath = screen.add(npyscreen.TitleFilenameCombo, name="CSV Schedule:")
        toaPath = screen.add(npyscreen.TitleFilenameCombo, name="Path to ToA Video Folder:", use_two_lines=True)
        outputPath = screen.add(npyscreen.TitleFilenameCombo, name="Output Folder:", use_two_lines=True)
        dateOfAir = screen.add(npyscreen.TitleDateCombo, name="Date of Air:")
        timeOfStart = screen.add(npyscreen.TitleText, name="Time of Air (TC):")
        screen.edit()

if __name__ == "__main__":
    App = PlistCreator()
    App.run()
You can get the value of any form widget using dot notation. Given
plistName = screen.add(npyscreen.TitleText, name="Playlist Name:")
you can read what the user entered, after screen.edit() returns, with
playlist_name = plistName.value
Note there are no parentheses after value: it is an attribute, not a method. Once you have it in a variable, you can build another method in the class to handle the information.
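For example, here is a minimal sketch: store the widgets on self so another method in the class can read their .value attributes after screen.edit() returns. The handle_values method and the commented build_playlist call are placeholders for however the rest of your ~500 lines consumes these values.

#!/usr/bin/env python
# encoding: utf-8
import npyscreen

class PlistCreator(npyscreen.NPSApp):
    def main(self):
        screen = npyscreen.Form(name="Welcome to Nicks Playlist Creator!")
        # Keep references on self so other methods can read them later
        self.plistName = screen.add(npyscreen.TitleText, name="Playlist Name:")
        self.csvPath = screen.add(npyscreen.TitleFilenameCombo, name="CSV Schedule:")
        # edit() returns when the user leaves the form; the values are then set
        screen.edit()
        self.handle_values()

    def handle_values(self):
        # .value is an attribute, not a method
        print("Playlist name:", self.plistName.value)
        print("CSV path:", self.csvPath.value)
        # hypothetical: hand the values to the rest of your script, e.g.
        # build_playlist(self.plistName.value, self.csvPath.value)

if __name__ == "__main__":
    App = PlistCreator()
    App.run()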
OK, so I am having a weird one. I am running Python in SideFX's Hython (their custom build) with PDG. The only real difference between Hython and vanilla Python is some internal functions for handling geometry data and compiled nodes, which shouldn't be an issue even though they are being used.
The way the code runs, I am generating a list of files from the disk which creates PDG work items. Those work items are then processed in parallel by PDG. Here is the code for that:
import importlib.util
import pdg
import os
from pdg.processor import PyProcessor
import json

class CustomProcessor(PyProcessor):
    def __init__(self, node):
        PyProcessor.__init__(self, node)
        self.extractor_module = 'GeoExtractor'

    def onGenerate(self, item_holder, upstream_items, generation_type):
        for upstream_item in upstream_items:
            new_item = item_holder.addWorkItem(parent=upstream_item, inProcess=True)
        return pdg.result.Success

    def onCookTask(self, work_item):
        spec = importlib.util.spec_from_file_location("callback", "Geo2Custom.py")
        GE = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(GE)
        GE.convert(f"{work_item.attribValue('directory')}/{work_item.attribValue('filename')}{work_item.attribValue('extension')}", work_item.index, f'FRAME {work_item.index}', self.extractor_module)
        return pdg.result.Success
def bulk_convert(path_pattern, extractor_module='GeoExtractor'):
    type_registry = pdg.TypeRegistry.types()
    try:
        type_registry.registerNode(CustomProcessor, pdg.nodeType.Processor, name="customprocessor", label="Custom Processor", category="Custom")
    except Exception:
        pass
    whereItWorks = pdg.GraphContext("testBed")
    whatWorks = whereItWorks.addScheduler("localscheduler")
    whatWorks.setWorkingDir(os.getcwd(), '$HIP')
    whereItWorks.setValues(f'{whatWorks.name}', {'maxprocsmenu': -1, 'tempdirmenu': 0, 'verbose': 1})
    findem = whereItWorks.addNode("filepattern")
    whereItWorks.setValue(f'{findem.name}', 'pattern', path_pattern, 0)
    generic = whereItWorks.addNode("genericgenerator")
    whereItWorks.setValue(generic.name, 'itemcount', 4, 0)
    custom = whereItWorks.addNode("customprocessor")
    custom.extractor_module = extractor_module
    node1 = [findem]
    node2 = [custom] * len(node1)
    for n1, n2 in zip(node1, node2):
        whereItWorks.connect(f'{n1.name}.output', f'{n2.name}.input')
        n2.cook(True)
        for node in whereItWorks.graph.nodes():
            node.dirty(False)
        whereItWorks.disconnect(f'{n1.name}.output', f'{n2.name}.input')
    print("FULLY DONE")
import os
import hou
import traceback
import CustomWriter
import importlib

def convert(filename, frame_id, marker, extractor_module='GeoExtractor'):
    Extractor = importlib.__import__(extractor_module)
    base, ext = os.path.splitext(filename)
    if ext == '.sc':
        base = os.path.splitext(base)[0]
    dest_file = base + ".custom"
    geo = hou.Geometry()
    geo.loadFromFile(filename)
    try:
        frame = Extractor.extract_geometry(geo, frame_id)
    except Exception as e:
        print(f'F{frame_id} Geometry extraction failed: {traceback.format_exc()}.')
        return None
    print(f'F{frame_id} Geometry extracted. Writing file {dest_file}.')
    try:
        CustomWriter.write_frame(frame, dest_file)
    except Exception as e:
        print(f'F{frame_id} writing failed: {e}.')
    print(marker + " SUCCESS")
The onCookTask code is run when the work item is processed.
Inside of the GeoExtractor.py program I am importing the geometry file defined by the work item, then converting it into a couple Pandas dataframes to collate and process the massive volumes of data quickly, which is then passed to a custom set of functions for writing binary files to disk from the Pandas data.
Everything appears to run flawlessly, until I check my output binaries and see that they grow in size much more than they should. This indicates that something is either being shared between instances or not cleared from memory, and that subsequent loads of the extractor code are appending to dataframes that share the same names.
I have run the GeoExtractor code sequentially, with the Python instance closing between each file conversion, using the exact same code, and the files are fine, growing only slowly as the geometry data volume grows. So the issue has to lie somewhere in the parallelization using PDG and in calling the GeoExtractor.py code over and over for each work item.
I have contemplated moving the importlib stuff to the __init__() for the class leaving only the call to the member function in the onCookTask() function. Maybe even going so far as to pass a unique variable for each work item which is used inside GeoExtractor to create a closure of the internal functions so they are unique instances in memory.
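If the growth really does come from module-level state in GeoExtractor (an assumption; it could just as well live in CustomWriter), one cheap way to test that theory is to force a fresh copy of the extractor module for every conversion rather than reusing the one cached in sys.modules. A minimal sketch, where load_fresh is a hypothetical helper, not part of the existing code:

import importlib
import sys

def load_fresh(module_name):
    # Hypothetical helper: drop any cached copy of the module so
    # module-level state (e.g. dataframes kept in globals) from a
    # previous work item cannot carry over, then import it again.
    sys.modules.pop(module_name, None)
    return importlib.import_module(module_name)

# inside Geo2Custom.convert(), instead of importlib.__import__(extractor_module):
# Extractor = load_fresh(extractor_module)

The module loaded with spec_from_file_location in onCookTask is a new object on every cook, but importlib.__import__ inside convert() returns the cached GeoExtractor, so any globals it keeps would survive between work items that run in the same process.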
I tried to do a stripped-down version of GeoExtractor, but since I'm not sure where the leak is, I just ended up pulling out comments with proprietary or superfluous information and changing some custom library names. The file ended up kinda long, so I am including a pastebin: https://pastebin.com/4HHS8D2W
As for CustomGeometry and CustomWriter, there is no working form of either of those libraries that will be NDA safe, so unfortunately they have to stay blackboxed. The CustomGeometry is a handful of container classes which organize all of the data coming out of the geometry, and the writer is a formatter/writer for the binary format we are utilizing. I am hoping the issue wouldn't be in either of them.
Edit 1: I fixed an issue in the example code.
Edit 2: Added larger examples.
I'm a newbie, writing 2 functions in separate files (weather.py, TTS_phrase.py) in the same directory, to then use together:
def weather_now(my_town):
    [My code]
    return ("Sunny skies")

city = input("City Name : ")
print(weather_now(city))
and
def talk(text_to_convert):
    [My code].save(filename)
    print("Done")

t = input("What text would you like to convert to speech?\n")
talk(t)
They both work fine independently. But when I create a new .py file and import ONLY the two functions, not the parts after them, it still seems to run each whole file, not JUST the function.
import weather as w
import TTS_phrase as TTS
text1 = w.weather_now("London")
TTS.talk(text1)
The indents are correct. It's asking me for inputs and tries to save 2 files when I run this code. Am I doing something wrong? I'd really appreciate a steer. Thanks.
First you must understand that whenever a file is imported, the whole script in it is executed. So you can add a check so that some code only runs when the file is executed directly, like this:
def weather_now(my_town):
    [My code]
    return ("Sunny skies")

if __name__ == "__main__":
    city = input("City Name : ")
    print(weather_now(city))
and
def talk(text_to_convert):
    [My code].save(filename)
    print("Done")

if __name__ == "__main__":
    t = input("What text would you like to convert to speech?\n")
    talk(t)
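To see what the guard does, here is a tiny standalone demonstration (demo.py is a made-up file, not part of your project): __name__ equals "__main__" only when the file is run directly; when the file is imported, __name__ is set to the module's name, so the guarded block is skipped.

# demo.py
print("__name__ is", __name__)

if __name__ == "__main__":
    # only runs for 'python demo.py', not for 'import demo'
    print("running demo.py directly")

Running python demo.py prints both lines; import demo from another script prints only the first, with __name__ equal to "demo".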
I have a text file I am using as a calibration file for a rudder indicator I am making. (It stores the port and stbd limits and the center position)
I would like to read this file when the program boots so it has the same calibration settings as before.
I can store the 3 numbers as a str in a .txt file and know how to recall them as a list.
My thought is to run a function when the app starts that defines each part of the list as a variable, e.g.
calibrationfile1 = open('calfile.txt','r')
lines = calibrationfile1.readlines()
calvalue1 = lines[0].replace(",","").replace("[","").replace("]","")
calvalue = calvalue1.split()
rudderlimits = calvalue
port_rudder_limit = rudderlimits[0]
stbd_rudder_limit = rudderlimits[1]
center_position = rudderlimits[2]
How do I call this in a function at startup and make the variables available in another function? I don't want to use 'global'.
I have already made a function that performs the calibration and creates this calfile.txt, and it works.
thanks for your help :)
You can load the file in the app's on_start method:
class YourApp(App):
    def on_start(self):
        self.calibration_data = your_file_loading_function()  # returns a dict?

... # other places in your code

class Popcorn(Widget):
    def on_callback(self):
        port_rudder_limit = App.get_running_app().calibration_data['port_rudder_limit']
        ... # do something....
Or from the kv file:
<MyWidget>:
    port_rudder_limit: app.calibration_data['port_rudder_limit']
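For the loading function itself, here is a possible sketch based on the parsing code you already posted. your_file_loading_function and the dict keys are just placeholder names, and converting to float is an assumption; drop it if you want the raw strings.

def your_file_loading_function(path='calfile.txt'):
    # Read the single line written by the calibration routine and
    # strip the list punctuation before splitting into three values.
    with open(path, 'r') as calfile:
        line = calfile.readline()
    values = line.replace(",", "").replace("[", "").replace("]", "").split()
    # Returning a dict lets other functions look values up by name
    # via App.get_running_app().calibration_data, with no globals.
    return {
        'port_rudder_limit': float(values[0]),
        'stbd_rudder_limit': float(values[1]),
        'center_position': float(values[2]),
    }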
For a few days now, I have been struggling with a problem, namely that the settings written by my settings class for a parser are not persistent when the program gets restarted. This problem occurs only on Windows, but in both Python x86 and x64 environments and when compiled using PyInstaller. It also does not matter whether the program is run as Administrator or not.
When the program first runs, write_def(self) is called by the constructor. This function writes the defaults correctly to the file specified. After this, read_set(self) is called so the class variables are set. These class variables then match the default values.
In another file, namely frames.py, write_set(self) is called, and all settings are passed as arguments. Using print statements I have asserted that the write_set(self) function receives the correct values. No errors occur when writing the settings to the file, and when running read_set(self) again, the new settings are read correctly and this is also shown in the GUI.
However, when closing the program and running it again, the default settings are again shown. This is not behaviour I expected.
Below I have added the settings class, implemented with cPickle. When using pickle the behaviour is the same. When using shelve as in this file, the behaviour is the same. When using dill, the behaviour is the same. When implementing a ConfigParser.RawConfigParser (in the configparser branch of the GitHub repository linked to earlier), the behaviour is the same, and additionally when viewing the settings file in a text editor it is visible that the settings in the file are not updated.
When running the same code on Linux (Ubuntu 16.04.1 LTS with Python 2.7), everything works as expected with pickle and shelve versions. The settings are correctly saved and loaded from the file. Am I doing something wrong? Is it a Windows-specific issue with Python?
Thank you in advance for any help!
RedFantom.
# Written by RedFantom, Wing Commander of Thranta Squadron and Daethyra, Squadron Leader of Thranta Squadron
# Thranta Squadron GSF CombatLog Parser, Copyright (C) 2016 by RedFantom and Daethyra
# For license see LICENSE
# UI imports
import tkMessageBox
# General imports
import getpass
import os
import cPickle
# Own modules
import vars
# Class with default settings for in the settings file
class defaults:
    # Version to display in settings tab
    version = "2.0.0_alpha"
    # Path to get the CombatLogs from
    cl_path = 'C:/Users/' + getpass.getuser() + "/Documents/Star Wars - The Old Republic/CombatLogs"
    # Automatically send and retrieve names and hashes of ID numbers from the remote server
    auto_ident = str(False)
    # Address and port of the remote server
    server = ("thrantasquadron.tk", 83)
    # Automatically upload CombatLogs as they are parsed to the remote server
    auto_upl = str(False)
    # Enable the overlay
    overlay = str(True)
    # Set the overlay opacity, or transparency
    opacity = str(1.0)
    # Set the overlay size
    size = "big"
    # Set the corner the overlay will be displayed in
    pos = "TL"
    # Set the defaults style
    style = "plastik"
# Class that loads, stores and saves settings
class settings:
    # Set the file_name for use by other functions
    def __init__(self, file_name="settings.ini"):
        self.file_name = file_name
        # Set the install path in the vars module
        vars.install_path = os.getcwd()
        # Check for the existence of the specified settings_file
        if self.file_name not in os.listdir(vars.install_path):
            print "[DEBUG] Settings file could not be found. Creating a new file with default settings"
            self.write_def()
            self.read_set()
        else:
            try:
                self.read_set()
            except:
                tkMessageBox.showerror("Error", "Settings file available, but it could not be read. Writing defaults.")
                self.write_def()
        vars.path = self.cl_path

    # Read the settings from a file containing a pickle and store them as class variables
    def read_set(self):
        with open(self.file_name, "r") as settings_file_object:
            settings_dict = cPickle.load(settings_file_object)
        self.version = settings_dict["version"]
        self.cl_path = settings_dict["cl_path"]
        self.auto_ident = settings_dict["auto_ident"]
        self.server = settings_dict["server"]
        self.auto_upl = settings_dict["auto_upl"]
        self.overlay = settings_dict["overlay"]
        self.opacity = settings_dict["opacity"]
        self.size = settings_dict["size"]
        self.pos = settings_dict["pos"]
        self.style = settings_dict["style"]

    # Write the default settings found in the class defaults to a pickle in a file
    def write_def(self):
        settings_dict = {"version": defaults.version,
                         "cl_path": defaults.cl_path,
                         "auto_ident": bool(defaults.auto_ident),
                         "server": defaults.server,
                         "auto_upl": bool(defaults.auto_upl),
                         "overlay": bool(defaults.overlay),
                         "opacity": float(defaults.opacity),
                         "size": defaults.size,
                         "pos": defaults.pos,
                         "style": defaults.style
                         }
        with open(self.file_name, "w") as settings_file:
            cPickle.dump(settings_dict, settings_file)

    # Write the settings passed as arguments to a pickle in a file
    # Setting defaults to default if not specified, so all settings are always written
    def write_set(self, version=defaults.version, cl_path=defaults.cl_path,
                  auto_ident=defaults.auto_ident, server=defaults.server,
                  auto_upl=defaults.auto_upl, overlay=defaults.overlay,
                  opacity=defaults.opacity, size=defaults.size, pos=defaults.pos,
                  style=defaults.style):
        settings_dict = {"version": version,
                         "cl_path": cl_path,
                         "auto_ident": bool(auto_ident),
                         "server": server,
                         "auto_upl": bool(auto_upl),
                         "overlay": bool(overlay),
                         "opacity": float(opacity),
                         "size": str(size),
                         "pos": pos,
                         "style": style
                         }
        with open(self.file_name, "w") as settings_file_object:
            cPickle.dump(settings_dict, settings_file_object)
        self.read_set()
Sometimes it takes a while to get to an answer, and I just thought of this: What doesn't happen on Linux that does happen on Windows? The answer to that question is: Changing the directory to the directory of the files being parsed. And then it becomes obvious: the settings are stored correctly, but the folder where the settings file is created is changed during the program, so the settings don't get written to the original settings file, but a new settings file is created in another location.
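For anyone hitting the same thing, here is a minimal sketch of one way to make the settings file immune to later os.chdir() calls (not the exact change from the project): resolve an absolute path once, based on the module's own location, and always open that.

import os

# Resolve the settings file relative to this module once, so a later
# os.chdir() elsewhere in the program cannot redirect reads and writes
# to a different folder.
SETTINGS_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "settings.ini")

class settings:
    def __init__(self, file_name=SETTINGS_PATH):
        self.file_name = file_name
        # ... rest of the constructor unchanged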
I have been working on a Sublime Text 3 plugin that fixes some coding standards I have at work (that I have a bad habit of missing). I currently have this working with a command run in the console. Most of the code was originally from this thread.
import sublime, sublime_plugin

class ReplaceCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        # grab the entire view as one region
        region = sublime.Region(0, self.view.size())
        # if there is any text in it
        if not region.empty():
            # get the text of the region
            s = self.view.substr(region)
            # perform the replacements
            s = s.replace('){', ') {')
            s = s.replace('}else', '} else')
            s = s.replace('else{', 'else {')
            # send the updated string back to the view
            self.view.replace(edit, region, s)
Then you just need to run:
view.run_command('replace')
And it will apply the coding standards (there are more I plan to implement, but for now I'll stick with these). I would like this to run on save.
I tried just changing run(self, edit) to on_pre_save(self, edit), but it does not work. I don't get any syntax errors; it just doesn't work.
Can anyone tell me how to make this run on save instead of having to run the command?
On ST3 the only way to get an Edit object is by running a TextCommand (it's in the docs, but they're not terribly clear). Fortunately, you can run the command pretty much the same way you have been doing.
Event handlers, like on_pre_save, can only be defined on an EventListener. The on_pre_save() event is passed a view object, so you just need to add something like this, which kicks off the command you've already written:
class ReplaceEventListener(sublime_plugin.EventListener):
    def on_pre_save(self, view):
        view.run_command('replace')
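(Sublime derives the command name from the class name by stripping the Command suffix and snake_casing the rest, so ReplaceCommand is invoked as 'replace' and the ReplaceCodeStandardsCommand below as 'replace_code_standards'.)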
The solution I came to was to create an on_pre_save() function that runs the command I listed earlier:
import sublime, sublime_plugin

class ReplaceCodeStandardsCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        # grab the entire view as one region
        region = sublime.Region(0, self.view.size())
        # if there is any text in it
        if not region.empty():
            # get the text of the region
            s = self.view.substr(region)
            # perform the replacements
            s = s.replace('){', ') {')
            s = s.replace('}else', '} else')
            s = s.replace('else{', 'else {')
            # send the updated string back to the view
            self.view.replace(edit, region, s)
class ReplaceCodeStandardsOnSave(sublime_plugin.EventListener):
    # Run the 'replace_code_standards' command when saving a file
    def on_pre_save(self, view):
        if view.settings().get('update_code_standard_on_save') == 1:
            view.window().run_command('replace_code_standards')
Hopefully this code helps someone!