I am trying to create a scenario where I need to draw a line from the mousePressEvent position to the latest mouseMoveEvent position, which means I would need to call paintEvent from mousePressEvent. Is that possible?
So the scenario is this:
1) paintEvent is used to draw two circles in black.
2) A mouse press event arrives, and when the press happens I want to change the colour of the circles to green. Is that possible?
import sys, random
from PyQt4 import QtGui, QtCore

class P(QtGui.QWidget):
    def __init__(self):
        super(P, self).__init__()
        self.initUI()

    def initUI(self):
        q = self.frameGeometry()
        cp = QtGui.QDesktopWidget().availableGeometry().center()
        q.moveCenter(cp)
        self.setFixedSize(300, 300)
        self.setWindowTitle('Points')
        self.show()

    def mousePressEvent(self, QMouseEvent):
        cursor = QtGui.QCursor(self)
        position = QMouseEvent.pos()
        xpos = QMouseEvent.x()
        ypos = QMouseEvent.y()
        # Trial ??????
        q = QtGui.QPainter()
        q.drawLine(30, 30, 90, 90)
        print QMouseEvent.pos()

    def mouseReleaseEvent(self, QMouseEvent):
        cursor = QtGui.QCursor()
        print cursor.pos()

    def paintEvent(self, e):
        qp = QtGui.QPainter()
        qp.begin(self)
        E1 = qp.drawEllipse(30, 30, 20, 20)
        E2 = qp.drawEllipse(30, 130, 20, 20)

def main():
    app = QtGui.QApplication(sys.argv)
    ex = P()
    sys.exit(app.exec_())

if __name__ == '__main__':
    main()
In simple words, I need to know whether one event handler can be called from another, i.e. paintEvent from mousePressEvent.
It is a much better idea to do all your painting in the paintEvent handler.
You should use your mouse event handlers to collect the data (starting points, lengths, etc.) and then do the actual repainting in paintEvent.
Once you've collected the new data in the mouse event handlers, you can tell the QWidget that it needs to repaint by calling its update() method. This schedules a paint event that will be processed when the program returns to the event loop.
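For example, here is a minimal sketch of that pattern applied to the scenario above (PyQt4, as in the question; the pressPos and movePos attribute names are just illustrative choices):

import sys
from PyQt4 import QtGui, QtCore

class Example(QtGui.QWidget):
    def __init__(self):
        super(Example, self).__init__()
        self.pressPos = None    # set in mousePressEvent
        self.movePos = None     # updated in mouseMoveEvent
        self.setFixedSize(300, 300)
        self.setWindowTitle('Points')
        self.show()

    def mousePressEvent(self, event):
        # only collect the data here, then schedule a repaint
        self.pressPos = self.movePos = event.pos()
        self.update()

    def mouseMoveEvent(self, event):
        # move events arrive while a button is held down
        if self.pressPos is not None:
            self.movePos = event.pos()
            self.update()

    def paintEvent(self, event):
        qp = QtGui.QPainter(self)
        # circles turn green once a press has been registered
        color = QtCore.Qt.green if self.pressPos is not None else QtCore.Qt.black
        qp.setBrush(QtGui.QBrush(color))
        qp.drawEllipse(30, 30, 20, 20)
        qp.drawEllipse(30, 130, 20, 20)
        # line from the press position to the latest move position
        if self.pressPos is not None and self.movePos is not None:
            qp.drawLine(self.pressPos, self.movePos)

if __name__ == '__main__':
    app = QtGui.QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())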
I want to create a draggable and resizable frameless window in PySide6 by overriding mouseEvent and resizeEvent, and use a QSizeGrip to control the shape of the window.
I can implement dragging and resizing on their own, but there is a problem when they are combined:
when I resize the window after dragging it, the position becomes wrong. I want to know what is wrong in this code:
import sys
from PySide6.QtCore import *
from PySide6.QtWidgets import *
from PySide6.QtGui import *

class MyWidget(QWidget):
    def __init__(self):
        super().__init__()
        self.text = QLabel("Hello World", alignment=Qt.AlignCenter)
        self.layout = QVBoxLayout(self)
        self.layout.addWidget(self.text)
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        self.gripSize = 16
        self.grips = []
        for i in range(4):
            grip = QSizeGrip(self)
            grip.resize(self.gripSize, self.gripSize)
            self.grips.append(grip)

    def resizeEvent(self, event):
        QWidget.resizeEvent(self, event)
        rect = self.rect()
        # top left grip doesn't need to be moved...
        # top right
        self.grips[1].move(rect.right() - self.gripSize, 0)
        # bottom right
        self.grips[2].move(
            rect.right() - self.gripSize, rect.bottom() - self.gripSize)
        # bottom left
        self.grips[3].move(0, rect.bottom() - self.gripSize)

    def mousePressEvent(self, event):
        self.oldPos = event.globalPos()

    def mouseMoveEvent(self, event):
        delta = QPoint(event.globalPos() - self.oldPos)
        self.move(self.x() + delta.x(), self.y() + delta.y())
        self.oldPos = event.globalPos()

if __name__ == "__main__":
    app = QApplication([])
    widget = MyWidget()
    widget.show()
    sys.exit(app.exec())
By default, QSizeGrip interfaces with the OS for the actual resizing as soon as it's activated (by pressing the left mouse button on it).
The result is that, after that, all mouse move events are intercepted by the system until the button is released. Since the button release is also intercepted by the system (to know that the resizing has been completed), QSizeGrip will only be able to handle mouse events again after the button release; since the last event it handled was the mouse button press, the next one it receives is a MouseMove event, and, by default, those events are ignored by widgets that don't handle them.
If a mouse event is ignored, it is propagated to its parent(s), which in this case is your MyWidget.
Unfortunately, your assumption is that you only get mouse move events after a button press but, due to what is explained above, this is not the case: you will not receive a mouse button press (it was handled by the size grip), only a mouse move (since it was ignored by the size grip).
Now, there are two cases:
you previously moved the window, so there is an oldPos based on the previous start mouse position, and the window will be moved using the wrong parameters;
you only resized the window since startup, and the program will crash because no oldPos attribute exists yet.
There are various possible solutions, but the simplest one is to create a default oldPos attribute with a None value, set it in the mouse press, check if self.oldPos is not None in the mouse move (and only move the window in that case) and, most importantly, restore self.oldPos = None in the mouse release.
Note that it's usually better to move the window using only a single button (the convention is the left one, but the middle one is not uncommon):
class MyWidget(QWidget):
    oldPos = None
    # ...
    def mousePressEvent(self, event):
        if event.button() == Qt.MouseButton.LeftButton:
            self.oldPos = event.globalPos()

    def mouseMoveEvent(self, event):
        if self.oldPos is not None:
            delta = event.globalPos() - self.oldPos
            self.move(self.pos() + delta)
            self.oldPos = event.globalPos()

    def mouseReleaseEvent(self, event):
        self.oldPos = None
Note: QPoint + QPoint is already a QPoint, and move() accepts a QPoint by default, so there's no need to sum x and y coordinates individually.
I have tried setAttribute(Qt.Qt.WA_TransparentForMouseEvents), but the mouse still can't pierce through the Qt window.
I want to make mouse events penetrate the Qt window: for example, if I right-click on a Qt window that sits on the Windows 10 desktop, it should trigger the Windows 10 desktop context menu.
Would a transparent window suit your needs?
from PyQt5 import QtCore, QtWidgets, QtGui

class Overlay(QtWidgets.QWidget):
    def __init__(self):
        super().__init__()
        self.setAttribute(QtCore.Qt.WA_TransparentForMouseEvents)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground, True)
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        layout = QtWidgets.QHBoxLayout(self)
        label = QtWidgets.QLabel('Transparent and propagating')
        label.setFont(QtGui.QFont('Arial', 26))
        label.setStyleSheet("background-color : white")
        layout.addWidget(label)
        self.show()

if __name__ == '__main__':
    app = QtWidgets.QApplication([])
    form = Overlay()
    app.exec_()
I tried to figure out a way to directly transmit clicks to the desktop. The closest related question gave me some ideas, but ultimately I was not able to get it working; the clicks never reach the desktop. Maybe you can still get some ideas from this:
from PyQt5 import QtWidgets, QtGui
import win32api, win32con
from ctypes import windll

class Overlay(QtWidgets.QWidget):
    def __init__(self):
        super().__init__()
        layout = QtWidgets.QHBoxLayout(self)
        label = QtWidgets.QLabel('Click to Desktop')
        label.setFont(QtGui.QFont('Arial', 26))
        label.setStyleSheet("background-color : white")
        layout.addWidget(label)
        # make window partially transparent to see where you are clicking
        self.setWindowOpacity(0.5)
        # get handle to desktop as described in linked question
        hProgman = windll.User32.FindWindowW("Progman", 0)
        hFolder = windll.User32.FindWindowExW(hProgman, 0, "SHELLDLL_DefView", 0)
        self.desktop = windll.User32.FindWindowExW(hFolder, 0, "SysListView32", 0)
        self.show()

    def mousePressEvent(self, event):
        # catch mouse event to route it to desktop
        x = event.globalPos().x()
        y = event.globalPos().y()
        lParam = win32api.MAKELONG(x, y)
        # left click on desktop (left button down + up => should be replaced by an
        # event.button() pseudo switch case once working)
        windll.User32.SendInput(self.desktop, win32con.WM_LBUTTONDOWN, win32con.MK_LBUTTON, lParam)
        windll.User32.SendInput(self.desktop, win32con.WM_LBUTTONUP, 0, lParam)
        # display position for debugging (position gets displayed, but nothing gets clicked)
        print(f'clicked on desktop at position {x} and {y}')

if __name__ == '__main__':
    app = QtWidgets.QApplication([])
    form = Overlay()
    app.exec_()
from PyQt5.QtCore import Qt      # imports assumed here, matching the PyQt5 examples above
from PyQt5.QtWidgets import QWidget

class main(QWidget):
    def __init__(self):
        super().__init__()
        # these flags make the window skip focus and ignore all mouse input
        self.setWindowFlags(Qt.Popup | Qt.WindowDoesNotAcceptFocus | Qt.WindowTransparentForInput)
        self.setAttribute(Qt.WA_AlwaysStackOnTop, True)
I am trying to draw over an image using QPainter. It works well when using a solid color, but when using a semi-transparent color, dots appear.
Also, when drawing multiple lines in one place, the color gets multiplied and produces a darker color.
import sys
from PyQt5.QtCore import Qt, QPoint
from PyQt5.QtWidgets import QMainWindow, QApplication
from PyQt5.QtGui import QPixmap, QPainter, QPen, QColor

class Menu(QMainWindow):
    def __init__(self):
        super().__init__()
        self.drawing = False
        self.lastPoint = QPoint()
        self.image = QPixmap(r"C:\Users\www\Desktop\image.jpg")
        self.setGeometry(100, 100, 500, 300)
        self.resize(self.image.width(), self.image.height())
        self.show()

    def paintEvent(self, event):
        painter = QPainter(self)
        painter.drawPixmap(self.rect(), self.image)

    def mousePressEvent(self, event):
        if event.button() == Qt.LeftButton:
            self.drawing = True
            self.lastPoint = event.pos()

    def mouseMoveEvent(self, event):
        if event.buttons() & Qt.LeftButton and self.drawing:
            painter = QPainter(self.image)
            painter.setPen(QPen(QColor(121, 252, 50, 50), 20, Qt.SolidLine))
            painter.drawLine(self.lastPoint, event.pos())
            self.lastPoint = event.pos()
            self.update()

    def mouseReleaseEvent(self, event):
        if event.button() == Qt.LeftButton:
            self.drawing = False

if __name__ == '__main__':
    app = QApplication(sys.argv)
    mainMenu = Menu()
    sys.exit(app.exec_())
I need to keep the color the same as the original color (instead of it getting darker each time) when drawing several times over the same place.
Mouse movements are "discrete", which means that whenever you move your mouse you won't get continuous pixel coordinates: if you move your mouse fast enough from (0, 0) to (20, 20), you'll probably get only two or three mouseMoveEvents in between at most, resulting in a single segment for each mouse event.
The "dots" you see are actually areas where the different lines you draw overlap, especially since the mouse movements are not continuous. Think of it as painting with watercolors: it's like drawing a small line at each mouse movement, waiting until it has dried, then painting another line from the previous point.
Since you draw a separate line at each mouseMoveEvent, the ends of those segments are superimposed, resulting in those "less transparent dots" (since you're using a non-opaque color): they are the points where the segments overlap, and, because painting is usually "additive", you get two or more areas where the superimposed color results in a more opaque one. Imagine it as looking through two pairs of sunglasses that are not aligned.
QPainterPath, instead, can draw continuous lines without that artifact, as long as they are part of the same painter path (no matter its subpaths, including polygons, ellipses, arcs, etc.). Then, whenever you tell the QPainter to draw a new element, it will be superimposed on the previous ones.
To better clarify, in this image on the left I'm drawing two distinct lines with a common vertex, using your color, which would be the case of a mousePressEvent (start drawing), a fast movement to the right (draw the first line) and another one to the bottom (draw another line). On the right there are the same "lines", but using a unique QPainterPath.
In this example code I temporarily create a painter path, which stores the current "drawing path" until the mouse is released, after which the path is actually applied to the QPixmap.
import sys
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QWidget, QApplication
from PyQt5.QtGui import QPixmap, QPainter, QPen, QColor, QPainterPath

class Menu(QWidget):
    def __init__(self):
        super().__init__()
        self.drawingPath = None
        self.image = QPixmap(r"testimage.jpg")
        self.resize(self.image.width(), self.image.height())
        self.show()

    def paintEvent(self, event):
        painter = QPainter(self)
        painter.drawPixmap(self.rect(), self.image)
        if self.drawingPath:
            painter.setPen(QPen(QColor(121, 252, 50, 50), 20, Qt.SolidLine))
            painter.drawPath(self.drawingPath)

    def mousePressEvent(self, event):
        if event.button() == Qt.LeftButton:
            # start a new QPainterPath and *move* to the current point
            self.drawingPath = QPainterPath()
            self.drawingPath.moveTo(event.pos())

    def mouseMoveEvent(self, event):
        if event.buttons() & Qt.LeftButton and self.drawingPath:
            # add a line to the painter path, without "removing" the pen
            self.drawingPath.lineTo(event.pos())
            self.update()

    def mouseReleaseEvent(self, event):
        if event.button() == Qt.LeftButton and self.drawingPath:
            # draw the painter path onto the pixmap
            painter = QPainter(self.image)
            painter.setPen(QPen(QColor(121, 252, 50, 50), 20, Qt.SolidLine))
            painter.drawPath(self.drawingPath)
            self.drawingPath = None
            self.update()

if __name__ == '__main__':
    app = QApplication(sys.argv)
    mainMenu = Menu()
    sys.exit(app.exec_())
There is only one problem with this: drawing over the path currently being drawn won't result in a more opaque color, meaning that, as long as the mouse button is pressed, no matter how many times you "paint" over the same point, the color will always stay the same. To get the "more opaque color" effect, you'll need to paint over the intersection(s) by starting a new path each time.
PS: I used a QWidget, as in some cases a QMainWindow can grab mouse movements that start from a click on a non-interactive area (like in this case) and use them to move the interface.
import sys
from PyQt4 import QtGui, QtCore

x = -40
y = 0
z = 0

class MyView(QtGui.QGraphicsView):
    def __init__(self):
        QtGui.QGraphicsView.__init__(self)
        self.scene = QtGui.QGraphicsScene(self)
        self.item = []
        self.item2 = []
        myLine = QtGui.QGraphicsLineItem(-10, 20, 20, 20)
        myLine.setCursor(QtCore.Qt.CrossCursor)
        self.scene.addItem(myLine)
        myLine.setFlag(QtGui.QGraphicsItem.ItemIsMovable)
        self.setScene(self.scene)

    def keyPressEvent(self, event):
        global x
        global y
        global z
        if event.key() == QtCore.Qt.Key_Left:
            self.item.append(QtGui.QGraphicsRectItem(x, -10, 40, 40))
            self.scene.addItem(self.item[y])
            x = x + 10
            self.item[y].setFlag(QtGui.QGraphicsItem.ItemIsMovable)
            y = y + 1
        elif event.key() == QtCore.Qt.Key_Right:
            self.item2.append(QtGui.QGraphicsEllipseItem(x, -10, 40, 40))
            self.scene.addItem(self.item2[z])
            x = x + 10
            self.item2[z].setFlag(QtGui.QGraphicsItem.ItemIsMovable)
            z = z + 1

    def mousePressEvent(self, event):
        self._start = event.pos()

    def mouseReleaseEvent(self, event):
        start = QtCore.QPointF(self.mapToScene(self._start))
        end = QtCore.QPointF(self.mapToScene(event.pos()))
        self.scene.addItem(
            QtGui.QGraphicsLineItem(QtCore.QLineF(start, end)))

if __name__ == '__main__':
    app = QtGui.QApplication(sys.argv)
    view = MyView()
    view.show()
    sys.exit(app.exec_())
As you can see, I am not able to move the rectangles and ellipses, because the mouse press event is drawing a line instead.
I want to be able to drag as well as draw a line:
when the press is over a graphics item it should drag; otherwise it should draw a line.
Use if self.itemAt(event.pos()) is None: in mousePressEvent() to determine whether the click occurred over a QGraphicsItem. Set a state flag self.__dragging = the_item in that method, so that you can determine in mouseMoveEvent() and mouseReleaseEvent() whether you are dragging an item (self.__dragging is not None) or drawing a line (self.__dragging is None).
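A rough sketch of how that could look inside MyView, replacing the existing mouse handlers (the _start and __dragging names are just illustrative):

    def mousePressEvent(self, event):
        # remember the item under the cursor (None if the click was on empty space)
        self.__dragging = self.itemAt(event.pos())
        if self.__dragging is not None:
            # let the default implementation start dragging the movable item
            QtGui.QGraphicsView.mousePressEvent(self, event)
        else:
            self._start = event.pos()

    def mouseMoveEvent(self, event):
        if self.__dragging is not None:
            QtGui.QGraphicsView.mouseMoveEvent(self, event)

    def mouseReleaseEvent(self, event):
        if self.__dragging is not None:
            QtGui.QGraphicsView.mouseReleaseEvent(self, event)
        else:
            # no item was under the press, so draw a line instead
            start = self.mapToScene(self._start)
            end = self.mapToScene(event.pos())
            self.scene.addItem(QtGui.QGraphicsLineItem(QtCore.QLineF(start, end)))
        self.__dragging = None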
Is it possible to scale (or "zoom") a QTextEdit area? I believe I read that placing the QTextEdit inside a QLayout can allow scaling of the QTextEdit area, though I did not find how to implement it. A couple of options...
CTRL + Roll of Mouse Wheel
Running the code below, holding down the CTRL (control) key and rolling the mouse wheel, the event is captured and the text does scale (at least on Windows). However, as the text grows larger, the wheel has to move further and further to have much effect, so one goal is to be able to modify that somehow, maybe with some math to increase the increments to a greater degree in the plus direction.
(The setReadOnly() calls below are there because it seems the textEdit has to be setReadOnly(False) for the mouse event to be captured, then True to be able to scale during the roll of the mouse wheel; it is then set back to its original state of False again on release of the CTRL key.)
Toolbar Button Click
The other option is toolbar buttons for zooming in and out; when one is clicked, onZoomInClicked() is called.
Some current problems with the code below:
1. It prints QLayout: Attempting to add QLayout "" to MainWindow "", which already has a layout, and I don't have my head wrapped around that yet.
2. Using QtGui.QTextEdit(self.formLayout) instead of (self), to place the textEdit area inside the layout, produces TypeError: 'PySide.QtGui.QTextEdit' called with wrong argument types.
3. wheelEvent() could use some way to modify event.delta(), maybe?
4. The toolbar button (text only) currently runs its handler when clicked, however the handler only contains a print statement.
from PySide import QtGui, QtCore

class MainWindow(QtGui.QMainWindow):
    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.formLayout = QtGui.QFormLayout(self)
        self.textEdit = QtGui.QTextEdit(self)
        self.toolBar = QtGui.QToolBar(self)
        self.actionZoomIn = QtGui.QAction(self)
        self.textEdit.setHtml('<font color=blue>Hello <b>world</b></font>')
        self.setCentralWidget(self.textEdit)
        self.addToolBar(self.toolBar)
        self.toolBar.addAction(self.actionZoomIn)
        self.actionZoomIn.setText('Zoom In')
        self.actionZoomIn.connect(self.actionZoomIn,
            QtCore.SIGNAL('triggered()'), self.onZoomInClicked)

    def onZoomInClicked(self):
        print "onZoomInClicked(self) needs code"

    def wheelEvent(self, event):
        print "wheelEvent() captured"
        if (event.modifiers() & QtCore.Qt.ControlModifier):
            self.textEdit.setReadOnly(True)
            event.accept()

    def keyReleaseEvent(self, evt):
        if evt.key() == QtCore.Qt.Key_Control:
            self.textEdit.setReadOnly(False)

if __name__ == '__main__':
    app = QtGui.QApplication([])
    frame = MainWindow()
    frame.show()
    app.exec_()
I've been grappling with this for days, so it would be great to have the more customizable QTextEdit scale/zoom working, if it is even possible.
The two error messages can be explained as follows:
The QMainWindow automatically gets a layout, so the QFormLayout is redundant. If you want to add a layout, create a QWidget to be the central widget and make it the parent of the new layout. Other widgets can then be added to that new layout.
The parent of a QWidget subclass must itself be QWidget subclass, which QFormLayout isn't.
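For example, a minimal sketch of that central-widget pattern (the widget and row contents here are just placeholders):

# inside MainWindow.__init__
central = QtGui.QWidget(self)
formLayout = QtGui.QFormLayout(central)            # the layout's parent is a plain QWidget
formLayout.addRow('Name:', QtGui.QLineEdit(central))
self.setCentralWidget(central)                     # the QMainWindow manages the central widget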
I've modified your example so that it does most of what you asked for. Note that QTextEdit.zoomIn and QTextEdit.zoomOut both take a range argument for controlling the degree of zoom.
from PySide import QtGui, QtCore

class MainWindow(QtGui.QMainWindow):
    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.textEdit = Editor(self)
        self.toolBar = QtGui.QToolBar(self)
        self.actionZoomIn = QtGui.QAction('Zoom In', self)
        self.actionZoomOut = QtGui.QAction('Zoom Out', self)
        self.textEdit.setHtml('<font color=blue>Hello <b>world</b></font>')
        self.setCentralWidget(self.textEdit)
        self.addToolBar(self.toolBar)
        self.toolBar.addAction(self.actionZoomIn)
        self.toolBar.addAction(self.actionZoomOut)
        self.actionZoomIn.triggered.connect(self.onZoomInClicked)
        self.actionZoomOut.triggered.connect(self.onZoomOutClicked)

    def onZoomInClicked(self):
        self.textEdit.zoom(+1)

    def onZoomOutClicked(self):
        self.textEdit.zoom(-1)

class Editor(QtGui.QTextEdit):
    def __init__(self, parent=None):
        super(Editor, self).__init__(parent)

    def zoom(self, delta):
        if delta < 0:
            self.zoomOut(1)
        elif delta > 0:
            self.zoomIn(5)

    def wheelEvent(self, event):
        if (event.modifiers() & QtCore.Qt.ControlModifier):
            self.zoom(event.delta())
        else:
            QtGui.QTextEdit.wheelEvent(self, event)

if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    app.exec_()