How to use the psychopy.visual module in psychopy

To help you get started, we’ve selected a few psychopy.visual examples based on popular ways it is used in public projects.

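Before diving into the project snippets below, here is a minimal sketch of the typical psychopy.visual workflow: open a Window, create a stimulus, draw it into the back buffer, then flip the buffer to show it. The window size and text are arbitrary placeholders.

from psychopy import visual, core

win = visual.Window(size=[800, 600], units='pix')    # open a drawing window
msg = visual.TextStim(win, text='Hello, PsychoPy!')  # a simple text stimulus
msg.draw()       # draw into the back buffer
win.flip()       # swap buffers so the drawing becomes visible
core.wait(2.0)   # keep it on screen for two seconds
win.close()
core.quit()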

github smathot / OpenSesame / openexp / _canvas / _richtext / psycho.py View on Github external
def prepare(self):

		im = self._to_pil()
		x, y = self.to_xy(self.x, self.y)
		if not self.center:
			x += im.width // 2
			y -= im.height // 2
		self._stim = visual.SimpleImageStim(self.win, im, pos=(x, y))
github lupyanlab / lab-computer / psychopy-tests / test_psychopy_sound_simple.py View on Github external
from psychopy import visual, core, event, sound

win = visual.Window()
text = visual.TextStim(win, text='Hello world!')
text.draw()
win.flip()
core.wait(1.0)

text.setText('Press X to play sound')
text.draw()
win.flip()
event.waitKeys(keyList=['x'])
snd = sound.Sound('sounds/telephone-ring.wav')
snd.play()
core.wait(1.0)
github psychopy / psychopy / psychopy / demos / coder / hardware / joystick_absXY.py View on Github external
#!/usr/bin/env python
from psychopy import visual, core, event
import pygame.joystick
#see http://www.pygame.org/docs/ref/joystick.html#pygame.joystick.Joystick

#create a window to draw in
myWin = visual.Window((800.0,800.0), allowGUI=False)

pygame.joystick.init()  # initialise the module

if pygame.joystick.get_count() > 0:
    myJoystick = pygame.joystick.Joystick(0)
    myJoystick.init()  # initialise the device
    print('found', myJoystick.get_name(), 'with:')
    print('...', myJoystick.get_numbuttons(), 'buttons')
    print('...', myJoystick.get_numhats(), 'hats')
    print('...', myJoystick.get_numaxes(), 'analogue axes')
else:
    print("You don't have a joystick connected!?")
    myWin.close()
    core.quit()

#INITIALISE SOME STIMULI
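The snippet ends just as the stimuli would be created. A hedged sketch of how such a demo might continue, mapping the first two analogue axes onto a patch position, is shown below; the stimulus, the 0.5 scaling factor, and the loop structure are assumptions for illustration, not the original demo code.

fixSpot = visual.GratingStim(myWin, tex=None, mask='gauss', size=0.1)

while not event.getKeys():
    pygame.event.pump()  # let pygame refresh the joystick state
    # axes report values in the range -1..+1; scale them to window units
    x = myJoystick.get_axis(0) * 0.5
    y = -myJoystick.get_axis(1) * 0.5  # invert so pushing forward moves the patch up
    fixSpot.pos = (x, y)
    fixSpot.draw()
    myWin.flip()

myWin.close()
core.quit()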
github isolver / ioHub / iohub / examples / scriptOnly / run.py View on Github external
psychoStim=OrderedDict()

coord_type = display.getCoordinateType()

psychoStim['grating'] = visual.PatchStim(psychoWindow, mask="circle",
                                         size=75, pos=[-100,0], sf=.075,
                                         units=coord_type)

psychoStim['fixation'] = visual.PatchStim(psychoWindow, size=25,
                                          pos=[0,0], sf=0,
                                          color=[-1,-1,-1],
                                          colorSpace='rgb',
                                          units=coord_type)

psychoStim['mouseDot'] = visual.GratingStim(psychoWindow, tex=None,
                                            mask="gauss",
                                            pos=mouse.getPosition(),
                                            size=(50,50), color='purple',
                                            units=coord_type)


# Clear all events from the global event buffer, 
# and from the device level event buffers.
io.clearEvents('all')

# Draw the stimuli and flip the screen to show the updated display graphics
[psychoStim[stimName].draw() for stimName in psychoStim]
psychoWindow.flip()
first_flip_time=Computer.currentSec()

# Get the stimulus display index being used, so Mouse events can be filtered by
github esdalmaijer / PyGaze / pygaze / libscreen.py View on Github external
arguments
	pos		-- a (x,y) tuple that makes sense to PsychoPy (i.e. (0,0) is
			   display center; bottom left is (-,-) and top right is
			   (+,+))
	
	keyword arguments
	dispsize	-- a (width, height) tuple for the display resolution or None
			   for autodetecting the size of current active window
			   (default = None)
	
	returns
	pos		-- a (x,y) position tuple, assuming (0,0) is top left
	"""

	if dispsize is None:
		dispsize = tuple(psychopy.visual.openWindows[SCREENNR].size)

	x = pos[0] + dispsize[0]/2
	y = (pos[1] * -1) + dispsize[1]/2

	return (x,y)
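As a quick check of the arithmetic, passing an explicit dispsize sidesteps the openWindows lookup. For a 1024x768 display the PsychoPy centre maps to the middle of the screen and the y axis is mirrored; the function name used here is hypothetical, since the def line is not shown in the snippet.

# assuming the converter above is called psychopos2pos (name not shown in the snippet)
print(psychopos2pos((0, 0), dispsize=(1024, 768)))       # (512.0, 384.0): the display centre
print(psychopos2pos((-100, 200), dispsize=(1024, 768)))  # (412.0, 184.0)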

github psychopy / psychopy / psychopy / demos / coder / stimuli / screensAndWindows.py View on Github external
"""

from __future__ import division

from psychopy import visual, event
from numpy import sin, pi  # numeric python

if True:  # use two positions on one screen
    winL = visual.Window(size=[400, 300], pos=[100, 200], screen=0,
                         allowGUI=False)  # , fullscr=True)
    winR = visual.Window(size=[400, 300], pos=[400, 200], screen=0,
                         allowGUI=False)  # , fullscr=True)  # same screen diff place
else:
    winL = visual.Window(size=[400, 300], pos=[100, 200], screen=0,
                         allowGUI=False, fullscr=False)
    winR = visual.Window(size=[400, 300], pos=[100, 200], screen=1,
                         allowGUI=False, fullscr=False)  # same place diff screen

# create some stimuli
# NB. if the windows have the same characteristics then

# left screen
contextPatchL = visual.GratingStim(winL, tex='sin', mask='circle',
    size=1.0, sf=3.0, texRes=512)
targetStimL = visual.GratingStim(winL, ori=20, tex='sin', mask='circle',
    size=0.4, sf=3.0, texRes=512, autoLog=False)

# right screen
contextPatchR = visual.GratingStim(winR, tex='sin', mask='circle',
    size=1.0, sf=3.0, texRes=512)
targetStimR = visual.GratingStim(winR, ori=20, tex='sin', mask='circle',
    size=0.4, sf=3.0, texRes=512, autoLog=False)
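The demo is truncated before its drawing loop. A hedged sketch of how the two windows might be driven for a few seconds is given below; the phase animation and the five-second duration are illustrative choices, not the original demo code (which imports sin and pi, suggesting it modulates the stimuli over time).

from psychopy import core  # not imported in the snippet above

trialClock = core.Clock()
while trialClock.getTime() < 5.0 and not event.getKeys():
    t = trialClock.getTime()
    # drift the target gratings in both windows
    targetStimL.phase = 2 * t
    targetStimR.phase = 2 * t
    contextPatchL.draw()
    targetStimL.draw()
    winL.flip()
    contextPatchR.draw()
    targetStimR.draw()
    winR.flip()

winL.close()
winR.close()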
github psychopy / psychopy / psychopy / demos / coder / hardware / RiftMinimal.py View on Github external
# Minimal Oculus Rift head-mounted display example. Press the 'q' key or use
# the application GUI to exit. Requires PsychXR to be installed.
#
# This file is public domain.
#
from psychopy import visual, event, core

# Create a VR session, treat the returned object just like a regular window.
#
hmd = visual.Rift()

# loop until the user quits the app through the GUI menu
stopApp = False
while not stopApp:
    for i in ('left', 'right'):
        hmd.setBuffer(i)  # select the eye buffer to draw to

        # Setup the viewing parameters for the current buffer, this needs to be
        # called every time the buffer changes.
        #
        # For standard PsychoPy stimuli (e.g. GratingStim, ImageStim, etc.) you
        # should use 'setDefaultView' with 'mono=True' when creating a
        # visual.Rift instance. This configures the headset to properly render
        # 2D stimuli, treating the HMD as a monitor.
        #
        hmd.setDefaultView()
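The snippet stops inside the per-eye loop. Picking up where it leaves off, a hedged continuation might draw a stimulus into each buffer, submit the frame, and watch for the quit key; the message stimulus is an assumption and would be created once, before the while loop, with hmd as its window.

        # draw something into the currently selected eye buffer
        message.draw()  # hypothetical TextStim created earlier against hmd

    hmd.flip()  # submit both eye buffers to the compositor

    # allow the user to quit with the 'q' key
    if 'q' in event.getKeys():
        stopApp = True

hmd.close()
core.quit()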
github psychopy / psychopy / psychopy / demos / coder / stimuli / elementArrays.py View on Github external
from psychopy import visual, core, event
from psychopy.tools.coordinatetools import cart2pol

# We only need these two commands from numpy.random:
from numpy.random import random, shuffle

win = visual.Window([1024, 768], units='pix', monitor='testMonitor')

N = 500
fieldSize = 500
elemSize = 40
coherence = 0.5

# build a standard (but dynamic!) global form stimulus
xys = random([N, 2]) * fieldSize - fieldSize / 2.0  # numpy vector
globForm = visual.ElementArrayStim(win,
    nElements=N, sizes=elemSize, sfs=3,
    xys=xys, colors=[180, 1, 1], colorSpace='hsv')

# calculate the orientations for global form stimulus
def makeCoherentOris(XYs, coherence, formAngle):
    # length along the first dimension:
    nNew = XYs.shape[0]

    # random orientations:
    newOris = random(nNew) * 180

    # select some elements to be coherent
    possibleIndices = list(range(nNew))  # create an array of indices
    shuffle(possibleIndices)  # shuffle it 'in-place' (no new array)
    coherentIndices = possibleIndices[0: int(nNew * coherence)]
github isolver / ioHub / examples / simple / run.py View on Github external
        # get the index of the screen to create the PsychoPy window in.
        screen_index = display.getStimulusScreenIndex()

        # Create a psychopy window: full screen resolution, full screen mode, pix units, no border,
        # using the monitor profile name 'testMonitor', which is created on the fly by the script.
        psychoWindow = visual.Window(screen_resolution, monitor="testMonitor", units=coord_type, fullscr=True, allowGUI=False, screen=screen_index)

        # Hide the 'system mouse cursor' so we can display a cool gaussian mask for a mouse cursor.
        mouse.setSystemCursorVisibility(False)

        # Create an ordered dictionary of psychopy stimuli. An ordered dictionary returns keys in the order
        # they are added, so you can use it to reference stimuli by name or by z-order.
        psychoStim=OrderedDict()
        psychoStim['grating'] = visual.PatchStim(psychoWindow, mask="circle", size=75, pos=[-100,0], sf=.075)
        psychoStim['fixation'] = visual.PatchStim(psychoWindow, size=25, pos=[0,0], sf=0, color=[-1,-1,-1], colorSpace='rgb')
        psychoStim['mouseDot'] = visual.GratingStim(psychoWindow, tex=None, mask="gauss", pos=currentPosition, size=(50,50), color='purple')

        # Clear all events from the global and device level event buffers.
        self.hub.clearEvents('all')

        QUIT_EXP=False
        # Loop until a keyboard event for the space, Enter (Return), or Escape key is received.
        while QUIT_EXP is False:

            # for each loop, update the grating phase
            psychoStim['grating'].setPhase(0.05, '+')  # advance phase by 0.05 of a cycle

            # and update the mouse contingent gaussian based on the current mouse location
            currentPosition=mouse.getPosition()
            psychoStim['mouseDot'].setPos(currentPosition)
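The loop is truncated here. Based on the scriptOnly example earlier on this page, each iteration presumably finishes by redrawing the ordered stimuli and flipping the window; the ioHub keyboard handling that sets QUIT_EXP is omitted rather than guessed at.

            # redraw every stimulus in z-order and show the new frame
            [psychoStim[stimName].draw() for stimName in psychoStim]
            psychoWindow.flip()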
github psychopy / psychopy / psychopy / demos / coder / stimuli / variousVisualStims.py View on Github external
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Demo of several visual stimuli depending on the mouse position.
"""

from __future__ import division

from psychopy import visual, event, core
import numpy

win = visual.Window([600, 600], color='black')

gabor = visual.GratingStim(win, mask='gauss', pos=[-0.5, -0.5], color=[0, 0, 1], sf=5, ori=30)
movie = visual.MovieStim3(win, 'jwpIntro.mov', units='pix', pos=[100, 100], size=[160, 120])
txt = u"unicode (eg \u03A8 \u040A \u03A3)"
text = visual.TextStim(win, pos=[0.5, -0.5], text=txt, font=['Times New Roman'])
faceRGB = visual.ImageStim(win, image='face.jpg', pos=[-0.5, 0.5])

mouse = event.Mouse()
instr = visual.TextStim(win, text='move the mouse around')

t = 0.0
while not event.getKeys() and not mouse.getPressed()[0]:
    # get mouse events
    mouse_dX, mouse_dY = mouse.getRel()

    gabor.ori -= mouse_dY * 10
    text.ori += mouse_dY * 10
    faceRGB.ori += mouse_dY * 10
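The frame loop is cut off after the orientation updates. A hedged sketch of the rest of a typical frame, drawing every stimulus and flipping the window, plus the usual clean-up once the loop exits, might look like this:

    # draw all the stimuli and show the frame
    gabor.draw()
    movie.draw()
    text.draw()
    faceRGB.draw()
    instr.draw()
    win.flip()

win.close()
core.quit()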