Article
The article “In COM we trust: Feasibility of USB-based event marking” is available with open access from Behavior Research Methods: doi:10.3758/s13428-021-01571-z
The data is permanently archived on Zenodo and available via this DOI: doi:10.5281/zenodo.3838621
This Python script was used for data analysis.
"""Analyze data for "In COM We Trust: Feasibility of USB-Based Event Marking".

Expects the following data files in a /data directory nested in the
working directory (available from https://doi.org/10.5281/zenodo.3838622):

- NLS-lin-leo.txt.gz
- NLS-lin-ljr.txt.gz
- NLS-lin-lu3.txt.gz
- NLS-lin-par.txt.gz
- NLS-lin-t32.txt.gz
- NLS-lin-tlc.txt.gz
- NLS-lin-uno.txt.gz
- NLS-win-leo.txt.gz
- NLS-win-ljr.txt.gz
- NLS-win-lu3.txt.gz
- NLS-win-par.txt.gz
- NLS-win-t32.txt.gz
- NLS-win-tlc.txt.gz
- NLS-win-uno.txt.gz

Will produce the following outputs in a new directory "analysis_outputs":

- figure2_raincloud.png
- figure2_raincloud.pdf

Python requirements:

- Python >= 3.6
- numpy >= 1.15
- pandas >= 0.24
- matplotlib >= 3.0.2
- seaborn == 0.10.1
- ptitprince == 0.2.4

It is recommended to run this script in an isolated environment,
for example by using conda (https://docs.conda.io/en/latest/miniconda.html).
After making conda available on your command line, run the following
commands to create a suitable environment for running `analysis.py`:

- conda create -n usb_to_ttl Python=3.8 numpy pandas matplotlib --yes
- conda activate usb_to_ttl
- pip install seaborn==0.10.1 ptitprince==0.2.4

And then run:

- python analysis.py

"""
# %% Imports
import os

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import ptitprince
import seaborn as sns

# %% Function to read latency data


def read_data(fnames):
    """Read latency data.

    Parameters
    ----------
    fnames : list of str
        The files to read in.

    Returns
    -------
    df : pandas.DataFrame
        All files concatenated as a data frame.

    """
    dfs = []
    for fname in fnames:

        # Read LabStreamer file, skipping the header
        df = pd.read_csv(fname, sep="\t", skiprows=13, na_values="NAN")

        # Drop all rows where latency_ms was not measured
        df = df[~df["latency_ms"].isna()]

        # Add more information
        opsys = fname.split("-")[1]
        device = fname.split("-")[2][:3]
        df["device"] = df["channel"].map({"Analog 0": "kbd", "Analog 2": device})
        df["os"] = opsys
        df["meas"] = f"{opsys}-{device}"

        # Drop the event column because it's meaningless in this circumstance
        df = df.drop(columns="event")

        # Each group of rows sharing the same time_s measurement gets an index
        # Example: all rows where time_s is 5.4 get the same value (e.g., 10)
        # in the 'idx' column
        _, _, idx = np.unique(df["time_s"], return_index=True, return_inverse=True)
        df["idx"] = idx
        df = df.sort_values(by="idx")

        dfs.append(df)

    # Concatenate all data frames and reset the pandas index
    df = pd.concat(dfs, join="inner")
    df = df.reset_index(drop=True)

    # Map abbreviations to full names
    if set(["lin", "win"]) == set(df["os"].unique()):
        df["os"] = df["os"].map({"lin": "Linux", "win": "Windows"})

    if set(["kbd", "par", "leo", "uno", "t32", "tlc", "lu3", "ljr"]) == set(
        df["device"].unique()
    ):
        df["device"] = df["device"].map(
            {
                "kbd": "Teensy 3.2 Keyboard",
                "par": "Parallel Port",
                "leo": "Arduino Leonardo",
                "uno": "Arduino Uno",
                "t32": "Teensy 3.2",
                "tlc": "Teensy LC",
                "ljr": "LabJack U3 (writeRegister)",
                "lu3": "LabJack U3 (setFIOState)",
            }
        )

    return df


# %% Function to preprocess data


def preprocess_data(df, max_uncertainty, n_first_measurements):
    """Preprocess the data.

    Parameters
    ----------
    df : pandas.DataFrame
        The data to be preprocessed.
    max_uncertainty : float
        Maximum acceptable network uncertainty in milliseconds.
        All rows in `df` with higher uncertainty will be dropped.
    n_first_measurements : int
        The number of first valid measurements to select.

    Returns
    -------
    df : pandas.DataFrame
        The preprocessed input data, changed inplace.

    """
    # Drop rows where the network uncertainty is too high
    #
    # When the LabStreamer detects a TTL trigger at timepoint tTTL,
    # it calculates the delay relative to the last LSL trigger based on its
    # timestamp tLSL (reported by the stimulus PC), converting that timestamp
    # to its own clock by subtracting the estimated clock offset Δt.
    # The TTL latency is then calculated as tTTL - (tLSL - Δt).
    # Measurement errors of the estimated clock offset are therefore reflected
    # in the calculated trigger latency,
    # but are not indicative of errors in the trigger latency tTTL.
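    # Illustrative numbers (not taken from the data): if the stimulus PC
    # reports tLSL = 10.0000 s, the estimated clock offset is Δt = 2.0000 s,
    # and the LabStreamer detects the TTL trigger at tTTL = 8.0012 s on its
    # own clock, the reported latency is 8.0012 - (10.0000 - 2.0000) = 0.0012 s,
    # i.e., 1.2 ms.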
    df = df[df["network_unc_ms"] <= max_uncertainty]

    # Drop rows where the latency is erroneously low
    #
    # The LabStreamer has a sampling rate of 10 kHz and detects events as the
    # first sample above the threshold in the configured interval relative to
    # the LSL trigger.
    # Sometimes, the keyboard input received by the data collection script
    # was duplicated after ~2 ms and the (still active, as the outputs are set
    # to high for 5 ms) TTL trigger was attributed to the second event with a
    # latency of less than one sample (0.1 ms @ 10 kHz).
    df = df[~((df["latency_ms"] < 0.1) & (df["device"] != "Teensy 3.2 Keyboard"))]

    # Drop measurement indices that do not consist of two rows
    # (one for the keyboard, and one for the device)
    # equivalent to the following line:
    # df = df.groupby(["meas", "idx"]).filter(lambda x: x["latency_ms"].count() == 2)
    df_idx_count = pd.DataFrame(df.groupby("meas")["idx"].value_counts())
    df_idx_count = df_idx_count.rename({"idx": "idx_count"}, axis=1).reset_index()
    df = df.merge(df_idx_count, on=["meas", "idx"])
    df = df[df["idx_count"] == 2]
    df = df.drop(columns=["idx_count"])

    # Make a new continuous index based on the clean data
    tmps = list()
    for meas in df["meas"].unique():
        tmps.append(
            (
                df[df["meas"] == meas]
                .groupby("idx")
                .last()
                .reset_index()
                .reset_index()[["meas", "idx", "index"]]
            )
        )

    tmp = pd.concat(tmps)
    tmp = tmp.rename(columns={"index": "i"})

    df = df.merge(tmp, on=["meas", "idx"], validate="many_to_one")
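    # In the merged frame, "i" counts the valid (keyboard, device) measurement
    # pairs within each "meas" from 0 upwards, ordered by measurement time.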

    # Select only the n_first_measurements
    df = df[df["i"] < n_first_measurements]

    # Sort and return
    df = df[["meas", "os", "device", "i", "latency_ms"]]
    df = df.sort_values(by=["os", "device", "i"])
    return df


# %% Define constants for the analysis

# Paths to the data files
FNAMES = [
    os.path.join("data", "NLS-win-leo.txt.gz"),
    os.path.join("data", "NLS-win-ljr.txt.gz"),
    os.path.join("data", "NLS-win-lu3.txt.gz"),
    os.path.join("data", "NLS-win-par.txt.gz"),
    os.path.join("data", "NLS-win-tlc.txt.gz"),
    os.path.join("data", "NLS-win-t32.txt.gz"),
    os.path.join("data", "NLS-win-uno.txt.gz"),
    os.path.join("data", "NLS-lin-leo.txt.gz"),
    os.path.join("data", "NLS-lin-ljr.txt.gz"),
    os.path.join("data", "NLS-lin-lu3.txt.gz"),
    os.path.join("data", "NLS-lin-par.txt.gz"),
    os.path.join("data", "NLS-lin-tlc.txt.gz"),
    os.path.join("data", "NLS-lin-t32.txt.gz"),
    os.path.join("data", "NLS-lin-uno.txt.gz"),
]

# Parameters for `preprocess_data` (see docstring)
MAX_UNCERTAINTY = 0.01
N_FIRST_MEASUREMENTS = 2500

# Settings for plotting
LETTER_WIDTH_INCH = 8.5
sns.set_style("whitegrid")

# Create output directory for analysis
OUTDIR = "analysis_outputs"
os.makedirs(OUTDIR, exist_ok=True)
# %% Read the data

df = read_data(FNAMES)

# Contingency table of measurements
table_recorded = pd.crosstab(
    index=pd.Categorical(df["os"], categories=sorted(df["os"].unique())),
    columns=pd.Categorical(df["device"], categories=sorted(df["device"].unique())),
    dropna=False,
)

# Drop keyboard, because it is measured for each device, ...
# and thus contains all other columns
table_recorded.index.name = "os"
table_recorded.columns.name = "device"
table_recorded.drop(columns=["Teensy 3.2 Keyboard"], inplace=True)

min_measurements = np.min(table_recorded.min())
max_measurements = np.max(table_recorded.max())

print(f"Min and max number of measurements: {min_measurements}, {max_measurements}")

print("\nTable of recorded measurements:")
try:
    display(table_recorded.head())
except NameError:
    print(table_recorded.head())

# Contingency table of dropped measurements
dropped = df[df["network_unc_ms"] > MAX_UNCERTAINTY]

table_dropped = pd.crosstab(
    index=pd.Categorical(dropped["os"], categories=sorted(df["os"].unique())),
    columns=pd.Categorical(dropped["device"], categories=sorted(df["device"].unique())),
    dropna=False,
)

# Again, drop keyboard because it is measured for each device, ...
# and thus contains all other columns
table_dropped.index.name = "os"
table_dropped.columns.name = "device"
table_dropped.drop(columns=["Teensy 3.2 Keyboard"], inplace=True)

print(f"\nTable of dropped measurements (uncertainty > {MAX_UNCERTAINTY}):")
try:
    display(table_dropped.head())
except NameError:
    print(table_dropped.head())

# %% Preprocess data

df = preprocess_data(df, MAX_UNCERTAINTY, N_FIRST_MEASUREMENTS)

# %% Produce data summary table


def iqr(x):
    """Calculate interquartile range."""
    return np.subtract(*np.percentile(x, [75, 25]))


table = (
    df.groupby(["device", "os"])
    .agg(
        {
            "latency_ms": [np.mean, np.std, np.median, iqr],
        }
    )
    .reset_index()
)


# %% Map multiindex of table to single index
if isinstance(table.columns, pd.core.indexes.multi.MultiIndex):
    cols = []
    for col in table.columns:
        if len(col[-1]) > 0:
            cols.append("-".join(col))
        else:
            cols.append(col[0])

    cols = [s.replace("_", "-") for s in cols]
    table.columns = cols


# %% Show full table
# For convenience, do this in IPython, which allows copying the table
# to then paste it into Google Sheets and import it from there into
# Google Docs.
# As a final step, the imported table can be formatted in APA style,
# see this video for help: https://www.youtube.com/watch?v=f7WomKsmeuI
copy = table.copy()
copy = copy[
    [
        "os",
        "device",
        "latency-ms-mean",
        "latency-ms-std",
        "latency-ms-median",
        "latency-ms-iqr",
    ]
]
copy.columns = ["Operating System", "Device", "Mean", "SD", "Median", "IQR"]

# Round, drop unneeded columns from copy, and sort
copy = copy.round(3)
copy = copy[["Operating System", "Device", "Mean", "SD", "Median", "IQR"]]
copy = copy.sort_values(by=["Operating System", "Mean"])

try:
    display(copy.style.hide_index())
except NameError:
    print("'display' function only available in IPython ... falling back to printing.")
    print(table.round(3).to_string(index=False, justify="center"))


# %% Settings for plotting

# Remove the keyboard; we are not plotting it and not using it for summary stats
df = df[df["device"] != "Teensy 3.2 Keyboard"]
print("\nDropped 'Teensy 3.2 Keyboard' from data.")

# Set plotting order
order = table.groupby("device").min().sort_values("latency-ms-mean").index.to_list()
order.remove("Teensy 3.2 Keyboard")
print(f"\nplotting in order: {order}")

# Tick label settings (split device names over multiple lines)
ylabel_map = dict(zip(order, ["\n".join(device.split(" ")) for device in order]))
ylabels = [ylabel_map[i] for i in order]


# %% Plot

with sns.plotting_context("paper", font_scale=1.3):
    fig, ax = plt.subplots(figsize=(LETTER_WIDTH_INCH, 5))

    palette = "colorblind"

    ptitprince.half_violinplot(
        x="device",
        order=order,
        y="latency_ms",
        hue="os",
        hue_order=["Linux", "Windows"],
        data=df,
        ax=ax,
        palette=palette,
        split=True,
        inner=None,
        offset=0.3,
    )

    for i in ax.collections:
        i.set_alpha(0.65)

    sns.stripplot(
        x="device",
        order=order,
        y="latency_ms",
        hue="os",
        hue_order=["Linux", "Windows"],
        data=df,
        ax=ax,
        palette=palette,
        alpha=0.1,
        size=1,
        zorder=0,
        jitter=1,
        dodge=True,
        edgecolor=None,
    )

    sns.boxplot(
        x="device",
        order=order,
        y="latency_ms",
        hue="os",
        hue_order=["Linux", "Windows"],
        data=df,
        ax=ax,
        palette=palette,
        color=palette,
        width=0.15,
        zorder=10,
        dodge=True,
        showcaps=True,
        boxprops={"zorder": 10},
        showfliers=True,
        whiskerprops={"linewidth": 2, "zorder": 10},
        saturation=0.75,
    )

    xlim = ax.get_xlim()
    ax.set_xlim((xlim[0] + xlim[0] * 0.5, xlim[1]))

    ax.set_xticklabels(ylabels)
    ax.set_xlabel("Device", labelpad=25)

    # Limit the extent of the y-axis, which hides some data points, ...
    # so add a big outlier marker (red star) for that to mention in the caption
    upper_ylim = 2.1
    ax.set_ylim((0.0, upper_ylim))
    text_pos = 0.5
    outlier_text_obj = ax.text(
        x=text_pos,
        y=1.0,
        s="*",
        color="red",
        transform=ax.transAxes,
        ha="center",
        fontsize=20,
    )

    # Sanity check that we did not cut off any other outliers
    outliers = df[df["latency_ms"] >= upper_ylim].reset_index(drop=True)
    assert outliers["meas"].nunique() == 1

    # Sanity check that we placed the star in the correct position (in the middle)
    assert len(order) == 7
    assert order[3] == "LabJack U3 (writeRegister)"
    assert text_pos == 0.5

    # Print short report
    print(
        f"{outliers.shape[0]} outliers not shown for {outliers['meas'][0]}. "
        f"Ranging from {outliers['latency_ms'].min().round(1)} to "
        f"{outliers['latency_ms'].max().round(1)} ms "
        f"(mean: {outliers['latency_ms'].mean().round(1)} ms)"
    )

    ax.set_ylabel("Latency (ms)")
    ax.grid(b=True, which="major", axis="y")

    # Get the legend handles and labels; each of the plotting calls above
    # added its own set of hue entries.
    handles, labels = ax.get_legend_handles_labels()

    # When creating the legend, only use the first two elements,
    # so that each operating system appears only once.
    lh = plt.legend(handles[0:2], labels[0:2], loc="upper left")
    lh.set_title("Operating System")

    sns.despine()

    for ext in ["png", "pdf"]:
        fname = f"figure2_raincloud.{ext}"
        fname = os.path.join(OUTDIR, fname)
        fig.tight_layout()
        dpi = 600 if ext == "png" else None
        plt.savefig(fname, dpi=dpi, bbox_extra_artists=(outlier_text_obj,))

# %% Calculate summary statistics between operating systems

# Drop the LabJack U3 from this comparison, as it's an outlier
df = df[~df["device"].str.startswith("LabJack")]

# Guarantee that Linux is listed before Windows, to later have the appropriate diff
df = df.sort_values(by=["os", "device", "i"])

df_os = pd.pivot_table(
    df, values="latency_ms", index="os", aggfunc=[np.mean, np.std, np.median, iqr]
)
df_os.loc["Windows - Linux", :] = np.diff(df_os.to_numpy(), axis=0)

try:
    display(df_os.round(3))
except NameError:
    print(df_os.round(3))

# %% Where is the OS effect strongest?

# Exclude the LabJack U3 as an outlier
# Exclude the parallel port; it is mentioned in the text anyhow
print("Effect of OS (Windows - Linux) on latency in ms\n")
print(
    table[
        ~table["device"].isin(
            [
                "Teensy 3.2 Keyboard",
                "Parallel Port",
                "LabJack U3 (writeRegister)",
                "LabJack U3 (setFIOState)",
            ]
        )
    ]
    .groupby("device")["latency-ms-mean"]
    .agg(np.diff)
    .sort_values()
)
This firmware, written in C++ (Arduino), was running on each of the tested Arduino and Teensy devices; it is not used for the parallel port or the LabJack U3, which are driven directly from the host, as shown in the script further below.
/*
Device firmware for a USB trigger box (Appelhoff & Stenner, 2021).

MIT License

Copyright 2021 Stefan Appelhoff, Tristan Stenner

*/

// Teensys can send emulated key presses to the host computer.
// Enabling this option "presses" the Enter key every 90ms when pin 10 is
// connected to ground. Enabled by default on Teensys when compiling with
// keyboard support unless ENABLE_KEYBOARD=0 is defined
#ifndef ENABLE_KEYBOARD
#if defined(CORE_TEENSY_KEYBOARD) || defined(USB_SERIAL_HID)
#define ENABLE_KEYBOARD 1
#else
#define ENABLE_KEYBOARD 0
#endif
#endif

// send special pin patterns to troubleshoot connection issues after receiving
// either 254 or 255 as value
#define ENABLE_PIN_TESTS 0

// function prototype; sets output pins in a loop
void setOutputsLoop(uint8_t outChar);

// device specific values for outputPins and setOutputs
#if defined(ARDUINO_AVR_MICRO)
constexpr int outputPins[] = {2,3,4,5,6,7,8,9};
auto setOutputs = setOutputsLoop;

#elif defined(ARDUINO_AVR_LEONARDO) || defined(ARDUINO_AVR_UNO)
constexpr int outputPins[] = {0,1,2,3,4,5,6,7};
auto setOutputs = setOutputsLoop;

#elif defined(CORE_TEENSY)
constexpr int outputPins[] = {0,1,2,3,4,5,6,7};

void setOutputs(uint8_t outChar) {
  for(int i=0; i<8; ++i) digitalWriteFast(i, outChar&(1<<i));
}

#else
#warning "Unknown board"
// Fall back to default values
constexpr int outputPins[] = {0,1,2,3,4,5,6,7};
auto setOutputs = setOutputsLoop;
#endif

void setOutputsLoop(uint8_t outChar)
{
  for(int i=0; i<8; i++) digitalWrite(outputPins[i], outChar&(1<<i));
}

void clearOutputs() {
  setOutputs(0);
}

void setup() {
  Serial.begin(115200);
  for(auto pin: outputPins) pinMode(pin, OUTPUT);

  // Connect pin 10 to GND to enable keyboard emulation
  pinMode(10, INPUT_PULLUP);
}

void loop() {
#if ENABLE_KEYBOARD
  // Helper variable to keep the time of the last synthetic keyboard event
  static elapsedMillis lastKbdEvent = 0;

  if(!digitalRead(10) && lastKbdEvent >= 90) {
    digitalWriteFast(outputPins[1], HIGH);
    Keyboard.press(KEY_ENTER);
    delay(1);
    Keyboard.release(KEY_ENTER);
    digitalWriteFast(outputPins[1], LOW);
    lastKbdEvent = 0;
  }
#warning "Enabling keyboard"
#else
#warning "Keyboard disabled"
#endif

  if(!Serial.available()) return;
  // read in a single value
  uint8_t inChar = Serial.read();

  if(ENABLE_PIN_TESTS && inChar==255)
    for(inChar=1; inChar<255; inChar++){
      setOutputs(inChar);
      delay(100);
      clearOutputs();
      delay(100);
      Serial.println(inChar);
    }
  else if(ENABLE_PIN_TESTS && inChar==254)
    for(int inChar=1; inChar<256; inChar = inChar << 1){
      setOutputs(inChar);
      delay(100);
      clearOutputs();
      delay(100);
    }
  else {
    setOutputs(inChar);
    // also blink LED for visual feedback
    digitalWrite(13, HIGH);
    delay(5);
    clearOutputs();
    digitalWrite(13, LOW);
  }

  // Optional: write back the received value
  // Serial.println((int) inChar);
#if defined(CORE_TEENSY)
  // Serial.send_now();
#endif
}
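For a quick manual test of a flashed device, independent of the measurement scripts, the same one-byte serial protocol can be exercised with pyserial. This is only a sketch: the pyserial package and the device path ("/dev/ttyACM0" here; something like "COM3" on Windows) are assumptions that depend on your setup.

# Minimal manual test of the trigger firmware (sketch only, not part of the
# published measurement code). Requires pyserial: pip install pyserial
import time

import serial

# Adjust the port name to wherever the board enumerates on your system
with serial.Serial("/dev/ttyACM0", baudrate=115200, timeout=0.05) as ser:
    # Some boards (e.g., the Arduino Uno) reset when the port is opened,
    # so give them a moment to boot before writing
    time.sleep(2)
    # The firmware reads one byte, sets the eight output pins to its bit
    # pattern for 5 ms, and then clears them again
    ser.write(bytes([1]))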
This Python script was running on the host computer.
#!/usr/bin/env python
"""Measure TTL trigger sending latency of several devices.

Required packages:

- numpy (https://pypi.org/project/numpy/)
- psychtoolbox (https://pypi.org/project/psychtoolbox/)
- pylsl (https://pypi.org/project/pylsl/)
- pyparallel (https://pypi.org/project/pyparallel/)

MIT License

Copyright 2021 Stefan Appelhoff, Tristan Stenner

"""

import os
import sys
import time

import numpy as np
import pylsl
import psychtoolbox as ptb

port = sys.argv[1]

# Define the "send_trigger" function depending on which device we are testing
if port == 'parport':
    if os.name == 'nt':
        from psychopy.parallel import ParallelPort as PP
    else:
        from parallel import Parallel as PP
    parport = PP()

    def send_trigger():
        parport.setData(1)
        time.sleep(.005)
        parport.setData(0)

elif port == 'labjack':
    import u3
    lj = u3.U3()

    def send_trigger():
        # Using the U3, we can either use the writeRegister method, ...
        # or the setFIOState method.
        lj.writeRegister(6700, 0xFF01)
        # lj.setFIOState(0, 1)
        time.sleep(.01)
        lj.writeRegister(6700, 0xFF00)
        # lj.setFIOState(0, 0)

else:
    # serial port
    ser, _ = ptb.IOPort('OpenSerialPort', port,
                        'BaudRate=115200 FlowControl=None ReceiveTimeout=0.05')

    def send_trigger():
        ptb.IOPort('Write', ser, '\x01')
        # "Sleep" is handled by firmware on device
        # Flush incoming data
        _, _, err = ptb.IOPort('Read', ser, 1, 3)

# Create an LSL outlet
outlet = pylsl.StreamOutlet(
    pylsl.StreamInfo(name='latencytest',
                     type='marker',
                     channel_count=1,
                     nominal_srate=pylsl.IRREGULAR_RATE,
                     channel_format=pylsl.cf_string))

# Get the PsychHID index for the emulated keyboard
try:
    kbdidx = next(
        (dev['index'] for dev in ptb.PsychHID('devices', 4)
         if dev['product'].startswith('Teensyduino'))
    )
except StopIteration:
    kbdidx = 0

# Initialize the Psychtoolbox keyboard queue
keycode = 12 if os.name == 'nt' else 36
keys = np.zeros(256)
keys[keycode] = 1
ptb.PsychHID('KbQueueCreate', kbdidx, keys)
ptb.PsychHID('KbQueueStart', kbdidx)

# Call both functions once to make sure the C libraries are loaded
pylsl.local_clock()
ptb.GetSecs()

# Make sure LSL and PTB use the same underlying clock
timediff = abs(
    pylsl.local_clock() - ptb.GetSecs() + ptb.GetSecs() - pylsl.local_clock()
)
assert(timediff < 1e-3)

while True:
    # Wait for the Teensy to simulate a keypress
    ptb.PsychHID('KbQueueFlush', kbdidx, 3)
    event, n = ptb.PsychHID('KbQueueGetEvent', kbdidx, 1)
    t0 = pylsl.local_clock()
    # Don't send a trigger when waiting for the keypress timed out or the event
    # signified the key release (as opposed to initially pressing it down)
    if len(event) == 0 or event['Pressed'] == 0:
        continue

    send_trigger()  # Send a trigger via the configured interface

    # Send an LSL event with the previously measured time to the LabStreamer
    outlet.do_push_sample(
        outlet.obj,
        outlet.sample_type(b'1'),
        pylsl.pylsl.c_double(t0),
        pylsl.pylsl.c_int(True))
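    # The call above is roughly the low-level equivalent of
    # outlet.push_sample(['1'], timestamp=t0); calling the pylsl internals
    # directly skips the wrapper's per-call argument conversion.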