"""Python API for the lab streaming layer.
The lab streaming layer provides a set of functions to make instrument data
accessible in real time within a lab network. From there, streams can be
picked up by recording programs, viewing programs or custom experiment
applications that access data streams in real time.
The API covers two areas:
- The "push API" allows to create stream outlets and to push data (regular
or irregular measurement time series, event data, coded audio/video frames,
etc.) into them.
- The "pull API" allows to create stream inlets and read time-synched
experiment data from them (for recording, viewing or experiment control).
pylsl has been tested with Python 2.7 and 3.4.
"""
import os
import platform
import struct
from ctypes import (
CDLL,
POINTER,
byref,
c_byte,
c_char_p,
c_double,
c_float,
c_int,
c_long,
c_longlong,
c_short,
c_size_t,
c_void_p,
cast,
util,
)
__all__ = [
"IRREGULAR_RATE",
"DEDUCED_TIMESTAMP",
"FOREVER",
"cf_float32",
"cf_double64",
"cf_string",
"cf_int32",
"cf_int16",
"cf_int8",
"cf_int64",
"cf_undefined",
"protocol_version",
"library_version",
"library_info",
"local_clock",
"StreamInfo",
"StreamOutlet",
"resolve_streams",
"resolve_byprop",
"resolve_bypred",
"StreamInlet",
"XMLElement",
"ContinuousResolver",
"TimeoutError",
"LostError",
"InvalidArgumentError",
"InternalError",
"stream_info",
"stream_outlet",
"stream_inlet",
"xml_element",
"timeout_error",
"lost_error",
"vectorf",
"vectord",
"vectorl",
"vectori",
"vectors",
"vectorc",
"vectorstr",
"resolve_stream",
]
# =================
# === Constants ===
# =================
# Constant to indicate that a stream has variable sampling rate.
IRREGULAR_RATE = 0.0
# Constant to indicate that a sample has the next successive time stamp
# according to the stream's defined sampling rate. Optional optimization to
# transmit less data per sample.
DEDUCED_TIMESTAMP = -1.0
# A very large time value (ca. 1 year); can be used in timeouts.
FOREVER = 32000000.0
# Value formats supported by LSL. LSL data streams are sequences of samples,
# each of which is a same-size vector of values with one of the below types.
# For up to 24-bit precision measurements in the appropriate physical unit (
# e.g., microvolts). Integers from -16777216 to 16777216 are represented
# accurately.
cf_float32 = 1
# For universal numeric data as long as permitted by network and disk budget.
# The largest representable integer is 53-bit.
cf_double64 = 2
# For variable-length ASCII strings or data blobs, such as video frames,
# complex event descriptions, etc.
cf_string = 3
# For high-rate digitized formats that require 32-bit precision. Depends
# critically on meta-data to represent meaningful units. Useful for
# application event codes or other coded data.
cf_int32 = 4
# For very high bandwidth signals or CD quality audio (for professional audio
# float is recommended).
cf_int16 = 5
# For binary signals or other coded data.
cf_int8 = 6
# For now only for future compatibility. Support for this type is not
# available on all languages and platforms.
cf_int64 = 7
# Can not be transmitted.
cf_undefined = 0
# Post processing flags
proc_none = 0 # No automatic post-processing; return the ground-truth time stamps for manual post-processing.
proc_clocksync = 1 # Perform automatic clock synchronization; equivalent to manually adding the time_correction().
proc_dejitter = 2 # Remove jitter from time stamps using a smoothing algorithm applied to the received time stamps.
proc_monotonize = 4 # Force the time-stamps to be monotonically ascending. Only makes sense if timestamps are dejittered.
proc_threadsafe = 8 # Post-processing is thread-safe (same inlet can be read from by multiple threads).
proc_ALL = (
proc_none | proc_clocksync | proc_dejitter | proc_monotonize | proc_threadsafe
)
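# For example (illustrative), several post-processing flags can be OR-ed
# together when constructing an inlet (see StreamInlet below):
#
#     inlet = StreamInlet(info, processing_flags=proc_clocksync | proc_dejitter)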
# ==========================================================
# === Free Functions provided by the lab streaming layer ===
# ==========================================================
def protocol_version():
"""Protocol version.
The major version is protocol_version() / 100;
The minor version is protocol_version() % 100;
Clients with different minor versions are protocol-compatible with each
other while clients with different major versions will refuse to work
together.
"""
return lib.lsl_protocol_version()
def library_version():
"""Version of the underlying liblsl library.
The major version is library_version() / 100;
The minor version is library_version() % 100;
"""
return lib.lsl_library_version()
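# For example (illustrative), the major/minor parts can be recovered with
# integer division and modulo:
#
#     major, minor = library_version() // 100, library_version() % 100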
def library_info():
"""Get a string containing library information. The format of the string shouldn't be used
for anything important except giving a a debugging person a good idea which exact library
version is used."""
return lib.lsl_library_info().decode("utf-8")
def local_clock():
"""Obtain a local system time stamp in seconds.
The resolution is better than a millisecond. This reading can be used to
assign time stamps to samples as they are being acquired.
If the "age" of a sample is known at a particular time (e.g., from USB
transmission delays), it can be used as an offset to lsl_local_clock() to
obtain a better estimate of when a sample was actually captured. See
StreamOutlet.push_sample() for a use case.
"""
return lib.lsl_local_clock()
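# For example (illustrative), if a sample is known to be `known_delay` seconds
# old by the time it is pushed (a hypothetical, device-specific estimate), a
# better capture time stamp can be derived from local_clock():
#
#     stamp = local_clock() - known_delay
#     outlet.push_sample(sample, stamp)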
# ==========================
# === Stream Declaration ===
# ==========================
class StreamInfo:
"""The StreamInfo object stores the declaration of a data stream.
Represents the following information:
a) stream data format (#channels, channel format)
b) core information (stream name, content type, sampling rate)
c) optional meta-data about the stream content (channel labels,
measurement units, etc.)
Whenever a program wants to provide a new stream on the lab network it will
typically first create a StreamInfo to describe its properties and then
construct a StreamOutlet with it to create the stream on the network.
Recipients who discover the outlet can query the StreamInfo; it is also
written to disk when recording the stream (playing a similar role as a file
header).
"""
def __init__(
self,
name="untitled",
type="",
channel_count=1,
nominal_srate=IRREGULAR_RATE,
channel_format=cf_float32,
source_id="",
handle=None,
):
"""Construct a new StreamInfo object.
Core stream information is specified here. Any remaining meta-data can
be added later.
Keyword arguments:
name -- Name of the stream. Describes the device (or product series)
that this stream makes available (for use by programs,
experimenters or data analysts). Cannot be empty.
type -- Content type of the stream. By convention LSL uses the content
types defined in the XDF file format specification where
applicable (https://github.com/sccn/xdf). The content type is the
preferred way to find streams (as opposed to searching by name).
channel_count -- Number of channels per sample. This stays constant for
the lifetime of the stream. (default 1)
nominal_srate -- The sampling rate (in Hz) as advertised by the data
source, regular (otherwise set to IRREGULAR_RATE).
(default IRREGULAR_RATE)
channel_format -- Format/type of each channel. If your channels have
different formats, consider supplying multiple
streams or use the largest type that can hold
them all (such as cf_double64). It is also allowed
to pass this as a string, without the cf_ prefix,
e.g., 'float32' (default cf_float32)
source_id -- Unique identifier of the device or source of the data, if
available (such as the serial number). This is critical
for system robustness since it allows recipients to
recover from failure even after the serving app, device or
computer crashes (just by finding a stream with the same
source id on the network again). Therefore, it is highly
recommended to always try to provide whatever information
can uniquely identify the data source itself.
(default '')
"""
if handle is not None:
self.obj = c_void_p(handle)
else:
if isinstance(channel_format, str):
channel_format = string2fmt[channel_format]
self.obj = lib.lsl_create_streaminfo(
c_char_p(str.encode(name)),
c_char_p(str.encode(type)),
channel_count,
c_double(nominal_srate),
channel_format,
c_char_p(str.encode(source_id)),
)
self.obj = c_void_p(self.obj)
if not self.obj:
raise RuntimeError("could not create stream description " "object.")
def __del__(self):
"""Destroy a previously created StreamInfo object."""
# noinspection PyBroadException
try:
lib.lsl_destroy_streaminfo(self.obj)
except:
pass
# === Core Information (assigned at construction) ===
def name(self):
"""Name of the stream.
This is a human-readable name. For streams offered by device modules,
it refers to the type of device or product series that is generating
the data of the stream. If the source is an application, the name may
be a more generic or specific identifier. Multiple streams with the
same name can coexist, though potentially at the cost of ambiguity (for
the recording app or experimenter).
"""
return lib.lsl_get_name(self.obj).decode("utf-8")
def type(self):
"""Content type of the stream.
The content type is a short string, such as "EEG" or "Gaze", which
describes the content carried by the channel (if known). If a stream
contains mixed content, this value need not be assigned but may instead
be stored in the description of the channel types. To be useful to
applications and automated processing systems, using the recommended
content types is preferred.
"""
return lib.lsl_get_type(self.obj).decode("utf-8")
def channel_count(self):
"""Number of channels of the stream.
A stream has at least one channel; the channel count stays constant for
all samples.
"""
return lib.lsl_get_channel_count(self.obj)
def nominal_srate(self):
"""Sampling rate of the stream, according to the source (in Hz).
If a stream is irregularly sampled, this should be set to
IRREGULAR_RATE.
Note that no data will be lost even if this sampling rate is incorrect
or if a device has temporary hiccups, since all samples will be
transmitted anyway (except for those dropped by the device itself).
However, when the recording is imported into an application, a good
data importer may correct such errors more accurately if the advertised
sampling rate was close to the specs of the device.
"""
return lib.lsl_get_nominal_srate(self.obj)
def channel_format(self):
"""Channel format of the stream.
All channels in a stream have the same format. However, a device might
offer multiple time-synched streams each with its own format.
"""
return lib.lsl_get_channel_format(self.obj)
def source_id(self):
"""Unique identifier of the stream's source, if available.
The unique source (or device) identifier is an optional piece of
information that, if available, allows endpoints (such as the
recording program) to re-acquire a stream automatically once it is
back online.
"""
return lib.lsl_get_source_id(self.obj).decode("utf-8")
# === Hosting Information (assigned when bound to an outlet/inlet) ===
def version(self):
"""Protocol version used to deliver the stream."""
return lib.lsl_get_version(self.obj)
def created_at(self):
"""Creation time stamp of the stream.
This is the time stamp when the stream was first created
(as determined via local_clock() on the providing machine).
"""
return lib.lsl_get_created_at(self.obj)
def uid(self):
"""Unique ID of the stream outlet instance (once assigned).
This is a unique identifier of the stream outlet, and is guaranteed to
be different across multiple instantiations of the same outlet (e.g.,
after a re-start).
"""
return lib.lsl_get_uid(self.obj).decode("utf-8")
def session_id(self):
"""Session ID for the given stream.
The session id is an optional human-assigned identifier of the
recording session. While it is rarely used, it can be used to prevent
concurrent recording activities on the same sub-network (e.g., in
multiple experiment areas) from seeing each other's streams
(can be assigned in a configuration file read by liblsl, see also
Network Connectivity in the LSL wiki).
"""
return lib.lsl_get_session_id(self.obj).decode("utf-8")
def hostname(self):
"""Hostname of the providing machine."""
return lib.lsl_get_hostname(self.obj).decode("utf-8")
# === Data Description (can be modified) ===
def desc(self):
"""Extended description of the stream.
It is highly recommended that at least the channel labels are described
here. See code examples on the LSL wiki. Other information, such
as amplifier settings, measurement units if deviating from defaults,
setup information, subject information, etc., can be specified here, as
well. Meta-data recommendations follow the XDF file format project
(github.com/sccn/xdf/wiki/Meta-Data or web search for: XDF meta-data).
Important: if you use a stream content type for which meta-data
recommendations exist, please try to lay out your meta-data in
agreement with these recommendations for compatibility with other
applications.
"""
return XMLElement(lib.lsl_get_desc(self.obj))
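# For example (illustrative), channel labels can be added to the extended
# description via the XMLElement tree, following the XDF meta-data layout:
#
#     chns = info.desc().append_child("channels")
#     for label in ["C3", "Cz", "C4"]:
#         chns.append_child("channel").append_child_value("label", label)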
def as_xml(self):
"""Retrieve the entire stream_info in XML format.
This yields an XML document (in string form) whose top-level element is
<description>. The description element contains one element for each
field of the stream_info class, including:
a) the core elements <name>, <type>, <channel_count>, <nominal_srate>,
<channel_format>, <source_id>
b) the misc elements <version>, <created_at>, <uid>, <session_id>,
<v4address>, <v4data_port>, <v4service_port>, <v6address>,
<v6data_port>, <v6service_port>
c) the extended description element <desc> with user-defined
sub-elements.
"""
return lib.lsl_get_xml(self.obj).decode("utf-8")
def get_channel_labels(self):
"""Get the channel names in the description.
Returns
-------
labels : list of str or ``None`` | None
List of channel names, matching the number of total channels.
If ``None``, the channel names are not set.
.. warning::
If a list of str and ``None`` are returned, some of the channel names
are missing. This is not expected and could occur if the XML tree in
the ``desc`` property is tampered with outside of the defined getter and
setter.
"""
return self._get_channel_info("label")
def get_channel_types(self):
"""Get the channel types in the description.
Returns
-------
types : list of str or ``None`` | None
List of channel types, matching the number of total channels.
If ``None``, the channel types are not set.
.. warning::
If a list of str and ``None`` are returned, some of the channel types
are missing. This is not expected and could occur if the XML tree in
the ``desc`` property is tampered with outside of the defined getter and
setter.
"""
return self._get_channel_info("type")
def get_channel_units(self):
"""Get the channel units in the description.
Returns
-------
units : list of str or ``None`` | None
List of channel units, matching the number of total channels.
If ``None``, the channel units are not set.
.. warning::
If a list of str and ``None`` are returned, some of the channel units
are missing. This is not expected and could occur if the XML tree in
the ``desc`` property is tampered with outside of the defined getter and
setter.
"""
return self._get_channel_info("unit")
def _get_channel_info(self, name):
"""Get the 'channel/name' element in the XML tree."""
if self.desc().child("channels").empty():
return None
ch_infos = list()
channels = self.desc().child("channels")
ch = channels.child("channel")
while not ch.empty():
ch_info = ch.child(name).first_child().value()
if len(ch_info) != 0:
ch_infos.append(ch_info)
else:
ch_infos.append(None)
ch = ch.next_sibling()
if all(ch_info is None for ch_info in ch_infos):
return None
if len(ch_infos) != self.channel_count():
print(
f"The stream description contains {len(ch_infos)} elements for "
f"{self.channel_count()} channels.",
)
return ch_infos
def set_channel_labels(self, labels):
"""Set the channel names in the description. Existing labels are overwritten.
Parameters
----------
labels : list of str
List of channel names, matching the number of total channels.
"""
self._set_channel_info(labels, "label")
def set_channel_types(self, types):
"""Set the channel types in the description. Existing types are overwritten.
The types are given as human readable strings, e.g. ``'eeg'``.
Parameters
----------
types : list of str | str
List of channel types, matching the number of total channels.
If a single `str` is provided, the type is applied to all channels.
"""
types = [types] * self.channel_count() if isinstance(types, str) else types
self._set_channel_info(types, "type")
def set_channel_units(self, units):
"""Set the channel units in the description. Existing units are overwritten.
The units are given as human readable strings, e.g. ``'microvolts'``, or as a
power-of-10 multiplication factor, e.g. ``-6`` for ``1e-6``, thus converting
e.g. Volts to microvolts.
Parameters
----------
units : list of str | list of int | array of int | str | int
List of channel units, matching the number of total channels.
If a single `str` or `int` is provided, the unit is applied to all channels.
Notes
-----
Some channel types do not have a unit. The `str` ``none`` or the `int` 0 should
be used to denote this channel unit, corresponding to ``FIFF_UNITM_NONE`` in
MNE-Python.
"""
if isinstance(units, (int, str)):
units = [units] * self.channel_count()
else: # iterable
units = [
str(int(unit)) if isinstance(unit, int) else unit for unit in units
]
self._set_channel_info(units, "unit")
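# For example (illustrative), the setters above can describe a hypothetical
# 2-channel EEG stream:
#
#     info.set_channel_labels(["C3", "C4"])
#     info.set_channel_types("eeg")    # a single str applies to all channels
#     info.set_channel_units(-6)       # factor 1e-6, i.e. microvolts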
def _set_channel_info(self, ch_infos, name) -> None:
"""Set the 'channel/name' element in the XML tree."""
if len(ch_infos) != self.channel_count():
raise ValueError(
f"The number of provided channel {name} {len(ch_infos)} "
f"must match the number of channels {self.channel_count()}."
)
channels = StreamInfo._add_first_node(self.desc, "channels")
# fill the 'channel/name' element of the tree and overwrite existing values
ch = channels.child("channel")
for ch_info in ch_infos:
ch = channels.append_child("channel") if ch.empty() else ch
StreamInfo._set_description_node(ch, {name: ch_info})
ch = ch.next_sibling()
StreamInfo._prune_description_node(ch, channels)
# -- Helper methods to interact with the XMLElement tree ---------------------------
@staticmethod
def _add_first_node(desc, name):
"""Add the first node in the description and return it."""
if desc().child(name).empty():
node = desc().append_child(name)
else:
node = desc().child(name)
return node
@staticmethod
def _prune_description_node(node, parent):
"""Prune a node and remove outdated entries."""
# this is useful in case the sinfo is tampered with and has more entries of type
# 'node' than it should.
while not node.empty():
node_next = node.next_sibling()
parent.remove_child(node)
node = node_next
@staticmethod
def _set_description_node(node, mapping):
"""Set the key: value child(s) of a node."""
for key, value in mapping.items():
value = str(int(value)) if isinstance(value, int) else str(value)
if node.child(key).empty():
node.append_child_value(key, value)
else:
node.child(key).first_child().set_value(value)
# =====================
# === Stream Outlet ===
# =====================
class StreamOutlet:
"""A stream outlet.
Outlets are used to make streaming data (and the meta-data) available on
the lab network.
"""
def __init__(self, info, chunk_size=0, max_buffered=360):
"""Establish a new stream outlet. This makes the stream discoverable.
Keyword arguments:
info -- The StreamInfo object to describe this stream. Stays
constant over the lifetime of the outlet.
chunk_size -- Optionally the desired chunk granularity (in samples)
for transmission. If unspecified, each push operation
yields one chunk. Inlets can override this setting.
(default 0)
max_buffered -- Optionally the maximum amount of data to buffer (in
seconds if there is a nominal sampling rate, otherwise
x100 in samples). The default is 6 minutes of data.
Note that, for high-bandwidth data, you will want to
use a lower value here to avoid running out of RAM.
(default 360)
"""
self.obj = lib.lsl_create_outlet(info.obj, chunk_size, max_buffered)
self.obj = c_void_p(self.obj)
if not self.obj:
raise RuntimeError("could not create stream outlet.")
self.channel_format = info.channel_format()
self.channel_count = info.channel_count()
self.do_push_sample = fmt2push_sample[self.channel_format]
self.do_push_chunk = fmt2push_chunk[self.channel_format]
self.do_push_chunk_n = fmt2push_chunk_n[self.channel_format]
self.value_type = fmt2type[self.channel_format]
self.sample_type = self.value_type * self.channel_count
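# For example (illustrative), an outlet that transmits in chunks of 32 samples
# and buffers at most 60 seconds of data:
#
#     outlet = StreamOutlet(info, chunk_size=32, max_buffered=60)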
def __del__(self):
"""Destroy an outlet.
The outlet will no longer be discoverable after destruction and all
connected inlets will stop delivering data.
"""
# noinspection PyBroadException
try:
lib.lsl_destroy_outlet(self.obj)
except:
pass
def push_sample(self, x, timestamp=0.0, pushthrough=True):
"""Push a sample into the outlet.
Each entry in the list corresponds to one channel.
Keyword arguments:
x -- A list of values to push (one per channel).
timestamp -- Optionally the capture time of the sample, in agreement
with local_clock(); if omitted, the current
time is used. (default 0.0)
pushthrough -- Whether to push the sample through to the receivers
instead of buffering it with subsequent samples.
Note that the chunk_size, if specified at outlet
construction, takes precedence over the pushthrough flag.
(default True)
"""
if len(x) == self.channel_count:
if self.channel_format == cf_string:
x = [v.encode("utf-8") for v in x]
handle_error(
self.do_push_sample(
self.obj,
self.sample_type(*x),
c_double(timestamp),
c_int(pushthrough),
)
)
else:
raise ValueError(
"length of the sample (" + str(len(x)) + ") must "
"correspond to the stream's channel count ("
+ str(self.channel_count)
+ ")."
)
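# For example (illustrative), pushing a single sample for a hypothetical
# 8-channel outlet with an explicit capture time stamp:
#
#     outlet.push_sample([0.0] * 8, timestamp=local_clock())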
def push_chunk(self, x, timestamp=0.0, pushthrough=True):
"""Push a list of samples into the outlet.
samples -- A list of samples, preferably as a 2-D numpy array.
`samples` can also be a list of lists, or a list of
multiplexed values.
timestamp -- Optional, float or 1-D list of floats.
If float: the capture time of the most recent sample, in
agreement with local_clock(); if omitted/default (0.0), the current
time is used. The time stamps of other samples are
automatically derived according to the sampling rate of
the stream.
If list of floats: the time stamps for each sample.
Must be the same length as `x`.
pushthrough -- Whether to push the chunk through to the receivers instead
of buffering it with subsequent samples. Note that the
chunk_size, if specified at outlet construction, takes
precedence over the pushthrough flag. (default True)
Note: performance is optimized for the following argument types:
- `x`: 2-D numpy array
- `timestamp`: float
"""
# Convert timestamp to corresponding ctype
try:
timestamp_c = c_double(timestamp)
# Select the corresponding push_chunk method
liblsl_push_chunk_func = self.do_push_chunk
except TypeError:
try:
timestamp_c = (c_double * len(timestamp))(*timestamp)
liblsl_push_chunk_func = self.do_push_chunk_n
except TypeError:
raise TypeError("timestamp must be a float or an iterable of floats")
try:
n_values = self.channel_count * len(x)
data_buff = (self.value_type * n_values).from_buffer(x)
handle_error(
liblsl_push_chunk_func(
self.obj,
data_buff,
c_long(n_values),
timestamp_c,
c_int(pushthrough),
)
)
except TypeError:
# don't send empty chunks
if len(x):
if type(x[0]) is list:
x = [v for sample in x for v in sample]
if self.channel_format == cf_string:
x = [v.encode("utf-8") for v in x]
if len(x) % self.channel_count == 0:
# x is a flattened list of multiplexed values
constructor = self.value_type * len(x)
# noinspection PyCallingNonCallable
handle_error(
liblsl_push_chunk_func(
self.obj,
constructor(*x),
c_long(len(x)),
timestamp_c,
c_int(pushthrough),
)
)
else:
raise ValueError(
"Each sample must have the same number of channels ("
+ str(self.channel_count)
+ ")."
)
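# For example (illustrative), pushing several samples at once for a
# hypothetical 2-channel float32 outlet; a 2-D numpy array
# (n_samples x n_channels) is the fastest path, but a list of lists works too:
#
#     import numpy as np
#     outlet.push_chunk(np.zeros((10, 2), dtype=np.float32))   # 10 samples
#     outlet.push_chunk([[0.1, 0.2], [0.3, 0.4]])              # 2 samples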
def have_consumers(self):
"""Check whether consumers are currently registered.
While it does not hurt, there is technically no reason to push samples
if there is no consumer.
"""
return bool(lib.lsl_have_consumers(self.obj))
def wait_for_consumers(self, timeout):
"""Wait until some consumer shows up (without wasting resources).
Returns True if the wait was successful, False if the timeout expired.
"""
return bool(lib.lsl_wait_for_consumers(self.obj, c_double(timeout)))
def get_info(self):
outlet_info = lib.lsl_get_info(self.obj)
return StreamInfo(handle=outlet_info)
# =========================
# === Resolve Functions ===
# =========================
def resolve_streams(wait_time=1.0):
"""Resolve all streams on the network.
This function returns all currently available streams from any outlet on
the network. The network is usually the subnet specified at the local
router, but may also include a group of machines visible to each other via
multicast packets (given that the network supports it), or a list of
hostnames. These details may optionally be customized by the experimenter
in a configuration file (see Network Connectivity in the LSL wiki).
Keyword arguments:
wait_time -- The waiting time for the operation, in seconds, to search for
streams. Warning: If this is too short (<0.5s) only a subset
(or none) of the outlets that are present on the network may
be returned. (default 1.0)
Returns a list of StreamInfo objects (with empty desc field), any of which
can subsequently be used to open an inlet. The full description can be
retrieved from the inlet.
"""
# noinspection PyCallingNonCallable
buffer = (c_void_p * 1024)()
num_found = lib.lsl_resolve_all(byref(buffer), 1024, c_double(wait_time))
return [StreamInfo(handle=buffer[k]) for k in range(num_found)]
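# For example (illustrative), listing the names and types of all currently
# visible streams:
#
#     for info in resolve_streams():
#         print(info.name(), info.type())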
def resolve_byprop(prop, value, minimum=1, timeout=FOREVER):
"""Resolve all streams with a specific value for a given property.
If the goal is to resolve a specific stream, this method is preferred over
resolving all streams and then selecting the desired one.
Keyword arguments:
prop -- The StreamInfo property that should have a specific value (e.g.,
"name", "type", "source_id", or "desc/manufacturer").
value -- The string value that the property should have (e.g., "EEG" as
the type property).
minimum -- Return at least this many streams. (default 1)
timeout -- Optionally a timeout of the operation, in seconds. If the
timeout expires, less than the desired number of streams
(possibly none) will be returned. (default FOREVER)
Returns a list of matching StreamInfo objects (with empty desc field), any
of which can subsequently be used to open an inlet.
Example: results = resolve_byprop("type", "EEG")
"""
# noinspection PyCallingNonCallable
buffer = (c_void_p * 1024)()
num_found = lib.lsl_resolve_byprop(
byref(buffer),
1024,
c_char_p(str.encode(prop)),
c_char_p(str.encode(value)),
minimum,
c_double(timeout),
)
return [StreamInfo(handle=buffer[k]) for k in range(num_found)]
def resolve_bypred(predicate, minimum=1, timeout=FOREVER):
"""Resolve all streams that match a given predicate.
Advanced query that allows you to impose more conditions on the retrieved
streams; the given string is an XPath 1.0 predicate for the <description>
node (omitting the surrounding []'s), see also
http://en.wikipedia.org/w/index.php?title=XPath_1.0&oldid=474981951.
Keyword arguments:
predicate -- The predicate string, e.g. "name='BioSemi'" or
"type='EEG' and starts-with(name,'BioSemi') and
count(description/desc/channels/channel)=32"
minimum -- Return at least this many streams. (default 1)
timeout -- Optionally a timeout of the operation, in seconds. If the
timeout expires, less than the desired number of streams
(possibly none) will be returned. (default FOREVER)
Returns a list of matching StreamInfo objects (with empty desc field), any
of which can subsequently be used to open an inlet.
"""
# noinspection PyCallingNonCallable
buffer = (c_void_p * 1024)()
num_found = lib.lsl_resolve_bypred(
byref(buffer), 1024, c_char_p(str.encode(predicate)), minimum, c_double(timeout)
)
return [StreamInfo(handle=buffer[k]) for k in range(num_found)]
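# For example (illustrative), resolving a BioSemi EEG stream by predicate and
# waiting at most 5 seconds:
#
#     results = resolve_bypred("type='EEG' and starts-with(name,'BioSemi')",
#                              minimum=1, timeout=5.0)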
# ====================
# === Memory functions
# ====================
def free_char_p_array_memory(char_p_array, num_elements):
pointers = cast(char_p_array, POINTER(c_void_p))
for p in range(num_elements):
if pointers[p] is not None: # only free initialized pointers
lib.lsl_destroy_string(pointers[p])
# ====================
# === Stream Inlet ===
# ====================
class StreamInlet:
"""A stream inlet.
Inlets are used to receive streaming data (and meta-data) from the lab
network.
"""
def __init__(
self, info, max_buflen=360, max_chunklen=0, recover=True, processing_flags=0
):
"""Construct a new stream inlet from a resolved stream description.
Keyword arguments:
info -- A resolved stream description object (as coming from one
of the resolver functions). Note: the stream_inlet may also be
constructed with a fully-specified stream_info, if the desired
channel format and count is already known up-front, but this is
strongly discouraged and should only ever be done if there is
no time to resolve the stream up-front (e.g., due to
limitations in the client program).
max_buflen -- Optionally the maximum amount of data to buffer (in
seconds if there is a nominal sampling rate, otherwise
x100 in samples). Recording applications want to use a
fairly large buffer size here, while real-time
applications would only buffer as much as they need to
perform their next calculation. (default 360)
max_chunklen -- Optionally the maximum size, in samples, at which
chunks are transmitted (the default corresponds to the
chunk sizes used by the sender). Recording programs
can use a generous size here (leaving it to the network
how to pack things), while real-time applications may
want a finer (perhaps 1-sample) granularity. If left
unspecified (=0), the sender determines the chunk
granularity. (default 0)
recover -- Try to silently recover lost streams that are recoverable
(=those that have a source_id set). In all other cases
(recover is False or the stream is not recoverable)
functions may throw a lost_error if the stream's source is
lost (e.g., due to an app or computer crash). (default True)
processing_flags -- Post-processing options. Use one of the post-processing
flags `proc_none`, `proc_clocksync`, `proc_dejitter`, `proc_monotonize`,
or `proc_threadsafe`. Can also be a logical OR combination of multiple
flags. Use `proc_ALL` for all flags. (default proc_none).
"""
if type(info) is list:
raise TypeError(
"description needs to be of type StreamInfo, " "got a list."
)
self.obj = lib.lsl_create_inlet(info.obj, max_buflen, max_chunklen, recover)
self.obj = c_void_p(self.obj)
if not self.obj:
raise RuntimeError("could not create stream inlet.")
if processing_flags > 0:
handle_error(lib.lsl_set_postprocessing(self.obj, processing_flags))
self.channel_format = info.channel_format()
self.channel_count = info.channel_count()
self.do_pull_sample = fmt2pull_sample[self.channel_format]
self.do_pull_chunk = fmt2pull_chunk[self.channel_format]
self.value_type = fmt2type[self.channel_format]
self.sample_type = self.value_type * self.channel_count
self.sample = self.sample_type()
self.buffers = {}
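# For example (illustrative), a real-time consumer might keep only a small
# buffer and let liblsl synchronize and de-jitter the time stamps:
#
#     inlet = StreamInlet(streams[0], max_buflen=1,
#                         processing_flags=proc_clocksync | proc_dejitter)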
def __del__(self):
"""Destructor. The inlet will automatically disconnect if destroyed."""
# noinspection PyBroadException
try:
lib.lsl_destroy_inlet(self.obj)
except Exception:
pass
def info(self, timeout=FOREVER):
"""Retrieve the complete information of the given stream.
This includes the extended description. Can be invoked at any time of
the stream's lifetime.
Keyword arguments:
timeout -- Timeout of the operation. (default FOREVER)
Throws a TimeoutError (if the timeout expires), or LostError (if the
stream source has been lost).
"""
errcode = c_int()
result = lib.lsl_get_fullinfo(self.obj, c_double(timeout), byref(errcode))
handle_error(errcode)
return StreamInfo(handle=result)
def open_stream(self, timeout=FOREVER):
"""Subscribe to the data stream.