-
Notifications
You must be signed in to change notification settings - Fork 1
/
cloud_music_no_14.html
2804 lines (2636 loc) · 132 KB
/
cloud_music_no_14.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
<!DOCTYPE html>
<html>
<head>
<meta http-equiv='cache-control' content='no-cache'>
<meta http-equiv='expires' content='0'>
<meta http-equiv='pragma' content='no-cache'>
<title>cloud_music_no_14</title>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script src="dat.gui.js"></script>
<script src="jquery.js"></script>
<script src="p5.js"></script>
<script src="sprintf.js"></script>
<script src="three.js"></script>
<script src="ace.js"></script>
<script src="tinycolor.js"></script>
<script>
try {
var fs = require("fs");
var __dirname = fs.realpathSync.native(".");
} catch (e) {
console.log(e);
}
</script>
<script src="CsoundAudioNode.js"></script>
<script src="CsoundAC.js"></script>
<script src='Silencio.js'></script>
<script src='ChordSpace.js'></script>
<script src="TrackballControls.js"></script>
<link rel="stylesheet" href="w3.css">
<style>
.w3-container {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
}
.w3-bar {
z-index: 1;
}
.dg {
font: 12px 'Verdana', sans-serif;
}
.dg .c {
background: transparent;
}
.dg.main .close-button {
background: transparent;
font: 15px 'Verdana', sans-serif;
}
.dg.main .close-button:hover {
background: transparent;
}
input {
font: 1em/1.25em Verdana, sans-serif;
background: transparent;
}
</style>
</head>
<body id="body" class="w3-medium w3-text-sand" style="height:100vh;">
<!-- #region Canvas -->
<canvas id="display" class="w3-container" style="background-color:black;height:100%;margin:0;padding:0;z-index:0;">
</canvas>
<!-- endregion-->
<!-- #region About -->
<div id="about_view" class="w3-container "
style="font-size:11px;position:absolute;top:70px;z-index:5;background:transparent;color:rgb(255, 255, 200, 67%);max-height: calc(100vh - 70px);overflow-y:auto;">
<h1 style="font-size: 15px;">Cloud Music No. 14</h1>
<h2 style="font-size: 13px;">Michael Gogins<br>
October 2022</h2>
<a rel="license" href="http://creativecommons.org/licenses/by-nc-sa/3.0/"><img alt="Creative Commons License"
style="border-width:0;" src="https://i.creativecommons.org/l/by-nc-sa/3.0/88x31.png" /></a>
<p>This work is licensed under a <a rel="license"
href="http://creativecommons.org/licenses/by-nc-sa/3.0/">Creative Commons
Attribution-NonCommercial-ShareAlike 3.0 Unported License.</a>
<p>This is an online piece of electroacoustic music, rendered in your Web
browser using high-resolution audio. It will play indefinitely, never ending,
always changing.
<p>The notes are played by a Csound orchestra that is embedded in this Web page using my <a
href="https://github.com/gogins/csound-wasm">WebAssembly build of Csound</a>. This in turn includes my
<a href="https://github.com/gogins/csound-ac">CsoundAC</a> library for algorithmic composition, used in this
piece to generate randomly selected but (I hope) musically sensible chord progressions and modulations that
are applied to the generated notes.
<p>The music is generated by sampling the bottom row of pixels from the moving image, downsampling that row into
fewer pixels, and translating those pixels into musical notes from left (lowest) to right (highest). Hue is
mapped to instrument, saturation is mapped to duration, and value is mapped to loudness. Generally speaking,
when a bright ring moves to the bottom of the display, you should hear some notes generated by that
event.
<p>The viewer may exercise a certain amount of control over the piece by opening the <i>Controls</i>. Changing
the hue will change the arrangement of instruments. The tempo of both note generation and the visuals
may be controlled.
<p>When the user clicks on the <i>Record</i> button, the "fout" opcode is used to record the live audio
to memory in the browser. When the user clicks on <i>Pause</i>, the recorded
audio will automatically be downloaded to the user's Downloads directory. Such recording may be restarted
and paused again any number of times. This can be used in place of an audio loopback interface to make
a soundfile from a performance.
<p>This work is licensed under a Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License
(https://creativecommons.org/licenses/by-nc-sa/4.0/deed.en)
<p>Feel free to use this piece as a template for creating new pieces of this type... as long as it doesn't sound
too much like this one!
<p>Please report any problems you have playing this piece, or any ideas for enhancements, at <a
href="https://github.com/gogins/cloud-music/issues">cloud-music issues</a>.
<ul>
<li>To view the source code of this piece, use your browser menu to view the page source.
<li>To inspect or debug the code of this piece as it runs, use your browser menu to open the developer
tools.
</ul>
<h2 style="font-size: 13px;">Credits</h2>
<p>I created the visuals for this piece by adapting Scruffy's
<a href="https://www.shadertoy.com/view/4fXSRn"><i>TestShader09012024</i></a>,
which has an open-source license compatible with the license of this piece.
<p>My code in CsoundAC for working with chords, scales, and voice-leading implements basic ideas from <a
href="http://dmitri.tymoczko.com/">Dmitri Tymoczko's work in music theory</a>.
<p>Code for compiling and controlling shaders is adapted from <a
href="https://www.shadertoy.com">ShaderToy.com</a>.
<p>The algorithm for downsampling the video canvas is from <a
href="https://skemman.is/bitstream/1946/15343/3/SS_MSthesis.pdf">Sveinn Steinarsson's MS thesis</a> with
code from <a href="https://github.com/pingec/downsample-lttb">https://github.com/pingec/downsample-lttb</a>.
<p>Csound instruments are adapted from <a href="https://kunstmusik.com/">Steven Yi</a> (YiString and
FMWaterBell), Joseph T. Kung (Kung2 and Kung4), <a href="http://www.jlpublishing.com/">Lee Zakian</a>
(ZakianFlute), and others.
<p>
<a href="http://michaelgogins.tumblr.com">
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Calque_1" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="36" height="36"
viewBox="0 0 256 256" enable-background="new 0 0 256 256" xml:space="preserve">
<g>
<g>
<g>
<rect x="0.24" y="0.167" fill="#314358" width="255.52" height="256" />
</g>
</g>
<g>
<path fill="#FFFFFF"
d="M168.08,170.918c-2.969,1.416-8.647,2.648-12.881,2.754c-12.783,0.342-15.264-8.979-15.367-15.736v-49.705
h32.065V84.055h-31.954V43.382c0,0-23.008,0-23.383,0c-0.385,0-1.057,0.337-1.152,1.192c-1.368,12.448-7.192,34.296-31.416,43.032
v20.624h16.16v52.167c0,17.863,13.176,43.24,47.959,42.641c11.736-0.201,24.77-5.113,27.648-9.354L168.08,170.918" />
</g>
</g>
</svg>
</a>
<a href="https://github.com/gogins">
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Calque_1" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="36" height="36"
viewBox="0 0 256 256" enable-background="new 0 0 256 256" xml:space="preserve">
<g>
<path
d="M128.00106,0 C57.3172926,0 0,57.3066942 0,128.00106 C0,184.555281 36.6761997,232.535542 87.534937,249.460899 C93.9320223,250.645779 96.280588,246.684165 96.280588,243.303333 C96.280588,240.251045 96.1618878,230.167899 96.106777,219.472176 C60.4967585,227.215235 52.9826207,204.369712 52.9826207,204.369712 C47.1599584,189.574598 38.770408,185.640538 38.770408,185.640538 C27.1568785,177.696113 39.6458206,177.859325 39.6458206,177.859325 C52.4993419,178.762293 59.267365,191.04987 59.267365,191.04987 C70.6837675,210.618423 89.2115753,204.961093 96.5158685,201.690482 C97.6647155,193.417512 100.981959,187.77078 104.642583,184.574357 C76.211799,181.33766 46.324819,170.362144 46.324819,121.315702 C46.324819,107.340889 51.3250588,95.9223682 59.5132437,86.9583937 C58.1842268,83.7344152 53.8029229,70.715562 60.7532354,53.0843636 C60.7532354,53.0843636 71.5019501,49.6441813 95.9626412,66.2049595 C106.172967,63.368876 117.123047,61.9465949 128.00106,61.8978432 C138.879073,61.9465949 149.837632,63.368876 160.067033,66.2049595 C184.49805,49.6441813 195.231926,53.0843636 195.231926,53.0843636 C202.199197,70.715562 197.815773,83.7344152 196.486756,86.9583937 C204.694018,95.9223682 209.660343,107.340889 209.660343,121.315702 C209.660343,170.478725 179.716133,181.303747 151.213281,184.472614 C155.80443,188.444828 159.895342,196.234518 159.895342,208.176593 C159.895342,225.303317 159.746968,239.087361 159.746968,243.303333 C159.746968,246.709601 162.05102,250.70089 168.53925,249.443941 C219.370432,232.499507 256,184.536204 256,128.00106 C256,57.3066942 198.691187,0 128.00106,0 Z M47.9405593,182.340212 C47.6586465,182.976105 46.6581745,183.166873 45.7467277,182.730227 C44.8183235,182.312656 44.2968914,181.445722 44.5978808,180.80771 C44.8734344,180.152739 45.876026,179.97045 46.8023103,180.409216 C47.7328342,180.826786 48.2627451,181.702199 47.9405593,182.340212 Z M54.2367892,187.958254 C53.6263318,188.524199 52.4329723,188.261363 51.6232682,187.366874 C50.7860088,186.474504 
50.6291553,185.281144 51.2480912,184.70672 C51.8776254,184.140775 53.0349512,184.405731 53.8743302,185.298101 C54.7115892,186.201069 54.8748019,187.38595 54.2367892,187.958254 Z M58.5562413,195.146347 C57.7719732,195.691096 56.4895886,195.180261 55.6968417,194.042013 C54.9125733,192.903764 54.9125733,191.538713 55.713799,190.991845 C56.5086651,190.444977 57.7719732,190.936735 58.5753181,192.066505 C59.3574669,193.22383 59.3574669,194.58888 58.5562413,195.146347 Z M65.8613592,203.471174 C65.1597571,204.244846 63.6654083,204.03712 62.5716717,202.981538 C61.4524999,201.94927 61.1409122,200.484596 61.8446341,199.710926 C62.5547146,198.935137 64.0575422,199.15346 65.1597571,200.200564 C66.2704506,201.230712 66.6095936,202.705984 65.8613592,203.471174 Z M75.3025151,206.281542 C74.9930474,207.284134 73.553809,207.739857 72.1039724,207.313809 C70.6562556,206.875043 69.7087748,205.700761 70.0012857,204.687571 C70.302275,203.678621 71.7478721,203.20382 73.2083069,203.659543 C74.6539041,204.09619 75.6035048,205.261994 75.3025151,206.281542 Z M86.046947,207.473627 C86.0829806,208.529209 84.8535871,209.404622 83.3316829,209.4237 C81.8013,209.457614 80.563428,208.603398 80.5464708,207.564772 C80.5464708,206.498591 81.7483088,205.631657 83.2786917,205.606221 C84.8005962,205.576546 86.046947,206.424403 86.046947,207.473627 Z M96.6021471,207.069023 C96.7844366,208.099171 95.7267341,209.156872 94.215428,209.438785 C92.7295577,209.710099 91.3539086,209.074206 91.1652603,208.052538 C90.9808515,206.996955 92.0576306,205.939253 93.5413813,205.66582 C95.054807,205.402984 96.4092596,206.021919 96.6021471,207.069023 Z"
fill="#161614"></path>
</g>
</svg>
</a>
</div>
<!-- endregion -->
<!-- #region Visuals to Music -->
<script id="draw-shader-fs" type="x-shader/x-fragment">#version 300 es
#line 187
precision highp float;
/**
* These are all of the standard ShaderToy inputs. If any of these are
* used in this shader, they must be created and initialized in the
* JavaScript code.
*/
uniform vec3 iResolution;
// viewport resolution (in pixels)
uniform float iTime;
// shader playback time (in seconds)
uniform float iTimeDelta;
// render time (in seconds)
uniform int iFrame;
// shader playback frame
uniform float iChannelTime[4];
// channel playback time (in seconds)
uniform vec3 iChannelResolution[4];
// channel resolution (in pixels)
uniform vec4 iMouse;
// mouse pixel coords. xy: current (if MLB down), zw: click
uniform sampler2D iChannel0;
// input channel. XX = 2D/Cube
uniform sampler2D iChannel1;
// input channel. XX = 2D/Cube
uniform sampler2D iChannel2;
// input channel. XX = 2D/Cube
uniform sampler2D iChannel3;
// input channel. XX = 2D/Cube
uniform vec4 iDate;
// (year, month, day, time in seconds)
uniform float iSampleRate;
// sound sample rate (i.e., 44100)
uniform float GraphicsTempo;
uniform float GraphicsHue;
uniform float GraphicsValue;
/**
* Theoretically, any fragment shader copied from the ShaderToy
* editor can replace the body of the mainImage function below,
* if all inputs actually used in the shader are defined and bound.
*/
void mainImage(out vec4 _ufragColor, in vec2 _ufragCoord);
out vec4 _ushadertoy_out_color;
/**
 * ShaderToy-style entry point: initializes the output color, delegates to
 * mainImage(), and writes the result with alpha forced to 1.0.
 */
void main(){
(_ushadertoy_out_color = vec4(0.0, 0.0, 0.0, 0.0));
(_ushadertoy_out_color = vec4(1.0, 1.0, 1.0, 1.0));
vec4 _ucolor = vec4(0.0, 0.0, 0.0, 1.0);
mainImage(_ucolor, gl_FragCoord.xy);
// NOTE(review): _ushadertoy_out_color was just set to (1,1,1,1) above, so the
// four (< 0.0) debug checks below can never fire. They look like they were
// intended to test _ucolor (flagging negative channels) -- confirm before
// changing, since fixing them could alter the rendered image.
if ((_ushadertoy_out_color.x < 0.0))
{
(_ucolor = vec4(1.0, 0.0, 0.0, 1.0));
}
if ((_ushadertoy_out_color.y < 0.0))
{
(_ucolor = vec4(0.0, 1.0, 0.0, 1.0));
}
if ((_ushadertoy_out_color.z < 0.0))
{
(_ucolor = vec4(0.0, 0.0, 1.0, 1.0));
}
if ((_ushadertoy_out_color.w < 0.0))
{
(_ucolor = vec4(1.0, 1.0, 0.0, 1.0));
}
// Opaque output: alpha is always 1.0.
(_ushadertoy_out_color = vec4(_ucolor.xyz, 1.0));
}
// Cheap 1-D hash: spreads an integer seed over [0, 1) via a scaled sine.
float hash(int x) {
    float s = sin(float(x));
    return fract(s * 7.847);
}
// Distance from point c to the line segment from a to b.
float dSegment(vec2 a, vec2 b, vec2 c)
{
    vec2 seg = b - a;
    vec2 rel = c - a;
    // Parametric position of c's projection onto the segment, clamped to [0, 1].
    float t = clamp(dot(seg, rel) / dot(seg, seg), 0., 1.);
    vec2 nearest = a + seg * t;
    return length(c - nearest);
}
// Component-wise triangle wave used to fold UV space. The scale and offset
// constants are tuning choices; the commented-out line is an alternative
// tiling the author experimented with.
vec2 triangle_wave(vec2 a){
///return abs(fract((a+vec2(1.,0.5))*1.5)-.5);
return abs(fract((a*.9+vec2(1.,0.75))*1.75)-.325);
}
// Branchless RGB -> HSV conversion. The two mix/step selections order the
// channels so the max and min can be recovered without if-statements.
// All components are in [0, 1].
vec3 rgb2hsv(vec3 c){
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
// d is max - min (the chroma); e guards against division by zero for grays.
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
// Branchless HSV -> RGB conversion (inverse of rgb2hsv above).
// c = (hue, saturation, value), all components in [0, 1].
vec3 hsv2rgb(vec3 c){
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
// p ramps each channel across the hue wheel; mix applies saturation,
// and c.z scales the result by value.
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
// Procedural cosine color palette: a + b * cos(2*pi * (c*t + d)), where a is
// the base color, b the amplitude, c the frequency, and d the phase per
// channel. The commented-out vectors are an earlier palette the author tried.
vec3 palette(float t)
{
// A less bilious palette?
vec3 a = vec3(0.938, 0.328, 0.718);
vec3 b = vec3(0.659, 0.438, 0.328);
vec3 c = vec3(0.388, 0.388, 0.296);
vec3 d = vec3(2.538, 2.478, 0.168);
////vec3 a = vec3(0.75, 0.528, 0.718);
////vec3 b = vec3(0.659, .438, 1.328);
////vec3 c = vec3(0.388, 0.788, 0.5);
////vec3 d = vec3(2.538, 2.478, 0.168);
// 6.28318 is 2*pi.
return a + b*cos( 6.28318*(c*t+d) );
}
// Rotates uv by `rotation` radians about the point (mid, mid).
vec2 rotateUV(vec2 uv, float rotation){
    float mid = 0.5;
    ////float mid = 0.25;
    float c = cos(rotation);
    float s = sin(rotation);
    return vec2(
        c * (uv.x - mid) + s * (uv.y - mid) + mid,
        c * (uv.y - mid) - s * (uv.x - mid) + mid
    );
}
/**
 * Fractal kaleidoscope: repeatedly rotates and folds UV space, accumulating
 * palette-colored rings, then applies the user's hue/value controls in HSV
 * space. The accumulation is order-dependent, so the loop body must not be
 * reordered.
 */
void mainImage( out vec4 fragColor, in vec2 fragCoord ){
// Center the coordinates and normalize by height so the pattern is
// aspect-ratio independent.
vec2 uv = (fragCoord * 1.0 - iResolution.xy) / iResolution.y;
vec2 uv0 = uv;
vec3 finalColour = vec3(0.);
// Slow it down...
// Put a control on speed.
float GraphicsTempo_ = iTime * GraphicsTempo;
// It would be nice to put a control on overall rotation or tilt.
///for (float i = 0.; i < 36.; i++)
for (float i = 0.; i < 72.; i++)
{
// Rotate, tile into a half-unit cell, rotate again: each iteration folds
// the plane at a slightly different scale.
uv = rotateUV(uv, GraphicsTempo_*.1);
uv = fract(uv*1.1)-.5;
uv = rotateUV(uv, GraphicsTempo_*.1);
float d = length(uv);
vec3 col = palette(length(uv0) + i*.1 + d + GraphicsTempo_);
// Ring profile: animated sine of distance, inverted so small d glows.
d = sin(d*4.+GraphicsTempo_)/4.;
d = abs(d);
d = .02/d;
////d = .01/d;
// The small per-iteration subtraction darkens later (deeper) layers.
finalColour += col * d * .1 - (i*.0002);
}
vec3 hsv_ = rgb2hsv(finalColour);
// Put controls on value and hue.
hsv_[2] = hsv_[2] * GraphicsValue;
hsv_[0] = hsv_[0] * GraphicsHue;
finalColour = hsv2rgb(hsv_);
fragColor = vec4(finalColour,1.0);
//fragColor = vec4(uv.x,uv.y,0.,1.0);
}
</script>
<script id="draw-shader-vs" type="x-shader/x-vertex">#version 300 es
in vec2 inPos;
// Pass-through vertex shader: expands the full-screen quad's 2D position
// into clip space.
void main() {
gl_Position = vec4(inPos.xy, 0.0, 1.0);
}
</script>
<script>
// One-time WebGL2 setup: size the canvas for high-DPI displays, create the
// rendering context, probe optional GPU features, and allocate the audio
// texture, its sampler, and the event-tracking state.
var shader_program = null;
var analyser = null;
// Scale the backing store for high-resolution ("retina") displays.
var devicePixelRatio_ = window.devicePixelRatio || 1;
var canvas = document.getElementById("display");
canvas.width = canvas.clientWidth * devicePixelRatio_;
canvas.height = canvas.clientHeight * devicePixelRatio_;
console.log("canvas.height: " + canvas.height);
console.log("canvas.width: " + canvas.width);
var gl = canvas.getContext("webgl2", { antialias: true });
if (!gl) {
    alert("Could not create webgl2 context.");
}
let extensions = gl.getSupportedExtensions();
console.log("Supported extensions:\n" + extensions);
if ("gpu" in navigator) {
    // requestAdapter() is asynchronous; log the resolved adapter rather than
    // the bare Promise (the original logged "[object Promise]").
    var gpu_adapter = navigator.gpu.requestAdapter();
    gpu_adapter.then(function (adapter) {
        console.log("WebGPU adapter: " + adapter);
    });
} else {
    console.warn("WebGPU is not available on this platform.");
}
// Required in order to render to / read back from float textures.
var EXT_color_buffer_float = gl.getExtension("EXT_color_buffer_float");
if (!EXT_color_buffer_float) {
    alert("EXT_color_buffer_float is not available on this platform.");
}
var WEBGL_debug_shaders = gl.getExtension("WEBGL_debug_shaders");
// The audio texture is 512 texels wide and 2 rows tall: one row for
// frequency-domain data, one for time-domain data.
const audio_texture_level = 0;
const audio_texture_internalFormat = gl.R32F;
const audio_texture_width = 512;
const audio_texture_height = 2;
const audio_texture_border = 0;
const audio_texture_srcFormat = gl.RED;
const audio_texture_srcType = gl.FLOAT;
var frequency_domain_data = new Uint8Array(audio_texture_width * 2);
var time_domain_data = new Uint8Array(audio_texture_width * 2);
var audio_data = new Float32Array(audio_texture_width * 2);
// Reallocated to one canvas row (width * RGBA) on every resize.
var image_sample_buffer = new Uint8ClampedArray();
var channel0_texture_unit = 0;
var channel0_texture = gl.createTexture();
channel0_texture.name = "channel0_texture";
var channel0_sampler = gl.createSampler();
// BUG FIX: the original read `channel0_sampler.name - "channel0_sampler";`
// (minus instead of assignment), a no-op expression that left the sampler
// unnamed.
channel0_sampler.name = "channel0_sampler";
var current_events = new Map();
var prior_events = current_events;
var rendering_frame = 0;
/**
 * Copies the analyser's current spectrum and waveform into the 512x2 R32F
 * audio texture so the fragment shader can read it via iChannel0.
 * Row 0: frequency-domain magnitudes, mapped from [0, 255] to [0, 1].
 * Row 1: time-domain samples, mapped from [0, 255] to [0, 1] (0.5 = silence).
 * If analyser is null, whatever is already in audio_data is (re)uploaded.
 */
function write_audio_texture(analyser, texture_unit, texture, sampler) {
    if (analyser != null) {
        analyser.getByteFrequencyData(frequency_domain_data);
        analyser.getByteTimeDomainData(time_domain_data);
        for (let i = 0; i < audio_texture_width; ++i) {
            // Map frequency domain magnitudes to [0, 1].
            audio_data[i] = frequency_domain_data[i] / 255.;
        }
        for (let j = 0; j < audio_texture_width; ++j) {
            // Map time domain bytes to [0, 1]. (The original comment claimed
            // [-1, 1], but the code divides unsigned bytes by 255, which
            // yields [0, 1] -- the comment was wrong, not the code.)
            audio_data[audio_texture_width + j] = time_domain_data[j] / 255.;
        }
    }
    gl.activeTexture(gl.TEXTURE0 + texture_unit);
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.bindSampler(texture_unit, sampler);
    gl.texImage2D(gl.TEXTURE_2D, audio_texture_level, audio_texture_internalFormat, audio_texture_width, audio_texture_height, audio_texture_border, audio_texture_srcFormat, audio_texture_srcType, audio_data);
    // Nearest-neighbor sampling, clamped edges, no depth comparison.
    gl.samplerParameteri(sampler, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.samplerParameteri(sampler, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.samplerParameteri(sampler, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.samplerParameteri(sampler, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.samplerParameteri(sampler, gl.TEXTURE_WRAP_R, gl.CLAMP_TO_EDGE);
    gl.samplerParameteri(sampler, gl.TEXTURE_COMPARE_MODE, gl.NONE);
    gl.samplerParameteri(sampler, gl.TEXTURE_COMPARE_FUNC, gl.LEQUAL);
    // Disabled diagnostic dump of active uniforms and texture contents.
    if (false && analyser) { // For debugging.
        let is_texture = gl.isTexture(texture);
        let uniform_count = gl.getProgramParameter(shader_program, gl.ACTIVE_UNIFORMS);
        for (let uniform_index = 0; uniform_index < uniform_count; ++uniform_index) {
            // Declared with const (the original leaked uniform_info as an
            // implicit global).
            const uniform_info = gl.getActiveUniform(shader_program, uniform_index);
            console.log(uniform_info);
            const location = gl.getUniformLocation(shader_program, uniform_info.name);
            const value = gl.getUniform(shader_program, location);
            console.log("Uniform location: " + location);
            console.log("Uniform value: " + value);
        }
        const unit = gl.getUniform(shader_program, shader_program.iChannel0);
        console.log("Sampler texture unit: " + unit);
        console.log("Texture unit: " + texture_unit);
        gl.activeTexture(gl.TEXTURE0 + texture_unit);
        let texture2D = gl.getParameter(gl.TEXTURE_BINDING_2D);
        console.log("Texture binding 2D " + texture2D);
        // Attach the texture to a throwaway framebuffer so its contents can
        // be read back; what comes out should be what went in.
        var debug_framebuffer = gl.createFramebuffer();
        gl.bindFramebuffer(gl.FRAMEBUFFER, debug_framebuffer);
        gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture2D, 0);
        if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
            console.log("These attachments don't work.");
        }
        var data = new Float32Array(1024);
        gl.readPixels(0, 0, 512, 2, gl.RED, gl.FLOAT, data);
        //console.log("\nfrequency domain: \n" + data.slice(0, 512));
        //console.log("time domain: \n" + data.slice(512));
        gl.deleteFramebuffer(debug_framebuffer);
    }
}
function load_scene() {
var webgl_viewport_size;
var webgl_buffers = {};
var mouse_position = [0, 0, 0, 0];
/**
 * Compiles and links the full-screen-quad shader program, looks up its
 * attribute/uniform locations, uploads the quad geometry, seeds the audio
 * texture, and starts the render loop.
 */
function create_scene() {
    canvas.addEventListener('mousemove', (e) => {
        mouse_position = [e.clientX, e.clientY];
    });
    shader_program = gl.createProgram();
    for (let i = 0; i < 2; ++i) {
        let shader_code = document.getElementById(i == 0 ? "draw-shader-vs" : "draw-shader-fs").text;
        let shader_object = gl.createShader(i == 0 ? gl.VERTEX_SHADER : gl.FRAGMENT_SHADER);
        gl.shaderSource(shader_object, shader_code);
        gl.compileShader(shader_object);
        let status = gl.getShaderParameter(shader_object, gl.COMPILE_STATUS);
        if (!status) {
            console.warn(gl.getShaderInfoLog(shader_object));
        }
        gl.attachShader(shader_program, shader_object);
        console.log("shader:" + WEBGL_debug_shaders.getTranslatedShaderSource(shader_object));
    }
    // BUG FIX: link once, after both shaders are attached. The original
    // called linkProgram inside the loop, so the first link ran with only
    // the vertex shader attached and necessarily failed.
    gl.linkProgram(shader_program);
    // Declared with let (the original assigned `status` as an implicit global).
    let status = gl.getProgramParameter(shader_program, gl.LINK_STATUS);
    if (!status) {
        console.warn(gl.getProgramInfoLog(shader_program));
    }
    // Cache attribute and uniform locations on the program object itself.
    shader_program.inPos = gl.getAttribLocation(shader_program, "inPos");
    shader_program.iMouse = gl.getUniformLocation(shader_program, "iMouse");
    shader_program.iResolution = gl.getUniformLocation(shader_program, "iResolution");
    shader_program.iTime = gl.getUniformLocation(shader_program, "iTime");
    shader_program.iTimeDelta = gl.getUniformLocation(shader_program, "iTimeDelta");
    shader_program.iFrame = gl.getUniformLocation(shader_program, "iFrame");
    shader_program.iChannel0 = gl.getUniformLocation(shader_program, "iChannel0");
    shader_program.iChannel1 = gl.getUniformLocation(shader_program, "iChannel1");
    shader_program.iChannel2 = gl.getUniformLocation(shader_program, "iChannel2");
    shader_program.iChannel3 = gl.getUniformLocation(shader_program, "iChannel3");
    shader_program.iSampleRate = gl.getUniformLocation(shader_program, "iSampleRate");
    shader_program.GraphicsTempo = gl.getUniformLocation(shader_program, "GraphicsTempo");
    shader_program.GraphicsHue = gl.getUniformLocation(shader_program, "GraphicsHue");
    shader_program.GraphicsValue = gl.getUniformLocation(shader_program, "GraphicsValue");
    gl.useProgram(shader_program);
    gl.uniform1f(shader_program.iSampleRate, 48000.);
    gl.uniform1f(shader_program.GraphicsTempo, parameters.GraphicsTempo);
    gl.uniform1f(shader_program.GraphicsHue, parameters.GraphicsHue);
    gl.uniform1f(shader_program.GraphicsValue, parameters.GraphicsValue);
    // Full-screen quad as two triangles.
    var pos = [-1, -1,
        1, -1,
        1, 1,
        -1, 1];
    var inx = [0, 1, 2, 0, 2, 3];
    webgl_buffers.pos = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, webgl_buffers.pos);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(pos), gl.STATIC_DRAW);
    webgl_buffers.inx = gl.createBuffer();
    webgl_buffers.inx.len = inx.length;
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, webgl_buffers.inx);
    gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(inx), gl.STATIC_DRAW);
    gl.enableVertexAttribArray(shader_program.inPos);
    gl.vertexAttribPointer(shader_program.inPos, 2, gl.FLOAT, false, 0, 0);
    gl.enable(gl.DEPTH_TEST);
    gl.clearColor(0.0, 0.0, 0.0, 1.0);
    write_audio_texture(analyser, channel0_texture_unit, channel0_texture, channel0_sampler);
    window.onresize = resize;
    resize();
    requestAnimationFrame(render_scene);
}
// Resizes the canvas backing store to the window, scaled for high-DPI
// displays, and reallocates the one-row RGBA pixel buffers used to sample
// the bottom row of the video for sonification.
function resize() {
webgl_viewport_size = [window.innerWidth, window.innerHeight];
canvas.width = webgl_viewport_size[0] * window.devicePixelRatio;
canvas.height = webgl_viewport_size[1] * window.devicePixelRatio;
// One row of RGBA bytes: 4 bytes per canvas pixel column.
image_sample_buffer = new Uint8ClampedArray(canvas.width * 4);
// NOTE(review): prior_image_sample_buffer is assigned without var/let, so it
// becomes an implicit global -- presumably shared with code elsewhere in the
// file; confirm before localizing it.
prior_image_sample_buffer = new Uint8ClampedArray(canvas.width * 4);
console.info("resize: image_sample_buffer.length: " + image_sample_buffer.length);
}
/**
 * Resolves when the given GL fence sync object signals, polling with
 * setTimeout every interval_ms. Rejects if the wait fails. Polling is the
 * workaround for platforms where the maximum clientWaitSync timeout is 0.
 */
function clientWaitAsync(sync, flags, interval_ms) {
    return new Promise((resolve, reject) => {
        const poll = () => {
            const status = gl.clientWaitSync(sync, flags, 0);
            if (status === gl.WAIT_FAILED) {
                reject();
            } else if (status === gl.TIMEOUT_EXPIRED) {
                // Not signaled yet; check again shortly.
                setTimeout(poll, interval_ms);
            } else {
                // ALREADY_SIGNALED or CONDITION_SATISFIED.
                resolve();
            }
        };
        poll();
    });
}
/**
 * Reads `length` bytes from a GL buffer into dstBuffer without stalling the
 * pipeline: inserts a fence after all pending GPU commands, waits for it
 * asynchronously, and only then calls getBufferSubData. The fence/flush/wait
 * ordering here is load-bearing -- do not reorder.
 */
async function getBufferSubDataAsync(target, buffer, srcByteOffset, dstBuffer,
/* optional */ dstOffset, /* optional */ length) {
const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
await clientWaitAsync(sync, 0, 10);
gl.deleteSync(sync);
gl.bindBuffer(target, buffer);
gl.getBufferSubData(target, srcByteOffset, dstBuffer, dstOffset, length);
gl.bindBuffer(target, null);
}
/**
* Converts an RGB color value to HSV. The formula is
* adapted from http://en.wikipedia.org/wiki/HSV_color_space.
* Assumes r, g, and b are in [0, 255] and
* returns h, s, and v in [0, 1].
*/
/**
 * Converts an RGB color value to HSV. The formula is
 * adapted from http://en.wikipedia.org/wiki/HSV_color_space.
 * Assumes r, g, and b are in [0, 255] and
 * returns [h, s, v], each in [0, 1].
 */
var rgb_to_hsv = function (rgb) {
    // BUG FIX: declared locally; the original assigned r, g, b without
    // var/let, leaking them as implicit globals (and breaking strict mode).
    const r = rgb[0] / 255;
    const g = rgb[1] / 255;
    const b = rgb[2] / 255;
    var max = Math.max(r, g, b);
    var min = Math.min(r, g, b);
    var h, s, v = max;
    var d = max - min;
    s = max === 0 ? 0 : d / max;
    if (max == min) {
        // Achromatic (gray): hue is undefined, use 0 by convention.
        h = 0;
    } else {
        // Hue sector depends on which channel is the maximum.
        if (max == r) {
            h = (g - b) / d + (g < b ? 6 : 0);
        } else if (max == g) {
            h = (b - r) / d + 2;
        } else if (max == b) {
            h = (r - g) / d + 4;
        }
        h /= 6;
    }
    return [h, s, v];
}
/**
 * Asynchronous readPixels: stages the pixels into a PIXEL_PACK_BUFFER so the
 * GPU->CPU copy happens behind a fence (via getBufferSubDataAsync) instead of
 * a synchronous stall. The result lands in `sample`, which must already be
 * sized to w * h * bytes-per-pixel for the given format/type.
 */
async function readPixelsAsync(x, y, w, h, format, type, sample) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.PIXEL_PACK_BUFFER, buffer);
gl.bufferData(gl.PIXEL_PACK_BUFFER, sample.byteLength, gl.STREAM_READ);
// With a PIXEL_PACK_BUFFER bound, the last argument is a byte offset into
// that buffer, not a destination array.
gl.readPixels(x, y, w, h, format, type, 0);
gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
await getBufferSubDataAsync(gl.PIXEL_PACK_BUFFER, buffer, 0, sample);
gl.deleteBuffer(buffer);
}
/**
* Adapts https://github.com/pingec/downsample-lttb from time
* series data to vectors of float HSV pixels. Our data is not
* [[time, value], [time, value],...], but rather
* [[pixel index0, hsv0[2]], [pixel index1, hsv1[2]], ...].
*/
/**
 * Largest-Triangle-Three-Buckets downsampling, adapted from
 * https://github.com/pingec/downsample-lttb. Points are arrays whose first
 * two entries are [x, y] (here: pixel index and HSV value); any extra entries
 * ride along untouched. Returns `buckets` points: the first and last input
 * points, plus the most "visually significant" point from each interior
 * bucket. Returns the input unchanged when there is nothing to reduce.
 */
function downsample_lttb(data, buckets) {
    if (buckets >= data.length || buckets === 0) {
        return data; // Nothing to do
    }
    const result = [];
    // First and last points are always kept; the interior buckets split the rest.
    const step = (data.length - 2) / (buckets - 2);
    let anchor = 0; // Index of the previously selected point (triangle vertex a).
    result.push(data[anchor]);
    for (let bucket = 0; bucket < buckets - 2; bucket++) {
        // The average of the NEXT bucket supplies the triangle's third vertex c.
        let mean_x = 0;
        let mean_y = 0;
        let start = Math.floor((bucket + 1) * step) + 1;
        let stop = Math.floor((bucket + 2) * step) + 1;
        stop = stop < data.length ? stop : data.length;
        const span = stop - start;
        for (; start < stop; start++) {
            mean_x += data[start][0] * 1; // * 1 coerces to Number
            mean_y += data[start][1] * 1;
        }
        mean_x /= span;
        mean_y /= span;
        // Scan the CURRENT bucket for the point b forming the largest
        // triangle with the anchor a and the next bucket's mean c.
        const anchor_x = data[anchor][0] * 1;
        const anchor_y = data[anchor][1] * 1;
        let best_area = -1;
        let best_point;
        let best_index;
        const bucket_end = Math.floor((bucket + 1) * step) + 1;
        for (let k = Math.floor(bucket * step) + 1; k < bucket_end; k++) {
            const area = Math.abs((anchor_x - mean_x) * (data[k][1] - anchor_y) -
                (anchor_x - data[k][0]) * (mean_y - anchor_y)) * 0.5;
            if (area > best_area) {
                best_area = area;
                best_point = data[k];
                best_index = k;
            }
        }
        result.push(best_point);
        anchor = best_index; // The chosen b becomes the next triangle's a.
    }
    result.push(data[data.length - 1]); // Always add last
    return result;
}
/**
 * Globals used when translating one row of RGBA pixels, the width of
 * the WebGL canvas, to Csound events.
 *
 * https://skemman.is/bitstream/1946/15343/3/SS_MSthesis.pdf
 */
var instrument_count = 8;
var sampled_events = [];
var playlist = new Map();
// Put a control on this.
var gi_Composition_tempo = 25;
var frame_translation_count = 0;
// Lowest MIDI key that a pixel can map to, and the span of keys.
var midi_key_begin = 36;
var midi_key_range = 60;
var maximum_voices = 8;
// Put a control on this.
var event_velocity_threshold = 100;
var midi_key_end = midi_key_begin + midi_key_range;
var sample_count = 0;
var root_progression = 0;
var on_events = [];
var off_events = [];
var playing_events = new Map();
var event_tag = 0;
var root_progressions = [2, 3, -4, 5, -1, 3];
/**
 * Reads one row of canvas pixels, converts each RGBA pixel to HSV,
 * downsamples the row to midi_key_range points, and translates the
 * result into Csound note events: playing notes whose velocity falls
 * below the threshold are killed, and notes at or above it are turned
 * on (limited to the maximum_voices loudest).
 *
 * @param {number} maximum_events Currently unused; kept for callers.
 * @param {number} threshold MIDI-velocity cutoff for on/off decisions.
 * @param {number} parent_rendering_frame Currently unused; kept for callers.
 */
async function translate_sample_to_csound_events(maximum_events, threshold, parent_rendering_frame) {
    if (csound == null) {
        return;
    }
    if (!csound.is_playing) {
        return;
    }
    let x = 0;
    // y is zero at the bottom of the canvas.
    let y = 0;
    let width = canvas.width;
    let height = 1;
    let format = gl.RGBA;
    let type = gl.UNSIGNED_BYTE;
    // Bug fix: await the asynchronous pixel readback so that
    // image_sample_buffer is actually filled before it is read below.
    // (await is harmless even if readPixelsAsync returns a plain value.)
    await readPixelsAsync(x, y, width, height, format, type, image_sample_buffer);
    // Translate the sample format from byte RGBA to float HSV. Each
    // entry is [1-based pixel index, value, hsv] so that the LTTB
    // downsampler can treat (index, value) as (x, y).
    let hsv_image_sample = [];
    for (let byte_i = 0; byte_i < image_sample_buffer.length; byte_i = byte_i + 4) {
        let rgb = image_sample_buffer.slice(byte_i, byte_i + 3);
        let hsv = rgb_to_hsv(rgb);
        hsv_image_sample.push([hsv_image_sample.length + 1, hsv[2], hsv]);
    }
    // Downsample the HSV samples.
    let downsampled_pixels = downsample_lttb(hsv_image_sample, midi_key_range);
    sampled_events.length = 0;
    on_events.length = 0;
    off_events.length = 0;
    score.clear();
    // Translate the HSV samples to Csound event vectors:
    // [0] p1 insno (from hue, tagged with the MIDI key so each pixel's
    //     instance is unique; positive for on, negative for off),
    // [1] p2 time (always 0),
    // [2] p3 duration (from saturation; must be 0 for off events),
    // [3] p4 MIDI key (downsampled pixel index),
    // [4] p5 MIDI velocity (from value).
    for (let downsampled_pixel_i = 0; downsampled_pixel_i < downsampled_pixels.length; downsampled_pixel_i++) {
        let hsv = downsampled_pixels[downsampled_pixel_i][2];
        let instrument_number = 1 + (hsv[0] * instrument_count);
        // The instrument number must have a unique fractional tag.
        let time = 0;
        let duration = (.25 + (60 / parameters.NoteTempo * hsv[1] * 2)) * parameters.NoteDurationFactor;
        let midi_key = Math.floor(midi_key_begin + downsampled_pixel_i);
        let insno = sprintf("%d.%d", Math.floor(instrument_number), midi_key);
        instrument_number = parseFloat(insno);
        let midi_velocity = hsv[2] * 128;
        let event_for_pixel = [instrument_number, time, duration, midi_key, midi_velocity];
        sampled_events.push(event_for_pixel);
    }
    console.log(sprintf("sampled_events.length: %d\n", sampled_events.length));
    // Events that are playing but not loud enough, are turned off.
    for (let sampled_event_i = 0; sampled_event_i < sampled_events.length; sampled_event_i++) {
        let sampled_event = sampled_events[sampled_event_i];
        let key = sampled_event[0];
        if (playing_events.has(key) == true) {
            let off_event = playing_events.get(key);
            let instrument_number = off_event[0];
            if (threshold > sampled_event[4]) {
                csound.KillInstance(instrument_number, "", 4, true);
                console.log("Turned off instrument number: " + instrument_number + ".");
                playing_events.delete(key);
            }
        }
    }
    console.log(sprintf("playing_events.size: %d\n", playing_events.size));
    // Events that are loud enough but not playing, are turned on.
    for (let sampled_event_i = 0; sampled_event_i < sampled_events.length; sampled_event_i++) {
        let sampled_event = sampled_events[sampled_event_i];
        let key = sampled_event[0];
        if (playing_events.has(key) == false) {
            if (sampled_event[4] >= threshold) {
                playing_events.set(key, sampled_event);
                on_events.push(sampled_event);
            }
        }
    }
    console.log(sprintf("playing_events.size: %d\n", playing_events.size));
    console.log(sprintf("on_events.length: %d\n", on_events.length));
    CsoundAC.setCorrectNegativeDurations(false);
    // Limit number of on events; play the N loudest only. Could also
    // pick at random.
    // Bug fix: events are [insno, time, duration, key, velocity], so
    // velocity is index 4; the original compared index 5 (undefined),
    // which made this comparator a no-op. Sort descending by velocity.
    on_events.sort(function (a, b) {
        if (a[4] < b[4]) {
            return 1;
        }
        if (b[4] < a[4]) {
            return -1;
        }
        return 0;
    });
    let voices = Math.min(on_events.length, maximum_voices);
    for (let i = 0; i < voices; i++) {
        let on_event = on_events[i];
        let time = 0;
        let duration = on_event[2];
        // MIDI status 144 is note on.
        let status = 144;
        let instrument_number = on_event[0];
        let key = on_event[3];
        let velocity = on_event[4];
        // Compress the velocity range into a narrower band above 60.
        velocity = 60 + (velocity / 7);
        let phase = 0;
        let pan = .01 + Math.random() * .8;
        let depth = 0;
        let height = 0;
        let pitches = 4095;
        score.add(time, duration, status, instrument_number, key, velocity, phase, pan, depth, height, pitches);
    }
    if (score.size() > 0) {
        // Every 4th sample, progress to a new chord; every 24th,
        // possibly modulate to a related scale.
        if (sample_count % 4 == 0) {
            if (sample_count % 24 == 0) {
                let scales = scale.modulations(chord);
                if (scales.size() > 1) {
                    scale = scales.get(Math.floor(Math.random() * scales.size()));
                    csound_message_callback(sprintf("\nScale: %s\n", scale.name()));
                }
            }
            let root_progression = root_progressions[Math.floor(Math.random() * root_progressions.length)];
            let chord_name = chord.eOP().name();
            if (chord_name.length == 0) {
                chord_name = chord.eOP().toString();
            }
            csound_message_callback(sprintf("\nChord: %s\n\n", chord_name));
            chord = scale.transpose_degrees(chord, root_progression, 3);
        }
        // Conform the generated notes to the current chord, then send
        // the rendered score text to Csound.
        CsoundAC.apply(score, chord, 0, 1000000, true);
        let score_text = score.getCsoundScore(12., true);
        csound.ReadScore(score_text);
    }
    sample_count++;
}
/**
 * As the last step in rendering the scene, reads one row of the
 * canvas (the sample) into an array of pixels, then translates that
 * row into a set of Csound events; the mapping is performed by a
 * separate function. An attempt is made to avoid stalling the WebGL
 * rendering pipeline by reading the pixels from the canvas only when
 * a memory fence around the GPU pipeline becomes passable.
 */
async function sample_canvas(current_rendering_frame) {
    const note_tempo = parseFloat(parameters.NoteTempo);
    gi_Composition_tempo = Math.floor(note_tempo);
    event_velocity_threshold = parseFloat(parameters.NoteDensity);
    await translate_sample_to_csound_events(16, event_velocity_threshold, current_rendering_frame);
}
/**
 * The graphics rendering loop polls the current time and tempo in
 * order to compute when to sample notes from the canvas.
 */
// Time in seconds of the most recent render_scene call.
var current_time;
// Time in seconds at which the canvas should next be sampled;
// 0 forces an immediate first sample.
var next_time = 0;
// Renders one frame of the shader-driven scene over the whole canvas,
// then, once per beat of the note tempo, samples the canvas for notes.
// Re-schedules itself via requestAnimationFrame.
function render_scene(milliseconds) {
    gl.viewport(0, 0, canvas.width, canvas.height);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
    // Shadertoy-style uniforms: time in seconds, resolution, mouse.
    let tyme = milliseconds / 1000;
    gl.uniform1f(shader_program.iTime, tyme);
    gl.uniform3f(shader_program.iResolution, canvas.width, canvas.height, 0);
    gl.uniform4f(shader_program.iMouse, mouse_position[0], mouse_position[1], 0, 0);
    gl.uniform1f(shader_program.GraphicsTempo, parameters.GraphicsTempo);
    gl.uniform1f(shader_program.GraphicsHue, parameters.GraphicsHue);
    gl.uniform1f(shader_program.GraphicsValue, parameters.GraphicsValue);
    //write_audio_texture(analyser, channel0_texture_unit, channel0_texture, channel0_sampler);
    gl.drawElements(gl.TRIANGLES, webgl_buffers.inx.len, gl.UNSIGNED_SHORT, 0);
    current_time = performance.now() / 1000;
    if (current_time >= next_time) {
        // NOTE(review): 'parameters' is read unguarded above, so this
        // typeof check may be redundant — confirm whether parameters
        // can actually be undefined here.
        if (typeof parameters !== 'undefined') {
            next_time = current_time + (60 / parameters.NoteTempo);
        }
        // Fire and forget: sampling is async and must not stall the
        // rendering loop.
        sample_canvas(rendering_frame);
    }
    rendering_frame++;
    requestAnimationFrame(render_scene);
}
create_scene();
};
</script>
<script>
// Globals shared by the Csound/CsoundAC startup code and UI handlers.
var CsoundAC = null;
var csound = null;
var score = null;
var scale = null;
var chord = null;
var message_callback_buffer = "";
// A read-only Ace editor serves as a scrolling console/log view.
var console_editor = ace.edit("console_view");
//console_editor.setTheme("ace/theme/gob");
console_editor.setReadOnly(true);
console_editor.setShowPrintMargin(false);
console_editor.setDisplayIndentGuides(false);
console_editor.renderer.setOption("showGutter", false);
console_editor.renderer.setOption("showLineNumbers", true);
// Immediately create CsoundAC, seed the initial score/scale/chord,
// and log browser and WebGL capabilities to the console view.
(async function () {
    CsoundAC = await createCsoundAC();
    score = new CsoundAC.Score();
    scale = new CsoundAC.Scale("F major");
    chord = scale.chord(1, 5, 3);
    //CsoundAC.CHORD_SPACE_DEBUGGING(false);
    const report_lines = [
        "",
        "Browser CodeName: " + navigator.appCodeName,
        "Browser Name: " + navigator.appName,
        "Browser Version: " + navigator.appVersion,
        "Cookies Enabled: " + navigator.cookieEnabled,
        "Browser Language: " + navigator.language,
        "Browser Online: " + navigator.onLine,
        "Platform: " + navigator.platform,
        "User-agent header: " + navigator.userAgent,
        "gl.VENDOR: " + gl.getParameter(gl.VENDOR),
        "gl.RENDERER: " + gl.getParameter(gl.RENDERER),
        "gl.VERSION: " + gl.getParameter(gl.VERSION),
        "gl.SHADING_LANGUAGE_VERSION: " + gl.getParameter(gl.SHADING_LANGUAGE_VERSION)
    ];
    csound_message_callback(report_lines.join("\n") + "\n");
    csound_message_callback(CsoundAC.chord_space_version() + "\n");
    let CM = CsoundAC.chordForName("CM");
    CM = CM.T(-4.);
    csound_message_callback(CM.information());
    csound_message_callback(CM.information_debug(-1));
}());
window.onload = async function () {
$("#controls_view").css("display", "none");
$("#about_view").css("display", "none");
$("#console_view").css("display", "none");
$("#menu_item_play").click(async function (event) {
console.log("menu_item_play click...");
if (csound == null) {
try {
csound_message_callback("Trying to load CsoundAudioNode...\n");
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext();
await audioContext.audioWorklet.addModule('CsoundAudioProcessor.js').then(function () {
csound_message_callback("Creating CsoundAudioNode...\n");
csound = new CsoundAudioNode(audioContext, csound_message_callback);
csound_message_callback("CsoundAudioNode (AudioWorklet) is available in this JavaScript context.\n");
analyser = new AnalyserNode(audioContext);
analyser.fftSize = 2048;
console.log("Analyzer buffer size: " + analyser.frequencyBinCount);
csound.connect(analyser);
console.log("csound: " + csound);
}, function (error) {
csound_message_callback(error + '\n');
});
csound_image_player = new CsoundImagePlayer(csound, CsoundAC, canvas);
} catch (e) {
csound_message_callback(e + '\n');
}
}
if (csound.is_playing == false) {
let csd = await document.getElementById('csd').value;
console_editor.setValue("");
let result = await csound.CompileCsdText(csd);
csound_message_callback("CompileCsdText returned: " + result);
await csound.Start();
if (localStorage.length > 0) {
$('#restore').trigger('click');
} else {
// Tricky! Otherwise we get _patch_ defaults, probably
// not correct; we need _html_ defaults.
$('#default').trigger('click');
}
await csound.Perform();
scale = new CsoundAC.Scale("F major");
csound_message_callback("Csound is playing...\n");
next_time = 0;
$("#menu_item_play").html("Stop");
} else {
await csound.Stop();
await csound.Cleanup();
csound.Reset();
csound_message_callback("Csound is stopping...\n");
$("#menu_item_play").html("Play");
}
});
$("#menu_item_fullscreen").click(function (event) {
console.log("menu_item_fullscreen click...");
const display = document.getElementById("display");
if (display.requestFullscreen) {
display.requestFullscreen();
} else if (display.webkitRequestFullscreen) {
display.webkitRequestFullscreen();
} else if (elem.msRequestFullscreen) {
display.msRequestFullscreen();
}
});
$("#menu_item_controls").click(function (event) {
console.log("menu_item_controls click...");