forked from ecraft2learn/ai
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathecraft2learn -camera using Snap code.js
More file actions
3224 lines (3175 loc) · 156 KB
/
ecraft2learn -camera using Snap code.js
File metadata and controls
3224 lines (3175 loc) · 156 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/**
* Implements JavaScript functions that extend Snap! to access AI cloud services and the machine learning library tensorflow.js
* Authors: Ken Kahn
* License: New BSD
*/
"use strict";
window.ecraft2learn =
(function () {
// the URL where this library lives
let this_url = document.querySelector('script[src*="ecraft2learn.js"]').src;
let load_script = function (url, when_loaded, if_error) {
    // Injects a <script> tag for 'url'; a URL without "//" is resolved
    // relative to the directory this library was loaded from.
    // when_loaded / if_error (both optional) become load/error listeners.
    let script = document.createElement("script");
    script.type = "text/javascript";
    if (url.indexOf("//") < 0) {
        // is relative to this_url
        url = this_url.substring(0, this_url.lastIndexOf('/')+1) + url;
    }
    script.src = url;
    if (when_loaded) {
        script.addEventListener('load', when_loaded);
    }
    if (if_error) {
        script.addEventListener('error', if_error);
    }
    document.head.appendChild(script);
};
const inside_snap = function () {
    // this library can be used directly in JavaScript or with other JavaScript-based languages
    // a small amount is Snap! specific and this is used to make those parts conditional on being inside Snap!
    const snap_world_defined = typeof world === 'object' && typeof WorldMorph === 'function';
    return snap_world_defined && world instanceof WorldMorph;
};
let get_key = function (key_name) {
    // Looks up an API key, trying in order:
    //   1. the Snap! reporter named key_name (ignoring the placeholder text),
    //   2. the page's hash or search parameters (name=value pairs),
    //   3. a DOM element in the top window whose id is key_name.
    // If none is found, offers to open the wiki explaining how to obtain keys.
    let key = run_snap_block(key_name);
    const get_hash_parameter = function (name, parameters, default_value) {
        let value = default_value;
        decodeURI(parameters).split('&').some(function (part) {
            const name_and_value = part.split('=');
            if (name_and_value[0] === name) {
                value = name_and_value[1];
                return true;
            }
        });
        return value;
    };
    if (key && key !== "Enter your key here") {
        return key;
    }
    try {
        // top.window in case this is running in an iframe
        // location.search appropriate if this is running inside of the AI guide
        let parameters = top.window.location.hash || top.window.location.search;
        if (parameters) {
            key = get_hash_parameter(key_name, parameters.substring(1));
            if (key) {
                return key;
            }
        }
        let element = top.document.getElementById(key_name);
        if (element) {
            return element.value;
        }
    } catch (ignore) {
        // top.window may signal an error if iframe and container are different domains
    }
    // key missing to explain how to obtain keys
    inform("Missing API key",
           "No value reported by the '" + key_name +
           "' reporter. After obtaining the key edit the reporter in the 'Variables' area.\n" +
           "Do you want to visit https://github.com/ecraft2learn/ai/wiki to learn how to get a key?",
           function () {
               window.onbeforeunload = null; // don't warn about reload
               document.location.assign("https://github.com/ecraft2learn/ai/wiki");
           });
};
let run_snap_block = function (labelSpec) { // add parameters later
    // Runs the Snap! custom block whose spec matches labelSpec.
    // labelSpec if it takes arguments will look something like 'label %txt of size %n'
    // based upon https://github.com/jmoenig/Snap--Build-Your-Own-Blocks/issues/1791#issuecomment-313529328
    const ide = get_snap_ide(ecraft2learn.snap_context);
    // gather the custom blocks of every sprite, the stage, and the global ones
    const all_blocks = ide.sprites.asArray().concat([ide.stage])
        .map(function (item) {return item.customBlocks})
        .reduce(function (a, b) {return a.concat(b)})
        .concat(ide.stage.globalBlocks);
    const index = all_blocks.map(function (block) {return block.blockSpec()})
                            .indexOf(labelSpec);
    if (index < 0) {
        return; // no block with that spec exists
    }
    return invoke_block_morph(all_blocks[index].templateInstance());
};
let get_snap_ide = function (start) {
    // finds the Snap! IDE_Morph that is the element 'start' or one of its ancestors
    if (!inside_snap()) {
        return;
    }
    let morph = start;
    while (morph && !(morph instanceof IDE_Morph)) {
        morph = morph.parent;
    }
    // when no ancestor is an IDE_Morph fall back to the world's first child;
    // not as general but works well (for now)
    return morph || world.children[0];
};
const enhance_snap_openProject = function () {
    // Wraps SnapSerializer.prototype.openProject so that opening a second
    // project first reloads this library: some ecraft2learn functions close
    // over state from the previous project, so a fresh copy is needed.
    // Does nothing outside Snap! or inside Snap4Arduino.
    if (!inside_snap()) {
        return;
    }
    if (typeof world.Arduino === 'function') {
        return; // inside Snap4Arduino
    }
    let original_open_project = SnapSerializer.prototype.openProject;
    SnapSerializer.prototype.openProject = function (project, ide) {
        if (ecraft2learn.snap_project_opened && window.parent === window) {
            // already been opened and not inside an iframe
            // problem with the following is that some ecraft2learn functions have
            // closed over outer variables and also some window's listeners have been added
            // ecraft2learn.initialise_all();
            // find the URL where this library lives and reload it
            let this_url = document.querySelector('script[src*="ecraft2learn.js"]').src;
            // clear the global so the reloaded script can rebuild it from scratch
            ecraft2learn = undefined;
            load_script(this_url,
                        function () {
                            // library reloaded; now open the project as usual
                            original_open_project(project, ide);
                            ecraft2learn.snap_project_opened = true;
                        });
        } else {
            // first open (or in an iframe): no reload needed
            original_open_project(project, ide);
            ecraft2learn.snap_project_opened = true;
        }
    };
};
const stop_all_scripts = function () {
    // Halts everything this library may have started: speech synthesis,
    // speech recognition, support windows, and pending Snap! callbacks.
    if (window.speechSynthesis) {
        window.speechSynthesis.cancel(); // should stop all utterances
    }
    if (ecraft2learn.stop_speech_recognition) {
        ecraft2learn.stop_speech_recognition();
    }
    if (ecraft2learn.support_window) {
        Object.values(ecraft2learn.support_window).forEach((support_window) => {
            support_window.postMessage('stop', '*');
        });
    }
    // mark outstanding Snap! callbacks so invoke_callback ignores them,
    // then forget them all
    ecraft2learn.outstanding_callbacks.forEach((callback) => {
        callback.stopped_by_user = true;
    });
    ecraft2learn.outstanding_callbacks = [];
};
const track_whether_snap_is_stopped = function () {
    // Hooks the Snap! IDE's stop-all button so that this library's
    // services are shut down whenever the user presses stop.
    if (!inside_snap()) {
        return;
    }
    const ide = get_snap_ide();
    const original_stopAllScripts = ide.stopAllScripts.bind(ide);
    ide.stopAllScripts = function () {
        stop_all_scripts();
        original_stopAllScripts();
    };
};
// Install the Snap! hooks now if the document has finished loading,
// otherwise defer both until the window's 'load' event fires.
if (document.body) {
    track_whether_snap_is_stopped();
    enhance_snap_openProject();
} else {
    window.addEventListener('load', track_whether_snap_is_stopped, false);
    window.addEventListener('load', enhance_snap_openProject, false);
}
let get_global_variable_value = function (name, default_value) {
    // returns the value of the Snap! global variable named 'name'
    // if none exists (or it is undefined) returns default_value
    const ide = get_snap_ide(ecraft2learn.snap_context);
    let value;
    try {
        value = ide.globalVariables.getVar(name);
    } catch (e) {
        return default_value; // no variable with that name
    }
    if (value === undefined) {
        return default_value;
    }
    // Snap! lists carry their elements in 'contents'; strings are used as is
    return typeof value === 'string' ? value : value.contents;
};
const record_callbacks = function (...callbacks) {
    // Remembers every Snap! Context passed in so that stop_all_scripts
    // can later mark it as stopped_by_user.
    for (const callback of callbacks) {
        if (callback && inside_snap() && callback instanceof Context) {
            ecraft2learn.outstanding_callbacks.push(callback);
        }
    }
};
let invoke_callback = function (callback) { // any number of additional arguments
    // Invokes 'callback' with the remaining arguments.
    // callback may be a Snap! Context (run as a new Process on the stage's
    // thread list) or a plain JavaScript function; anything else is ignored.
    // Returns the new Process for Snap! callbacks, otherwise undefined.
    // (Fixed: the condition previously tested inside_snap() twice.)
    if (inside_snap() && callback instanceof Context) { // assume Snap! callback
        if (callback.stopped_by_user) {
            return; // user pressed stop after this callback was recorded
        }
        if (!(callback.expression instanceof CommandBlockMorph ||
              callback.expression instanceof ReporterBlockMorph)) {
            return; // not a runnable block
        }
        var stage = world.children[0].stage; // this.parentThatIsA(StageMorph);
        var process = new Process(null, callback.receiver, null, true);
        // callback.emptySlots+1 is in case callback is passed more arguments than callback has empty slots
        let parameters = callback.emptySlots > 0 ?
                         Array.prototype.slice.call(arguments, 1, callback.emptySlots+1) :
                         Array.prototype.slice.call(arguments, 1);
        process.initializeFor(callback, new List(parameters));
        stage.threads.processes.push(process);
        return process;
    } else if (typeof callback === 'function') { // assume JavaScript callback
        callback.apply(this, Array.prototype.slice.call(arguments, 1));
    }
    // otherwise no callback provided so ignore it
};
let invoke_block_morph = function (block_morph) {
    // Runs a BlockMorph, forwarding any extra arguments as a Snap! List.
    if (!(block_morph instanceof BlockMorph)) {
        console.error("Invoke_block_morph called on non-BlockMorph");
        return;
    }
    const extra_arguments = Array.prototype.slice.call(arguments, 1);
    return invoke(block_morph, new List(extra_arguments), block_morph);
};
let is_callback = function (x) {
    // True for anything invoke_callback can run:
    // a Snap! Context or a JavaScript function.
    return (inside_snap() && x instanceof Context) ||
           typeof x === 'function';
};
let javascript_to_snap = function (x) {
    // Recursively translates a JavaScript value into its Snap! counterpart:
    // arrays (and Float32Arrays) become Lists, plain objects become Lists of
    // [key, value] pair Lists, everything else passes through unchanged.
    // Outside of Snap! the value is returned as is.
    if (!inside_snap()) {
        return x;
    }
    if (Array.isArray(x) || x instanceof Float32Array) {
        return new List(x.map(javascript_to_snap));
    }
    if (typeof x !== 'object') {
        return x;
    }
    if (x instanceof List) {
        return x; // already a Snap! List
    }
    if (x === null) {
        return []; // is the best we can do?
    }
    return new List(Object.keys(x).map(function (key) {
        return new List([key, javascript_to_snap(x[key])]);
    }));
};
const snap_to_javascript = (x, only_numbers) => {
    // Recursively converts a Snap! List (or array) into a JavaScript array.
    // When only_numbers is true, leaf strings are coerced to numbers.
    if (x instanceof List) {
        return snap_to_javascript(x.asArray(), only_numbers);
    }
    if (Array.isArray(x)) {
        return x.map((element) => snap_to_javascript(element, only_numbers));
    }
    if (only_numbers && typeof x === 'string') {
        return +x;
    }
    return x;
};
let add_photo_to_canvas = function (image_or_video, new_width, new_height, mirrored) {
    // Capture a photo by fetching the current contents of the video
    // and drawing it into a canvas, then converting that to a PNG
    // format data URL. By drawing it on an offscreen canvas and then
    // drawing that to the screen, we can change its size and/or apply
    // other changes before drawing it.
    // Returns the canvas. image_or_video defaults to the library's camera
    // video element; width/height default to the source's own dimensions.
    let width, height;
    if (!image_or_video) {
        image_or_video = ecraft2learn.video;
    }
    if (new_width) {
        width = new_width;
    } else {
        width = image_or_video.width;
    }
    if (new_height) {
        height = new_height;
    } else {
        height = image_or_video.height;
    }
    let canvas = document.createElement('canvas');
    canvas.setAttribute('width', width);
    canvas.setAttribute('height', height);
    const draw_on_canvas = () => {
        if (mirrored) {
            // flip horizontally so the photo matches a mirror image
            canvas.getContext('2d').translate(width, 0);
            canvas.getContext('2d').scale(-1, 1);
        }
        // drawImage(image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight);
        if (new_width && new_height) {
            // scale the full source into the requested size
            canvas.getContext('2d').drawImage(image_or_video, 0, 0, image_or_video.width, image_or_video.height, 0, 0, new_width, new_height);
        } else {
            canvas.getContext('2d').drawImage(image_or_video, 0, 0);
        }
    }
    let draw_image = function () {
        // is this still used? Only if camera is being used before it has been initialised?
        // redraws once the video reports 'waiting', then detaches itself
        draw_on_canvas();
        ecraft2learn.video.removeEventListener('waiting', draw_image);
    };
    // NOTE(review): a 'waiting' listener is added on every call and only removed
    // after it fires -- verify listeners cannot accumulate on a busy video
    ecraft2learn.video.addEventListener('waiting', draw_image);
    draw_on_canvas();
    return canvas;
};
let get_mary_tts_voice = function (voice_number) { // official name
    // Picks from the MARY TTS voices (official name is each entry's first element).
    const official_names = mary_tts_voices.map(function (voice) { return voice[0]; });
    return get_voice_from(voice_number, official_names);
};
var get_voice = function (voice_number) {
    // Picks from the browser's built-in speech synthesis voices.
    return get_voice_from(voice_number, window.speechSynthesis.getVoices());
};
var warned_about_missing_voice_numbers = []; // so each bad voice number is warned about only once
var get_voice_from = function (voice_number, voices) {
    // Returns voices[voice_number-1] (Snap! and Scratch are 1-indexed).
    // A voice number of 0 means the default voice, preferring one whose
    // language matches ecraft2learn.default_language when that is set.
    // Informs the user when no voices exist or the number is out of range.
    if (voices.length === 0) {
        inform("No voices",
               "This browser has no voices available.\n" +
               "Either try a different browser or try using the MARY TTS instead.");
        return;
    }
    voice_number = +voice_number; // convert to number if is a string
    if (isNaN(voice_number)) {
        return; // not a usable voice number
    }
    voice_number--; // Snap (and Scratch) use 1-indexing so convert here
    if (voice_number === -1) {
        // no explicit choice: use the first voice unless one matches the default language
        voice_number = 0;
        if (ecraft2learn.default_language) {
            mary_tts_voices.some(function (voice, index) {
                const language_code = voice[2];
                const matches = language_code.indexOf("-") >= 0 ?
                                // language and dialect specified
                                language_code === ecraft2learn.default_language :
                                language_code === ecraft2learn.default_language.substring(0, 2);
                if (matches) {
                    voice_number = index;
                    return true;
                }
            });
        }
    }
    if (voice_number >= 0 && voice_number < voices.length) {
        return voices[Math.floor(voice_number)];
    }
    if (warned_about_missing_voice_numbers.indexOf(voice_number) < 0) {
        warned_about_missing_voice_numbers.push(voice_number);
        inform("No such voice",
               "Only voice numbers between 1 and " + voices.length + " are available.\n" +
               "There is no voice number " + (voice_number+1) + ".");
    }
};
var check_for_voices = function (no_voices_callback, voices_callback) {
    // Calls voices_callback if speech synthesis voices are available and
    // no_voices_callback otherwise. Browsers may populate the voice list
    // asynchronously, so on the first empty result this waits for the
    // 'voiceschanged' event (or 10 seconds, whichever comes first) and retries.
    if (window.speechSynthesis.getVoices().length === 0) {
        // either there are no voices or they haven't loaded yet
        if (ecraft2learn.waited_for_voices) {
            // already waited once -- conclude there really are no voices
            invoke_callback(no_voices_callback);
        } else {
            // voices not loaded so wait for them and try again
            var onvoiceschanged_ran = false; // so both onvoiceschanged_ran and timeout don't both run
            window.speechSynthesis.onvoiceschanged = function () {
                onvoiceschanged_ran = true;
                ecraft2learn.waited_for_voices = true;
                check_for_voices(no_voices_callback, voices_callback);
                window.speechSynthesis.onvoiceschanged = undefined;
            };
            // but don't wait forever because there might not be any
            setTimeout(function () {
                if (!onvoiceschanged_ran) {
                    // only if onvoiceschanged didn't run
                    ecraft2learn.waited_for_voices = true;
                    invoke_callback(no_voices_callback);
                    window.speechSynthesis.onvoiceschanged = undefined;
                }
            },
            10000);
            return;
        }
    } else {
        invoke_callback(voices_callback);
    }
};
var get_matching_voice = function (builtin_voices, name_parts, default_voice_number) {
    // Returns the 1-indexed number of the first voice whose (lowercased) name
    // contains every part of name_parts, preferring whole-word matches.
    // builtin_voices selects the browser's voices; otherwise MARY TTS voices.
    // name_parts may be a JavaScript array, a Snap! list, or a single string
    // (fixed: a lone string previously crashed at name_parts.map below).
    // Failing a name match, tries language-code lookups; finally falls back to
    // default_voice_number (informing the user and using 0 when undefined).
    var voices = builtin_voices ?
        window.speechSynthesis.getVoices().map(function (voice) { return voice.name.toLowerCase(); }) :
        mary_tts_voices.map(function (voice) { return voice[1].toLowerCase(); });
    var voice_number;
    if (typeof name_parts === 'string') {
        // strings have no 'map' method so wrap a lone part in an array
        name_parts = [name_parts];
    } else if (!Array.isArray(name_parts)) {
        // convert from a Snap list to a JavaScript array
        name_parts = name_parts.contents;
    }
    name_parts = name_parts.map(function (part) {
        return part.toLowerCase();
    });
    var name_parts_double_white_space = name_parts.map(function (part) {
        return " " + part + " ";
    });
    var name_parts_left_white_space = name_parts.map(function (part) {
        return " " + part;
    });
    var name_parts_right_white_space = name_parts.map(function (part) {
        return part + " ";
    });
    var name_matches = function (name, parts) {
        return parts.every(function (part) {
            return name.indexOf(part) >= 0;
        });
    };
    [name_parts_double_white_space, name_parts_left_white_space, name_parts_right_white_space, name_parts].some(
        // prefer matches with white space
        // so that "male" doesn't match "female" unless no other choice
        function (parts) {
            voices.some(function (voice_name, index) {
                if (name_matches(voice_name, parts)) {
                    voice_number = index+1; // using 1-indexing
                    return true;
                }
            });
            return voice_number > 0;
        });
    if (voice_number >= 0) {
        return voice_number;
    }
    // no match so try using just the first argument to find a matching language entry
    var matching_language_entry = language_entry(name_parts[0]);
    if (matching_language_entry) {
        voice_number = voice_number_of_language_code(matching_language_entry[1], builtin_voices);
    }
    if (voice_number >= 0) {
        return voice_number;
    }
    if (ecraft2learn.language_defaults[name_parts[0]]) {
        // try again since the defaults don't necessarily match the list of languages
        // e.g. zh-CN is not the same as cmn-Hans-CN
        voice_number = voice_number_of_language_code(ecraft2learn.language_defaults[name_parts[0]], builtin_voices);
    }
    if (voice_number >= 0) {
        return voice_number;
    }
    if (typeof default_voice_number === 'undefined') {
        inform("Unable to find a matching voice",
               "This browser does not have a voice that matches " + name_parts.join("-"));
        default_voice_number = 0;
    }
    return default_voice_number; // interpreted as the default voice for the default_language
};
var voice_number_of_language_code = function (code, builtin_voices) {
    // Dispatches the language-code lookup to the browser voices
    // or the MARY TTS voices depending on builtin_voices.
    return builtin_voices ? builtin_voice_number_with_language_code(code)
                          : mary_tts_voice_number_with_language_code(code);
};
var speak = function (message, pitch, rate, voice_number, volume, language, finished_callback) {
    // speaks 'message' optionally with the specified pitch, rate, voice, volume, and language
    // finished_callback is called with the spoken text
    // see https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance
    var maximum_length = 200; // not sure what a good value is but long text isn't spoken in some browsers
    var break_into_short_segments = function (text) {
        // splits text into pieces no longer than maximum_length,
        // preferring to break after sentences, then clauses, then words
        var segments = [];
        var break_text = function (text) {
            // returns one more than the index at which to cut off the next segment
            var segment, index;
            if (text.length < maximum_length) {
                return text.length+1;
            }
            segment = text.substring(0, maximum_length);
            // fixed: lastIndexOf returns -1 (truthy) when not found, so the two
            // sentence-end candidates must be combined with Math.max, not ||
            index = Math.max(segment.lastIndexOf(". "), segment.lastIndexOf(".\n"));
            if (index > 0) {
                return index+2;
            }
            index = segment.lastIndexOf(".");
            if (index === segment.length-1) {
                // final period need not have space after it
                return index+1;
            }
            index = segment.lastIndexOf(", ");
            if (index > 0) {
                return index+2;
            }
            index = segment.lastIndexOf(" ");
            if (index > 0) {
                return index+1;
            }
            // give up - no periods, commas, or spaces
            return Math.min(text.length+1, maximum_length);
        };
        var best_break;
        while (text.length > 0) {
            best_break = break_text(text);
            if (best_break > 1) {
                segments.push(text.substring(0, best_break-1));
            }
            text = text.substring(best_break);
        }
        return segments;
    };
    var segments, speech_utterance_index;
    record_callbacks(finished_callback);
    if (message.length > maximum_length) {
        // speak each segment separately
        segments = break_into_short_segments(message);
        segments.forEach(function (segment, index) {
            // finished_callback is only for the last segment
            var callback = index === segments.length-1 &&
                           finished_callback &&
                           function () {
                               invoke_callback(finished_callback, message); // entire message not just the segments
                           };
            ecraft2learn.speak(segment, pitch, rate, voice_number, volume, language, callback);
        });
        return;
    }
    // else is less than the maximum_length
    var utterance = new SpeechSynthesisUtterance(message);
    ecraft2learn.utterance = utterance; // without this utterance may be garbage collected before onend can run
    if (typeof language === 'string' && language !== "") {
        utterance.lang = language;
        if (!voice_number) {
            // pick a voice that matches the requested language
            voice_number = get_matching_voice(true, [language]);
            if (voice_number === undefined) {
                voice_number = 0;
            }
        }
    } else if (ecraft2learn.default_language) {
        utterance.lang = ecraft2learn.default_language;
    }
    pitch = +pitch; // if string try converting to a number
    if (typeof pitch === 'number' && pitch > 0) {
        utterance.pitch = pitch;
    }
    rate = +rate;
    if (typeof rate === 'number' && rate > 0) {
        if (rate < .1) {
            // A very slow rate breaks Chrome's speech synthesiser
            rate = .1;
        }
        if (rate > 2) {
            rate = 2; // high rate also breaks Chrome's speech synthesis
        }
        utterance.rate = rate;
    }
    if (!voice_number && ecraft2learn.default_language) {
        // no voice requested: use the first voice matching the default language
        let voices = window.speechSynthesis.getVoices();
        voices.some(function (voice, index) {
            if (voice.lang === ecraft2learn.default_language) {
                voice_number = index+1; // 1-indexing
                return true;
            }
        });
    }
    utterance.voice = get_voice(voice_number);
    volume = +volume;
    // fixed: was "typeof volume && volume > 0" -- 'typeof volume' is a
    // non-empty string (always truthy) so the intended type test never ran
    if (typeof volume === 'number' && volume > 0) {
        utterance.volume = volume;
    }
    utterance.onend = function (event) {
        ecraft2learn.speaking_ongoing = false;
        invoke_callback(finished_callback, message);
    };
    ecraft2learn.speaking_ongoing = true;
    window.speechSynthesis.speak(utterance);
};
var no_voices_alert = function () {
    // Informs the user -- once only -- that speech synthesis has no voices.
    if (ecraft2learn.no_voices_alert_given) {
        return;
    }
    ecraft2learn.no_voices_alert_given = true;
    inform("No voices available",
           "This browser has no voices available.\n" +
           "Either try a different browser or try using the MARY TTS instead.");
};
const read_file = function (file, callback) {
    // Reads 'file' as text and passes the contents to 'callback'.
    const reader = new FileReader();
    reader.onloadend = () => invoke_callback(callback, reader.result);
    reader.readAsText(file);
};
const file_to_string = function (callback) {
    // Hides the Snap! world, shows a file chooser with instructions, and
    // once a file is picked restores the display and passes its text to 'callback'.
    const world_element = document.getElementById("world");
    const input_container = document.createElement('div');
    const instructions = document.createElement('p');
    const input = document.createElement('input');
    input.type = 'file';
    input.onchange = function () {
        world_element.style.display = 'block';
        input_container.remove();
        read_file(input.files[0], callback);
    };
    world_element.style.display = 'none';
    instructions.innerHTML = "<b> Click the file chooser and select a saved training file.</b> It should be a JSON file.";
    input_container.appendChild(instructions);
    input_container.appendChild(input);
    document.body.appendChild(input_container);
};
const load_transfer_training_from_file = (source_name, callback) => {
    // Asks the user for a saved training file and loads its contents.
    file_to_string((training_data_as_string) => {
        load_transfer_training(source_name, training_data_as_string, callback);
    });
};
const load_transfer_training_from_URL = function (source_name, URL, user_callback) {
    // Fetches saved training data from URL and loads it;
    // read failures are reported to the user.
    ecraft2learn.read_url(URL,
                          (training_data_as_string) => {
                              load_transfer_training(source_name, training_data_as_string, user_callback);
                          },
                          (message) => {
                              inform("Error reading " + URL, message);
                          });
};
const create_costume = function (canvas, name) {
    // Wraps a canvas in a Snap! Costume; unnamed costumes get
    // "photo <timestamp>" since costume names need to be unique.
    return new Costume(canvas, name || ("photo " + Date.now()));
};
const add_costume = function (costume, sprite) {
    // Adds 'costume' to 'sprite' (the stage when no sprite is given) and wears it.
    const ide = get_snap_ide();
    const target = sprite || ide.stage;
    target.addCostume(costume);
    target.wearCostume(costume);
    ide.hasChangedMedia = true;
};
const post_image = function post_image(image, cloud_provider, callback, error_callback) {
    // Sends 'image' to the named vision service ("IBM Watson", "Google", or
    // "Microsoft") and calls callback with the XHR load event -- or with an
    // explanatory string when no API key is available.
    // error_callback handles network errors and defaults to console.error.
    // based upon https://developer.mozilla.org/en-US/docs/Web/Guide/HTML/Forms/Sending_forms_through_JavaScript
    cloud_provider = cloud_provider.trim();
    if (cloud_provider === 'Watson') {
        cloud_provider = 'IBM Watson'; // accept the short name too
    }
    let key = get_key(cloud_provider + " image key");
    let formData;
    if (!key) {
        callback("No key provided so unable to ask " + cloud_provider + " to analyse an image.");
        return;
    }
    let XHR = new XMLHttpRequest();
    XHR.addEventListener('load', function (event) {
        show_message(""); // remove loading message
        callback(event);
    });
    if (!error_callback) {
        error_callback = function (event) {
            console.error(event);
        }
    }
    XHR.addEventListener('error', function (event) {
        show_message(""); // remove loading message
        error_callback(event);
    });
    show_message("Contacting " + cloud_provider);
    switch (cloud_provider) {
        case "IBM Watson":
            // Watson expects a multipart form with the image as a file part
            formData = new FormData();
            formData.append("images_file", image, "blob.png");
            // beginning early December 2017 Watson began signalling No 'Access-Control-Allow-Origin' header
            // Note that "Lite" plans are deleted after 30 days of inactivity...
            // fixed: the '@' separating the apikey credentials from the host was
            // missing, producing a malformed URL (https://apikey:KEYgateway...)
            XHR.open('POST', "https://apikey:" + key + "@gateway.watsonplatform.net/visual-recognition/api/v3/classify?version=2018-03-19");
            XHR.send(formData);
            break;
        case "Google":
            // Google takes the base64 image content inside a JSON request body
            XHR.open('POST', "https://vision.googleapis.com/v1/images:annotate?key=" + key);
            XHR.setRequestHeader('Content-Type', 'application/json; charset=UTF-8');
            XHR.send(JSON.stringify({"requests":[{"image":{"content": image.substring("data:image/png;base64,".length)},
                                                  "features":[{"type": "LABEL_DETECTION", "maxResults":32},
                                                              {"type": "TEXT_DETECTION", "maxResults":32},
                                                              {"type": "FACE_DETECTION", "maxResults":32},
                                                              {"type": "IMAGE_PROPERTIES", "maxResults":32}
                                                             ]}]
                                    }));
            break;
        case "Microsoft":
            // see https://social.msdn.microsoft.com/Forums/en-US/807ee18d-45e5-410b-a339-c8dcb3bfa25b/testing-project-oxford-ocr-how-to-use-a-local-file-in-base64-for-example?forum=mlapi
            XHR.open('POST', "https://westeurope.api.cognitive.microsoft.com/vision/v1.0/analyze?visualFeatures=Description,Tags,Faces,Color,Categories&subscription-key=" + key);
            XHR.setRequestHeader('Content-Type', 'application/octet-stream');
            XHR.send(image);
            break;
    }
};
const machine_learning_browser_warning = function () {
    // Warns about configurations where machine learning may not work well.
    const user_agent = window.navigator.userAgent;
    if (user_agent.indexOf("Chrome") < 0) {
        inform("Possible browser compatibility problem",
               "Machine learning has been tested in Chrome. If you encounter problems switch to Chrome.");
    } else if (user_agent.indexOf("arm") >= 0 && user_agent.indexOf("X11") >= 0) {
        inform("Possible Raspberry Pi problem",
               "You may find that the Raspberry Pi is too slow for machine learning to work well.");
    }
};
let load_transfer_training = (source_name, training_data, callback) => {
    // Loads previously-saved transfer-learning training data (a JSON string
    // beginning with {"saved_<source_name>_training":) into the support
    // window for that source, creating the window first when needed.
    // callback (if given) is invoked with "Ready" once the data set is loaded.
    record_callbacks(callback);
    let source;
    const training_heading = '{"saved_' + source_name + '_training":';
    if (training_data.slice(0, training_heading.length) === training_heading) {
        source = 'training using ' + source_name;
    } else {
        // the string doesn't begin with the expected heading so refuse to load it
        inform("Error loading " + source_name + " training", "Unrecognised saved training");
        return;
    }
    let new_window = !ecraft2learn.support_window[source] || ecraft2learn.support_window[source].closed;
    if (new_window) {
        // a fresh window announces itself with a "Loaded" message (handled below)
        create_machine_learning_window(source, undefined, undefined, undefined, true);
    } else {
        // window already running: send it the training data directly
        ecraft2learn.support_window[source].postMessage({training_data: training_data}, "*");
    }
    let receive_messages_from_iframe =
        function (event) {
            if (event.data === "Loaded") {
                // new window is ready -- deliver the training data now
                ecraft2learn.support_window[source].postMessage({training_data: training_data}, "*");
            } else if (typeof event.data.data_set_loaded !== 'undefined') {
                // data accepted: remember its bucket labels and report readiness
                ecraft2learn.training_buckets[source] = event.data.data_set_loaded;
                invoke_callback(callback, "Ready");
                window.removeEventListener('message', receive_messages_from_iframe);
            }
        };
    window.addEventListener('message', receive_messages_from_iframe, false);
};
let train = function (options) {
    // Opens (or reuses) the machine-learning training window for options.source
    // and configures it with the requested bucket labels. If a window exists
    // with incompatible buckets it is discarded and train() restarts.
    // options can be
    let source = options.source; // can be 'training using camera','training using microphone', "posenet", or more
    let buckets_as_snap_list = options.buckets_as_snap_list; // list of labels (as Snap! object)
    let add_to_previous_training = options.add_to_previous_training; // if false will throw away any current training
    let page_introduction = options.page_introduction; // optional HTML that will appear in place of the default on training page
    let callback = options.callback; // if defined will be called when training finished
    let together = options.together; // if true enable togetherJS collaboration
    let together_url = options.together_url; // another Snap! (or NetsBlox) wants to collaborate using this URL
    let iframe_in_new_tab = options.iframe_in_new_tab; // if not true then iframe is either full size covering up Snap! or a single pixel
    let training_name = options.training_name; // used by audio training
    let buckets = buckets_as_snap_list.contents;
    // audio training always needs a background-noise bucket
    if (source === 'training using microphone' && buckets.indexOf('_background_noise_') < 0) {
        buckets.push('_background_noise_');
    }
    let buckets_equal = function (buckets1, buckets2) {
        // true when both bucket lists exist and hold the same labels in the same order
        if (!buckets1 || !buckets2) {
            return false;
        }
        return buckets1 === buckets2 ||
               (buckets1.length === buckets2.length &&
                buckets1.every(function (bucket_name, index) {
                    return bucket_name === buckets2[index];
                }));
    };
    record_callbacks(callback);
    if (!ecraft2learn.support_window[source] || ecraft2learn.support_window[source].closed) {
        // no live training window yet: create one and configure it as it loads
        let machine_learning_window = create_machine_learning_window(source);
        ecraft2learn.training_buckets[source] = buckets;
        let receive_messages_from_iframe =
            function (event) {
                if (event.data === "Loaded") {
                    // window is up; send it the bucket labels
                    machine_learning_window.postMessage({training_class_names: buckets,
                                                         training_name: training_name},
                                                        "*");
                } else if (event.data === "Ready") {
                    // window is configured; optionally replace its introduction text
                    if (page_introduction) {
                        machine_learning_window.postMessage({new_introduction: page_introduction}, "*");
                    }
                    invoke_callback(callback, "Ready");
                }
            };
        window.addEventListener('message', receive_messages_from_iframe, false);
        return;
    }
    if (add_to_previous_training &&
        // either the same bucket labels or the previous one was empty
        (ecraft2learn.training_buckets[source] && ecraft2learn.training_buckets[source].length === 0 ||
         buckets_equal(buckets, ecraft2learn.training_buckets[source]))) {
        if (ecraft2learn.support_iframe[source]) {
            if (ecraft2learn.training_buckets[source].length === 0) {
                // window was opened without buckets; configure it now
                ecraft2learn.training_buckets[source] = buckets;
                ecraft2learn.support_window[source].postMessage({training_class_names: buckets,
                                                                training_name: training_name},
                                                               "*");
                if (page_introduction) {
                    ecraft2learn.support_window[source].postMessage({new_introduction: page_introduction}, "*");
                }
            }
            open_support_window(source);
        } else if (iframe_in_new_tab) {
            // would like to go to that window: ecraft2learn.support_window.focus[source]();
            // but browsers don't allow it unless clear the user initiated it
            inform("Training tab ready",
                   "Go to the training window whenever you want to add to the training.");
        }
        ecraft2learn.support_window[source].postMessage('restart', '*');
        invoke_callback(callback, "Ready");
    } else {
        // bucket labels changed (or training discarded): tear the window down and retry
        if (iframe_in_new_tab) {
            ecraft2learn.support_window[source].close();
        }
        if (ecraft2learn.support_iframe[source]) {
            ecraft2learn.support_iframe[source].remove();
        }
        ecraft2learn.support_window[source] = undefined;
        // start over
        train(options);
    }
};
const open_support_window = function (source) {
    // Shows the support iframe for the given source at full size,
    // first (re)creating the machine-learning window if it is missing or closed.
    const current_window = ecraft2learn.support_window[source];
    if (!current_window || current_window.closed) {
        create_machine_learning_window(source);
    }
    const iframe = ecraft2learn.support_iframe[source];
    iframe.style.width = "100%";
    iframe.style.height = "100%";
    ecraft2learn.support_window[source].postMessage('Show support iframe', '*');
};
const create_machine_learning_window = function (source, iframe_in_new_tab, together_url, together, one_pixel_iframe) {
    // Opens the support page for the given machine-learning service, either in a
    // new browser tab (deprecated -- only for 'training using camera') or in an
    // iframe appended to document.body (full size, or 1x1 when one_pixel_iframe).
    // together_url, when provided, is used verbatim as the page URL.
    // Records the iframe in ecraft2learn.support_iframe[source] and the window in
    // ecraft2learn.support_window[source]; returns the support page's window.
    let URL, support_window;
    source = source.trim(); // ignore white spaces on ends
    if (together_url) {
        URL = together_url;
    } else {
        if (source === 'training using camera') {
            URL = "/camera-train/index.html?translate=1";
            if (together) {
                URL += "&together=1";
            }
        } else if (source === 'training using microphone') {
            URL = "/microphone-train/index.html?translate=1";
        } else if (source === 'training using microphone (old version)') {
            URL = "/microphone-train/index-old.html?translate=1";
        } else if (source === 'posenet') {
            URL = "/posenet/index.html?translate=1";
        } else if (source === 'style transfer') {
            URL = "/style-transfer/index.html";
        } else if (source === 'image classifier') {
            URL = "/mobilenet/index.html";
        } else if (source === 'tensorflow.js') {
            URL = "/tensorflow/index.html";
        }
        // BUG FIX: Location.protocol includes the trailing colon (e.g. "file:"),
        // so the old comparison with 'file' never matched and pages opened from
        // the local file system were wrongly sent to the hosted GitHub URL.
        if (window.location.hostname === "localhost" || window.location.protocol === 'file:') {
            URL = ".." + URL;
        } else {
            URL = "https://ecraft2learn.github.io/ai" + URL;
        }
    }
    if (iframe_in_new_tab) {
        // deprecated -- only works for source === 'training using camera'
        machine_learning_browser_warning();
        // FIXME(review): 'buckets' is not defined in this function's scope; this
        // deprecated branch relies on an outer/global binding -- confirm before use.
        support_window = window.open(URL, "Training " + buckets);
        window.addEventListener('unload',
                                function () {
                                    // close the training tab when Snap! goes away
                                    support_window.close();
                                });
    } else {
        let iframe = document.createElement('iframe');
        document.body.appendChild(iframe);
        iframe.src = URL;
        if (one_pixel_iframe) {
            iframe.style.width = '1px';
            iframe.style.height = '1px';
        } else {
            iframe.style.width = '100%';
            iframe.style.height = '100%';
        }
        iframe.style.border = '0'; // CSS lengths are strings, not numbers
        iframe.style.position = 'absolute';
        iframe.style.backgroundColor = 'white';
        // grant the iframe the device permission it needs, see
        // https://sites.google.com/a/chromium.org/dev/Home/chromium-security/deprecating-permissions-in-cross-origin-iframes
        if (source === 'training using microphone') {
            iframe.allow = "microphone";
        } else if (source === 'training using camera' || source === 'classify image' || source === 'posenet') {
            iframe.allow = "camera";
        }
        ecraft2learn.support_iframe[source] = iframe;
        support_window = iframe.contentWindow;
    }
    ecraft2learn.support_window[source] = support_window;
    window.addEventListener(
        'message',
        function (event) {
            if (event.data === 'Hide support iframe') {
                if (typeof ecraft2learn.support_iframe[source] !== 'undefined') {
                    // shrink rather than remove so the page keeps running
                    ecraft2learn.support_iframe[source].style.width = "1px";
                    ecraft2learn.support_iframe[source].style.height = "1px";
                }
            } else if (event.data === "Ready" && typeof ecraft2learn.support_window_is_ready !== 'undefined') {
                ecraft2learn.support_window_is_ready[source] = true;
            } else if (typeof event.data.show_message !== 'undefined') {
                show_message(event.data.show_message, event.data.duration);
            } else if (typeof event.data.error !== 'undefined') {
                inform("Error message received from a support window", event.data.error);
            }
        },
        false);
    return support_window;
};
const open_posenet_window = function () {
    // Warns about browser compatibility, then opens (or re-shows) the PoseNet
    // support window and returns whatever open_support_window answers.
    machine_learning_browser_warning();
    return open_support_window('posenet');
};
const machine_learning_window_request = function (machine_learning_window,
                                                 message_maker,
                                                 training_image_width,
                                                 training_image_height,
                                                 image,
                                                 alert_message) {
    // Captures an image (the supplied one, otherwise a camera frame) scaled to
    // the training dimensions and posts it -- wrapped by message_maker -- to the
    // given machine-learning support window. When the window is missing, shows
    // alert_message (if any) and does nothing.
    if (!machine_learning_window) {
        if (alert_message) {
            inform("Training request warning", alert_message);
        }
        return;
    }
    const post_image = function () {
        const canvas = add_photo_to_canvas(image || ecraft2learn.video,
                                           training_image_width,
                                           training_image_height);
        const image_URL = canvas.toDataURL('image/png');
        machine_learning_window.postMessage(message_maker(image_URL), "*");
    };
    if (ecraft2learn.video) {
        post_image();
        return;
    }
    // better to use 640x480 and then scale it down before sending it off to the training tab
    ecraft2learn.setup_camera(640, 480, post_image);
};
const posenet_window_request =
    function (message_maker, training_image_width, training_image_height, image, alert_message) {
        // Convenience wrapper targeting the PoseNet support window.
        // if image is undefined then the video element is used
        // if alert_message is undefined no message is displayed if the posenet window hasn't been created
        machine_learning_window_request(ecraft2learn.support_window['posenet'],
                                        message_maker,
                                        training_image_width,
                                        training_image_height,
                                        image,
                                        alert_message);
};
const support_window_request =
    function (alert_message, message_maker, training_image_width, training_image_height, image) {
        // Convenience wrapper targeting the camera-training support window.
        // Note: unlike posenet_window_request, alert_message comes first here.
        machine_learning_window_request(ecraft2learn.support_window['training using camera'],
                                        message_maker,
                                        training_image_width,
                                        training_image_height,
                                        image,
                                        alert_message);
};
// Default dimensions (in pixels) for images sent to the training support window.
const TRAINING_IMAGE_WIDTH = 300;
const TRAINING_IMAGE_HEIGHT = 250;
var get_costumes = function (sprite) {
    // Answers the array of the sprite's costumes.
    // Warns the user and answers undefined when no sprite is given.
    if (sprite) {
        return sprite.costumes.contents;
    }
    alert("get_costumes called without specifying which sprite");
};
var costume_of_sprite = function (costume_number, sprite) {
    // Answers the costume of the sprite at the given 1-indexed costume_number,
    // or undefined (after warning the user) when the number is out of range.
    var costumes = get_costumes(sprite);
    if (!costumes) {
        // get_costumes already warned about the missing sprite;
        // previously costumes.length below would have thrown a TypeError
        return;
    }
    // BUG FIX: the old check used costume_number < 0, so 0 slipped through and
    // silently yielded costumes[-1] (undefined) instead of the warning promised
    // by the message text ("between 1 and N")
    if (costume_number < 1 || costume_number > costumes.length) {
        inform("Invalid costume number",
               "Cannot add costume number " + costume_number +
               " to training bucket.\n" +
               "Only numbers between 1 and " +
               costumes.length + " are permitted.");
        return;
    }
    return costumes[costume_number-1]; // 1-indexing to zero-indexing
};
const create_costume_with_style = function(style, costume, callback) {
// adds a costume to the sprite by applying the style of the sprite's costume number
// callback if provided will be called after this completes
// style can be any of the following
if (not_a_costume(costume, 'create costume in style', callback)) {
return;
}
let style_to_folder_name = {
"Katsushika Hokusai's Wave": 'wave',
"Francis Picabia's Udnie": 'udnie',
"Pablo Picasso's La Muse": 'la_muse',
"Mathura Style": 'mathura',
"Leonid Afremov's Rain Princess": 'rain_princess',
"Edvard Munch's Scream": 'scream',
"Théodore Géricault's Raft of the Medusa": 'wreck',
"Matilde Pérez": 'matilde_perez', // this one doesn't work well so isn't in the menu of styles
"Roberto Matta": 'matta',
};
let time_stamp = Date.now();
let costume_canvas = costume.contents;
request_of_support_window('style transfer',
'Ready',
() => {
return {style_transfer_request: {URL: costume_canvas.toDataURL(),
style: style_to_folder_name[style.trim()],
time_stamp: time_stamp}};
},
(message) => {
return typeof message.style_transfer_response !== 'undefined' &&
// reponse received and it is for the same request (time stamps match)
message.style_transfer_response.time_stamp === time_stamp;
},
(message) => {
// support window has responded with a data URL
// need to create a canvas and draw the image on it
let new_canvas = document.createElement('canvas');