julius.js · 122 lines (101 loc) · 3.89 KB

(function(window, navigator, undefined) {
  // Build an `onaudioprocess` handler bound to a Julius instance
  var postBuffer = function() {
    var that = this;
    return function(e) {
      var buffer = e.inputBuffer.getChannelData(0);
      // Optionally echo the captured audio to the output buffer
      if (that.audio._transfer) {
        var out = e.outputBuffer.getChannelData(0);
        for (var i = 0; i < 4096; i++) {
          out[i] = buffer[i];
        }
      }
      // Transfer audio to the recognizer
      that.recognizer.postMessage(buffer);
    };
  };

  // Create the `AudioContext` and a 4096-sample, mono `ScriptProcessorNode`
  var initializeAudio = function(audio) {
    audio.context = new (window.AudioContext || window.webkitAudioContext)();
    audio.processor = audio.context.createScriptProcessor(4096, 1, 1);
  };
  // Request microphone access, wire up the audio graph, and start the recognizer
  var bootstrap = function(pathToDfa, pathToDict, options) {
    var audio = this.audio;
    var recognizer = this.recognizer;
    // Bind so `terminate` keeps its `this` when called from the error callback
    var terminate = this.terminate.bind(this);

    // Compatibility
    navigator.getUserMedia = navigator.getUserMedia ||
                             navigator.webkitGetUserMedia ||
                             navigator.mozGetUserMedia ||
                             navigator.msGetUserMedia;

    navigator.getUserMedia(
      { audio: true },
      function(stream) {
        audio.source = audio.context.createMediaStreamSource(stream);
        audio.source.connect(audio.processor);
        audio.processor.connect(audio.context.destination);

        // Bootstrap the recognizer
        recognizer.postMessage({
          type: 'begin',
          pathToDfa: pathToDfa,
          pathToDict: pathToDict,
          options: options
        });
      },
      function(err) {
        terminate();
        console.error('JuliusJS failed: could not capture microphone input.');
      }
    );
  };
  var Julius = function(pathToDfa, pathToDict, options) {
    var that = this;
    options = options || {};

    // The context's nodemap: `source` -> `processor` -> `destination`
    this.audio = {
      // `AudioContext`
      context: null,
      // `AudioSourceNode` from captured microphone input
      source: null,
      // `ScriptProcessorNode` for julius
      processor: null,
      _transfer: options.transfer
    };
    // Do not pollute the options object that is forwarded to the recognizer
    delete options.transfer;

    // _Recognition is offloaded to a separate thread to avoid slowing the UI_
    this.recognizer = new Worker(options.pathToWorker || 'worker.js');

    // Messages from the worker: 'begin' (ready), 'recog' (recognition result),
    // 'log', and 'error'
    this.recognizer.onmessage = function(e) {
      if (e.data.type === 'begin') {
        that.audio.processor.onaudioprocess = postBuffer.call(that);
      } else if (e.data.type === 'recog') {
        if (e.data.firstpass) {
          typeof that.onfirstpass === 'function' &&
            that.onfirstpass(e.data.sentence, e.data.score);
        } else {
          typeof that.onrecognition === 'function' &&
            that.onrecognition(e.data.sentence);
        }
      } else if (e.data.type === 'log') {
        typeof that.onlog === 'function' &&
          that.onlog(e.data.sentence);
      } else if (e.data.type === 'error') {
        console.error(e.data.error);
        that.terminate();
      } else {
        console.info('Unexpected data received from julius:');
        console.info(e.data);
      }
    };

    initializeAudio(this.audio);
    bootstrap.call(this, pathToDfa, pathToDict, options);
  };
  // Overridable callbacks (signatures match how the worker results are dispatched)
  Julius.prototype.onfirstpass = function(sentence, score) { /* noop */ };
  Julius.prototype.onrecognition = function(sentence) { /* noop */ };
  Julius.prototype.onlog = function(obj) { console.log(obj); };
  Julius.prototype.onfail = function() { /* noop */ };

  // Stop processing audio and shut down the recognizer worker
  Julius.prototype.terminate = function() {
    this.audio.processor.onaudioprocess = null;
    this.recognizer.terminate();

    console.error('JuliusJS was terminated.');
    typeof this.onfail === 'function' && this.onfail();
  };

  window.Julius = Julius;
}(window, window.navigator));
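
// Example usage (an illustrative sketch only: the grammar/dictionary paths below
// are placeholders, not files defined by this script):
//
//   var julius = new Julius('path/to/grammar.dfa', 'path/to/grammar.dict', {
//     pathToWorker: 'worker.js', // default is 'worker.js'
//     transfer: true             // echo captured audio back to the output buffer
//   });
//
//   julius.onrecognition = function(sentence) {
//     console.log('Recognized:', sentence);
//   };
//   julius.onfirstpass = function(sentence, score) {
//     console.log('First pass:', sentence, score);
//   };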