-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.js
More file actions
95 lines (77 loc) · 3.11 KB
/
app.js
File metadata and controls
95 lines (77 loc) · 3.11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
const createError = require('http-errors');
const express = require('express');
const path = require('path');
const cookieParser = require('cookie-parser');
const logger = require('morgan');
const expressLayouts = require('express-ejs-layouts');
require("dotenv").config();
const fs = require('fs');
// Note: The Azure Cognitive Services SDK is published as an ES module while this
// webapp is written in CommonJS, so we load the SDK with a dynamic import() and
// keep all SDK-dependent logic inside the resolved callback.
import("microsoft-cognitiveservices-speech-sdk").then(
  (sdk) => {
    const speechConfig = sdk.SpeechConfig.fromSubscription(process.env.SPEECH_KEY, process.env.SPEECH_REGION);
    speechConfig.speechRecognitionLanguage = "en-US";
    /**
     * Transcribe a single WAV file and log the result.
     * @param {string} fileName - Path to a WAV audio file on disk.
     */
    function fromFile(fileName) {
      const audioConfig = sdk.AudioConfig.fromWavFileInput(fs.readFileSync(fileName));
      const speechRecogniser = new sdk.SpeechRecognizer(speechConfig, audioConfig);
      speechRecogniser.recognizeOnceAsync(result => {
        switch (result.reason) {
          case sdk.ResultReason.RecognizedSpeech:
            console.log(`RECOGNISED: Text=${result.text}`);
            break;
          case sdk.ResultReason.NoMatch:
            console.log("NOMATCH: Speech could not be recognised.");
            break;
          case sdk.ResultReason.Canceled:
            const cancellation = sdk.CancellationDetails.fromResult(result);
            console.log(`CANCELED: Reason=${cancellation.reason}`);
            if (cancellation.reason === sdk.CancellationReason.Error) {
              console.log(`CANCELED: ErrorCode=${cancellation.ErrorCode}`);
              console.log(`CANCELED: ErrorDetails=${cancellation.errorDetails}`);
              console.log("CANCELED: Did you set up the speech resource key and region values?");
            }
            break;
        }
        speechRecogniser.close();
      });
    }
    // BUG FIX: fromFile() was previously called with no argument, so
    // fs.readFileSync(undefined) threw a TypeError that the .catch below
    // swallowed and misreported as an SDK import failure. Only run the
    // recognition demo when an audio file has actually been configured.
    const sampleAudioFile = process.env.SPEECH_AUDIO_FILE;
    if (sampleAudioFile) {
      fromFile(sampleAudioFile);
    } else {
      console.log("SPEECH_AUDIO_FILE not set; skipping speech recognition demo.");
    }
  }).catch((error) => {
    console.log("Failed to import Microsoft Speech SDK", error);
  });
// Initialise the Express application instance used throughout this file.
const app = express();

// View engine setup: EJS templates live in ./views.
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');

// Standard middleware (registration order matters: each request flows
// through these in sequence before reaching the routes below).
app.use(expressLayouts);
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({extended: true})); // fixed: missing statement semicolon (was relying on ASI)
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));

// Application routes are defined in routes/userRoutes.js and mounted at the root.
const userRoutes = require('./routes/userRoutes');
app.use('/', userRoutes);
// --- Error-handling middleware (must be registered after all routes) ---

// No route matched: forward a 404 to the central error handler below.
app.use((req, res, next) => {
  next(createError(404));
});

// Central error handler. Express recognises it by its four-argument arity.
// Full error details are exposed to the view only in the development
// environment; production renders the error page without internals.
app.use((err, req, res, next) => {
  const showDetails = req.app.get('env') === 'development';
  res.locals.message = err.message;
  res.locals.error = showDetails ? err : {};
  res.status(err.status || 500);
  res.render('error');
});
// Declare port number: the PORT env var takes precedence over the default 5000.
const PORT = process.env.PORT || 5000;

// Start the HTTP server.
app.listen(PORT, () => {
  console.log(`Server started on port ${PORT}`);
}); // fixed: missing statement semicolon (was relying on ASI)

// Export the app instance (useful for testing, e.g. with supertest).
module.exports = app;