diff --git a/Dockerfile b/Dockerfile
index 148be3ae..8ba307dc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,7 +6,7 @@ RUN npm ci
 COPY . .
 ARG NODE_ENV
 ARG MONGO_PROD_DB
-RUN npm run seed
+# RUN npm run seed
 RUN npm run build
 RUN npm prune --omit=dev
 EXPOSE 4000
diff --git a/src/resolvers/userResolver.ts b/src/resolvers/userResolver.ts
index 35096d11..a0fb770e 100644
--- a/src/resolvers/userResolver.ts
+++ b/src/resolvers/userResolver.ts
@@ -223,13 +223,13 @@ const resolvers: any = {
           code: 'ValidationError',
         },
       })
-      let invitee;
+      let invitee
       const invitation = await Invitation.findOne({ 'invitees.email': email })
         .sort({ createdAt: -1 })
-        .exec();
-
+        .exec()
+
       if (invitation) {
-        invitee = invitation.invitees.find(invitee => invitee.email === email);
+        invitee = invitation.invitees.find((invitee) => invitee.email === email)
       }
       const user = await User.create({
         role: role || invitee?.role || 'user',
diff --git a/src/utils/extractFileData.ts b/src/utils/extractFileData.ts
index c0100003..9e1e8ec5 100644
--- a/src/utils/extractFileData.ts
+++ b/src/utils/extractFileData.ts
@@ -1,50 +1,55 @@
-import { ReadStream } from 'fs';
-import { GraphQLError } from 'graphql';
-import * as xlsx from 'xlsx';
+import { ReadStream } from 'fs'
+import { GraphQLError } from 'graphql'
+import * as xlsx from 'xlsx'
 
-const EmailPattern: RegExp = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
+const EmailPattern = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
 
-type Role = 'trainee' | 'admin' | 'ttl' | 'coordinator';
+type Role = 'trainee' | 'admin' | 'ttl' | 'coordinator'
 
 export async function extractFileData(file: any) {
   try {
-    const { createReadStream, filename } = await file;
-    const stream: ReadStream = createReadStream();
+    const { createReadStream, filename } = await file
+    const stream: ReadStream = createReadStream()
 
     const buffer = await new Promise((resolve, reject) => {
-      const chunks: any[] = [];
-      stream.on('data', (chunk) => chunks.push(chunk));
-      stream.on('end', () => resolve(Buffer.concat(chunks)));
-      stream.on('error', reject);
-    });
+      const chunks: any[] = []
+      stream.on('data', (chunk) => chunks.push(chunk))
+      stream.on('end', () => resolve(Buffer.concat(chunks)))
+      stream.on('error', reject)
+    })
 
-    const workbook: xlsx.WorkBook = xlsx.read(buffer, { type: 'buffer' });
-    const invitees: { email: string; role: Role }[] = [];
-    const invalidRows: string[] = [];
+    const workbook: xlsx.WorkBook = xlsx.read(buffer, { type: 'buffer' })
+    const invitees: { email: string; role: Role }[] = []
+    const invalidRows: string[] = []
 
     workbook.SheetNames.forEach((sheetName) => {
-      const worksheet: xlsx.WorkSheet = workbook.Sheets[sheetName];
-      const data: any[] = xlsx.utils.sheet_to_json(worksheet);
+      const worksheet: xlsx.WorkSheet = workbook.Sheets[sheetName]
+      const data: any[] = xlsx.utils.sheet_to_json(worksheet)
 
       data.forEach((row) => {
-        const email = (row as { email?: string }).email?.trim();
-        const role = (row as { role?: string }).role?.trim().toLowerCase();
-
-        const validRoles: Role[] = ['trainee', 'admin', 'ttl', 'coordinator'];
-        if (email && EmailPattern.test(email) && role && validRoles.includes(role as Role)) {
-          invitees.push({ email, role: role as Role });
+        const email = (row as { email?: string }).email?.trim()
+        const role = (row as { role?: string }).role?.trim().toLowerCase()
+
+        const validRoles: Role[] = ['trainee', 'admin', 'ttl', 'coordinator']
+        if (
+          email &&
+          EmailPattern.test(email) &&
+          role &&
+          validRoles.includes(role as Role)
+        ) {
+          invitees.push({ email, role: role as Role })
         } else {
-          invalidRows.push(JSON.stringify(row));
+          invalidRows.push(JSON.stringify(row))
        }
-      });
-    });
+      })
+    })
 
-    return { invitees, invalidRows, filename };
+    return { invitees, invalidRows, filename }
   } catch (error) {
     throw new GraphQLError('Failed to process the file.', {
       extensions: {
         code: 'FILE_PROCESSING_ERROR',
       },
-    });
+    })
   }
 }