Skip to content

Commit

Permalink
Removed a step to seed data during Docker image building (#277)
Browse files Browse the repository at this point in the history
  • Loading branch information
MugemaneBertin2001 committed Sep 11, 2024
1 parent 26bb22d commit 73b4c1b
Show file tree
Hide file tree
Showing 3 changed files with 38 additions and 33 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ RUN npm ci
COPY . .
ARG NODE_ENV
ARG MONGO_PROD_DB
RUN npm run seed
# RUN npm run seed
RUN npm run build
RUN npm prune --omit=dev
EXPOSE 4000
Expand Down
8 changes: 4 additions & 4 deletions src/resolvers/userResolver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -223,13 +223,13 @@ const resolvers: any = {
code: 'ValidationError',
},
})
let invitee;
let invitee
const invitation = await Invitation.findOne({ 'invitees.email': email })
.sort({ createdAt: -1 })
.exec();
.exec()

if (invitation) {
invitee = invitation.invitees.find(invitee => invitee.email === email);
invitee = invitation.invitees.find((invitee) => invitee.email === email)
}
const user = await User.create({
role: role || invitee?.role || 'user',
Expand Down
61 changes: 33 additions & 28 deletions src/utils/extractFileData.ts
Original file line number Diff line number Diff line change
@@ -1,50 +1,55 @@
import { ReadStream } from 'fs';
import { GraphQLError } from 'graphql';
import * as xlsx from 'xlsx';
import { ReadStream } from 'fs'
import { GraphQLError } from 'graphql'
import * as xlsx from 'xlsx'

const EmailPattern: RegExp = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
const EmailPattern = /^[^\s@]+@[^\s@]+\.[^\s@]+$/

type Role = 'trainee' | 'admin' | 'ttl' | 'coordinator';
type Role = 'trainee' | 'admin' | 'ttl' | 'coordinator'

export async function extractFileData(file: any) {
try {
const { createReadStream, filename } = await file;
const stream: ReadStream = createReadStream();
const { createReadStream, filename } = await file
const stream: ReadStream = createReadStream()

const buffer = await new Promise<Buffer>((resolve, reject) => {
const chunks: any[] = [];
stream.on('data', (chunk) => chunks.push(chunk));
stream.on('end', () => resolve(Buffer.concat(chunks)));
stream.on('error', reject);
});
const chunks: any[] = []
stream.on('data', (chunk) => chunks.push(chunk))
stream.on('end', () => resolve(Buffer.concat(chunks)))
stream.on('error', reject)
})

const workbook: xlsx.WorkBook = xlsx.read(buffer, { type: 'buffer' });
const invitees: { email: string; role: Role }[] = [];
const invalidRows: string[] = [];
const workbook: xlsx.WorkBook = xlsx.read(buffer, { type: 'buffer' })
const invitees: { email: string; role: Role }[] = []
const invalidRows: string[] = []

workbook.SheetNames.forEach((sheetName) => {
const worksheet: xlsx.WorkSheet = workbook.Sheets[sheetName];
const data: any[] = xlsx.utils.sheet_to_json(worksheet);
const worksheet: xlsx.WorkSheet = workbook.Sheets[sheetName]
const data: any[] = xlsx.utils.sheet_to_json(worksheet)

data.forEach((row) => {
const email = (row as { email?: string }).email?.trim();
const role = (row as { role?: string }).role?.trim().toLowerCase();

const validRoles: Role[] = ['trainee', 'admin', 'ttl', 'coordinator'];
if (email && EmailPattern.test(email) && role && validRoles.includes(role as Role)) {
invitees.push({ email, role: role as Role });
const email = (row as { email?: string }).email?.trim()
const role = (row as { role?: string }).role?.trim().toLowerCase()

const validRoles: Role[] = ['trainee', 'admin', 'ttl', 'coordinator']
if (
email &&
EmailPattern.test(email) &&
role &&
validRoles.includes(role as Role)
) {
invitees.push({ email, role: role as Role })
} else {
invalidRows.push(JSON.stringify(row));
invalidRows.push(JSON.stringify(row))
}
});
});
})
})

return { invitees, invalidRows, filename };
return { invitees, invalidRows, filename }
} catch (error) {
throw new GraphQLError('Failed to process the file.', {
extensions: {
code: 'FILE_PROCESSING_ERROR',
},
});
})
}
}

0 comments on commit 73b4c1b

Please sign in to comment.