Skip to content

Commit

Permalink
Expanded for NodeConf EU
Browse files Browse the repository at this point in the history
  • Loading branch information
Qard committed Nov 6, 2019
1 parent c1b0ffb commit 3728e8f
Show file tree
Hide file tree
Showing 32 changed files with 73,767 additions and 68 deletions.
6 changes: 0 additions & 6 deletions assets/about.js

This file was deleted.

36,638 changes: 36,635 additions & 3 deletions assets/test.csv

Large diffs are not rendered by default.

36,634 changes: 36,634 additions & 0 deletions assets/test.ldjson

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions code-surfer.js
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
import React from 'react'
import { CodeSurfer } from "mdx-deck-code-surfer"
import vsDarkPlus from "prism-react-renderer/themes/vsDarkPlus"
// import vsDarkPlus from "prism-react-renderer/themes/vsDarkPlus"

const CustomCodeSurfer = ({ title, code, steps }) => (
<CodeSurfer
title={title}
code={code}
lang="javascript"
showNumbers={false}
theme={vsDarkPlus}
// theme={vsDarkPlus}
steps={steps}
/>
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,13 @@ function pipeDirect (source, target) {
if (target.write && target.end) {
pipeStream(source, target)
return target
} else if (typeof target === 'function') {
}

if (typeof target === 'function') {
return target(source)
} else {
throw new Error('Unrecognized target type')
}

throw new Error('Unrecognized target type')
}

function pipe (source, ...transforms) {
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
19 changes: 19 additions & 0 deletions examples/microtask-priority.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
// Async generator: repeatedly runs `task` and yields each result until
// `ms` milliseconds have elapsed. The timeout is only observed if the
// awaited task ever lets the event loop reach its timers phase.
async function* doUntilTimeout (task, ms) {
  let expired = false
  setTimeout(() => {
    expired = true
  }, ms)

  while (!expired) {
    yield await task()
  }
}

// Demo driver: the task here is *synchronous*, so `yield await task()`
// only ever queues microtasks. NOTE(review): per notes.md this is meant
// to show the microtask queue starving Node's timer/idle queues — the
// setTimeout inside doUntilTimeout never fires, so this loop is intended
// to run forever rather than stop after 1000ms.
let n = 0
const numbers = doUntilTimeout(() => n++, 1000)

let sum = 0
// NOTE(review): top-level `for await` requires an ES module with
// top-level await support — confirm how this slide snippet is executed.
for await (let number of numbers) {
sum += number
}
console.log(sum)
File renamed without changes.
29 changes: 18 additions & 11 deletions assets/stream-line-split.js → examples/stream-line-split.js
Original file line number Diff line number Diff line change
@@ -1,31 +1,38 @@
const pipe = require('async-iterator-pipe')

async function* lineSplit (iterator) {
let buffer = ''
let buffer = Buffer.alloc(0)

for await (let chunk of iterator) {
buffer += chunk.toString()
let position = buffer.indexOf('\n')
buffer = Buffer.concat([ buffer, chunk ])

let position = buffer.indexOf(0x0a)
while (position >= 0) {
yield buffer.slice(0, position)

buffer = buffer.slice(position + 1)
position = buffer.indexOf('\n')
position = buffer.indexOf(0x0a)
}
}
}

async function* csv (iterator) {
let keys

for await (let line of iterator) {
const values = line.split(',')
const values = line.toString().split(',')

if (!keys) {
keys = values
} else {
const data = {}
for (let i = 0; i < values.length; i++) {
data[keys[i]] = values[i]
}
yield data
continue
}

const data = {}
for (let i = 0; i < values.length; i++) {
data[keys[i]] = values[i]
}

yield data
}
}

Expand Down
File renamed without changes.
File renamed without changes.
5 changes: 5 additions & 0 deletions intro/about.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
module.exports = {
"name": "Stephen Belanger",
"github": "https://github.com/qard",
"twitter": "https://twitter.com/stephenbelanger"
}
File renamed without changes.
File renamed without changes.
17 changes: 12 additions & 5 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,17 @@
"author": "Stephen Belanger <[email protected]> (https://github.com/qard)",
"license": "MIT",
"devDependencies": {
"mdx-deck": "^1.8.2",
"async-iterator-pipe": "^1.0.1",
"blessed": "^0.1.81",
"blessed-contrib": "^4.8.18",
"mdx-deck": "^1.10.2",
"mdx-deck-code-surfer": "^0.5.5",
"now": "^13.1.3",
"prism-react-renderer": "^0.1.5",
"raw-loader": "^1.0.0"
}
"now": "^16.3.1",
"prism-react-renderer": "^1.0.1",
"raw-loader": "^3.1.0",
"relative-microtime": "^2.0.0",
"styled-components": "^4.4.0",
"terminal-in-react": "^4.3.1"
},
"dependencies": {}
}
18 changes: 18 additions & 0 deletions performance/capped-array.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
// Fixed-capacity array: starts filled with `limit` zeros and discards the
// oldest entries as new values are pushed, so length never exceeds `limit`.
// Used as a sliding window for live chart data.
class CappedArray extends Array {
  constructor(limit) {
    super()
    this.limit = limit
    // Pre-fill so a chart renders a full window immediately.
    for (let i = 0; i < limit; i++) {
      this.push(0)
    }
  }

  push (value) {
    super.push(value)
    // Evict oldest entries once over capacity.
    while (this.length > this.limit) {
      this.shift()
    }
    // Honor Array#push's contract of returning the new length;
    // the original implicitly returned undefined.
    return this.length
  }
}

module.exports = CappedArray
7 changes: 7 additions & 0 deletions performance/container.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
const test = process.argv[2]
const mode = process.argv[3]

const producer = require(`./${test}/producer`)
const run = require(`./${test}/${mode}`)

run(producer(), process.stdout)
41 changes: 41 additions & 0 deletions performance/csv-to-json/async-iterator.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
const pipe = require('async-iterator-pipe')
const { splitLine, zipKeyValueLists } = require('./helper')

// Async generator: consumes raw CSV byte chunks and yields one plain
// object per data row, using the first line as the header row.
async function* csv (iterator) {
  // Bytes received so far that do not yet form a complete line.
  let buffer = Buffer.alloc(0)
  let keys

  for await (let chunk of iterator) {
    buffer = Buffer.concat([ buffer, chunk ])

    // Emit one record per newline-terminated line (0x0a === '\n').
    let position = buffer.indexOf(0x0a)
    while (position >= 0) {
      const line = buffer.slice(0, position).toString()

      const values = splitLine(line)
      if (!keys) {
        // First line is the header row.
        keys = values
      } else {
        yield zipKeyValueLists(keys, values)
      }

      buffer = buffer.slice(position + 1)
      position = buffer.indexOf(0x0a)
    }
  }

  // Flush a trailing record when the input does not end with a newline;
  // the original silently dropped it.
  if (buffer.length && keys) {
    yield zipKeyValueLists(keys, splitLine(buffer.toString()))
  }
}

// Serialize each record as one line of newline-delimited JSON.
async function* toJSON (iterator) {
  for await (const record of iterator) {
    yield JSON.stringify(record) + '\n'
  }
}

module.exports = (input, output) => {
pipe(
input,
csv,
toJSON,
output
)
}
15 changes: 15 additions & 0 deletions performance/csv-to-json/helper.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
module.exports = {
splitLine (line) {
return line
.match(/\s*(?:([^,]+)|"([^,]+)")/g)
.map(value => value.trim().replace(/"/g, ''))
},

zipKeyValueLists (keys, values) {
const data = {}
for (let i = 0; i < values.length; i++) {
data[keys[i]] = values[i]
}
return data
}
}
43 changes: 43 additions & 0 deletions performance/csv-to-json/producer.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
const { Readable } = require('stream')

module.exports = (headerCount = 10, batchSize = 10) => {
const headers = []
for (let i = 0; i < headerCount; i++) {
headers.push(`header_${i}`)
}
const headerBuffer = Buffer.from(`${headers.join(',')}\n`)

function makeLine () {
const values = []
for (let i = 0; i < headerCount; i++) {
values.push(Math.random() * 1000)
}
return values.join(',')
}

function makeLines (n) {
const lines = []
for (let i = 0; i < n; i++) {
lines.push(makeLine())
}
return lines.join('\n')
}

let buffer = Buffer.from(headerBuffer)

return new Readable({
read (size) {
while (buffer.length < size) {
buffer = Buffer.concat([
buffer,
Buffer.from(makeLines(batchSize))
])
}

setImmediate(() => {
this.push(buffer.slice(0, size))
buffer = buffer.slice(size + 1)
})
}
})
}
49 changes: 49 additions & 0 deletions performance/csv-to-json/stream.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
const { Transform, pipeline } = require('stream')
const { splitLine, zipKeyValueLists } = require('./helper')

// CSV-parsing Transform: buffers raw byte chunks and emits one object per
// data row, using the first line as the header row.
// NOTE(review): module-level instance holding per-stream state on `this` —
// it can only participate in a single pipeline per process.
const csv = new Transform({
  readableObjectMode: true,
  transform (data, encoding, callback) {
    if (!this.buffer) this.buffer = Buffer.alloc(0)
    let buffer = Buffer.concat([ this.buffer, data ])

    // Emit one record per newline-terminated line (0x0a === '\n').
    let position = buffer.indexOf(0x0a)
    while (position >= 0) {
      const line = buffer.slice(0, position).toString()

      const values = splitLine(line)
      if (!this.headers) {
        // First line is the header row.
        this.headers = values
      } else {
        this.push(zipKeyValueLists(this.headers, values))
      }

      buffer = buffer.slice(position + 1)
      position = buffer.indexOf(0x0a)
    }

    // Always store the remainder. The original only stored it when
    // non-empty, leaving a stale buffer behind that was re-prepended
    // (duplicating data) on the next chunk.
    this.buffer = buffer

    // Yield to the event loop between chunks.
    setImmediate(callback)
  },
  flush (callback) {
    // Emit a final record when the input did not end with a newline;
    // the original dropped it.
    if (this.buffer && this.buffer.length && this.headers) {
      this.push(zipKeyValueLists(this.headers, splitLine(this.buffer.toString())))
    }
    callback()
  }
})

// Transform parsed row objects into line-delimited JSON text.
// NOTE(review): module-level instance — usable in only one pipeline.
const ldjson = new Transform({
  writableObjectMode: true,
  transform (data, encoding, callback) {
    const line = `${JSON.stringify(data)}\n`
    this.push(line, encoding)
    // Defer completion a tick, mirroring the CSV transform above.
    setImmediate(callback)
  }
})

module.exports = (input, output) => {
pipeline(
input,
csv,
ldjson,
output,
err => console.error(err)
)
}
25 changes: 25 additions & 0 deletions performance/make-line.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
const MeasureStream = require('./measure-stream')

// const { join } = require('path')
// const { spawn } = require('child_process')
//
// module.exports = (test, mode) => {
// const gauge = new Measure(mode)
//
// const proc = spawn('node', ['container.js', test, mode], {
// cwd: __dirname
// })
// proc.stdout.pipe(gauge)
//
// return gauge.data
// }

module.exports = (test, mode) => {
const gauge = new MeasureStream(mode)

const producer = require(`./${test}/producer`)
const run = require(`./${test}/${mode}`)
run(producer(), gauge)

return gauge.data
}
40 changes: 40 additions & 0 deletions performance/measure-stream.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
const { Writable } = require('stream')
const microtime = require('relative-microtime')

const CappedArray = require('./capped-array')

// Round-robin line colors so two concurrent gauges are distinguishable.
const colors = ['red', 'yellow']
let n = 0

// Writable sink that measures incoming throughput: chunk sizes are
// grouped into one-millisecond buckets and recorded as x/y series
// suitable for a live chart.
class MeasureStream extends Writable {
  constructor(mode) {
    super()
    // Timer measuring time elapsed since construction.
    // NOTE(review): the bucket math below divides by 1e6, which assumes
    // nanoseconds — confirm against relative-microtime's actual unit.
    this.timer = microtime()

    this.data = {
      title: mode,
      x: new CappedArray(50),
      y: new CappedArray(50),
      style: {
        line: colors[n++]
      }
    }
  }

  _write (chunk, encoding, callback) {
    const { x, y } = this.data
    const elapsed = this.timer() // ns (presumably — see note above)
    const bucket = Math.floor(elapsed / 1000 / 1000)

    const isNewBucket = !x.length || x[x.length - 1] !== bucket
    if (isNewBucket) {
      // Start a new time bucket with this chunk's size.
      x.push(bucket)
      y.push(chunk.length)
    } else {
      // Accumulate into the current bucket.
      y[y.length - 1] += chunk.length
    }

    setImmediate(callback)
  }
}

module.exports = MeasureStream
4 changes: 4 additions & 0 deletions performance/notes.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
1. show upper test together to demonstrate async-iterators are equal or slightly faster
2. show upper test separately to confirm absolute performance in isolation
3. show csv-to-json separately first to show absolute performance
4. show csv-to-json together to demonstrate issue of microtask queue starving Node.js idle queue
Loading

1 comment on commit 3728e8f

@vercel
Copy link

@vercel vercel bot commented on 3728e8f Nov 6, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.