
Commit 3b25dff

Async work
1 parent 78f7dff commit 3b25dff

7 files changed: +301 additions, -7 deletions


particle/Particle.test.ts

Lines changed: 17 additions & 2 deletions
@@ -4106,7 +4106,7 @@ string`
   equal(program.particleAt(0).content, "Breck", "Macro evaluated")
 }
 
-testParticles.wakeTest = equal => {
+testParticles.wakeTest = async equal => {
   // Arrange
   let str = ""
   class Foo extends Particle {
@@ -4115,7 +4115,8 @@ testParticles.wakeTest = equal => {
     }
   }
   // Act
-  const particle = new (<any>Foo)(`c
+  const particle = new (<any>Foo)()
+  await particle.loadFromStream(`c
 b
 a
 d
@@ -4126,6 +4127,20 @@ g
   equal(str, "abcdefg")
 }
 
+testParticles.fromStreamTest = async equal => {
+  // Arrange
+  const particle = new Particle()
+  if (!particle.isNodeJs()) return
+  const fs = require("fs")
+  const path = require("path")
+  const filepath = path.join(__dirname, "readme.scroll")
+  const stream = fs.createReadStream(filepath, {
+    encoding: "utf8"
+  })
+  await particle.loadFromStream(stream)
+  equal(particle.toString(), fs.readFileSync(filepath, "utf8"), "Stream loaded correctly")
+}
+
 testParticles.queryMethods = equal => {
   // Arrange
   const particle = <any>Particle.fromCsv(Particle.iris)

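A note on the wakeTest change: the wake() call is removed from particle construction (see the Particle.ts hunks below), so waking now happens on the async load path, which is why the test builds an empty Foo and then streams its content in. A minimal sketch of the calling pattern this implies, assuming only what the diff shows (loadFromStream accepts a plain string as well as streams), written as it would appear inside one of these async tests:

// Old pattern: parse and wake synchronously in the constructor.
const eager = new Particle("a\n b")
// New pattern: construct empty, then load; wake() is awaited for each particle as it is created.
const lazy = new Particle()
await lazy.loadFromStream("a\n b")
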
particle/Particle.ts

Lines changed: 103 additions & 1 deletion
@@ -2,6 +2,7 @@ import { AbstractParticle } from "./AbstractParticle.particle"
 import { particlesTypes } from "../products/particlesTypes"
 
 const { Utils } = require("../products/Utils.js")
+const { Readable } = require("stream")
 
 declare type int = particlesTypes.int
 declare type atom = particlesTypes.atom
@@ -143,6 +144,39 @@ class ParserPool {
     return line.substr(0, firstBreak > -1 ? firstBreak : undefined)
   }
 
+  async createParticleAsync(parentParticle: particlesTypes.particle, block: string, index?: number): Promise<particlesTypes.particle> {
+    const rootParticle = parentParticle.root
+    if (rootParticle.particleTransformers) {
+      // A macro may return multiple new blocks.
+      const blocks = splitBlocks(rootParticle._transformBlock(block), SUBPARTICLE_MEMBRANE, PARTICLE_MEMBRANE)
+
+      const newParticles: particlesTypes.particle[] = []
+      for (const [newBlockIndex, block] of blocks.entries()) {
+        const particle = await this._createParticleAsync(parentParticle, block, index === undefined ? undefined : index + newBlockIndex)
+        newParticles.push(particle)
+      }
+      return newParticles[0]
+    }
+
+    const newParticle = await this._createParticleAsync(parentParticle, block, index)
+    return newParticle
+  }
+
+  async _createParticleAsync(parentParticle: particlesTypes.particle, block: string, index?: number): Promise<particlesTypes.particle> {
+    index = index === undefined ? parentParticle.length : index
+    const parser: any = this._getMatchingParser(block, parentParticle, index)
+    const { particleBreakSymbol } = parentParticle
+    const lines = block.split(particleBreakSymbol)
+    const subparticles = lines
+      .slice(1)
+      .map(line => line.substr(1))
+      .join(particleBreakSymbol)
+    const particle = new parser(undefined, lines[0], parentParticle, index)
+    if (subparticles.length) await particle.loadFromStream(subparticles)
+    await particle.wake()
+    return particle
+  }
+
   createParticle(parentParticle: particlesTypes.particle, block: string, index?: number): particlesTypes.particle {
     const rootParticle = parentParticle.root
     if (rootParticle.particleTransformers) {
@@ -174,7 +208,6 @@ class Particle extends AbstractParticle {
     this._setSubparticles(subparticles)
     if (index !== undefined) parent._getSubparticlesArray().splice(index, 0, this)
     else if (parent) parent._getSubparticlesArray().push(this)
-    this.wake()
   }
 
   private _uid: int
@@ -1804,6 +1837,75 @@ class Particle extends AbstractParticle {
     blocks.forEach((block, index) => parserPool.createParticle(this, block))
   }
 
+  async loadFromStream(input) {
+    const { edgeSymbol, particleBreakSymbol } = this
+    const parserPool = this._getParserPool()
+
+    let buffer = ""
+    const breakRegex = new RegExp(`${particleBreakSymbol}(?!${edgeSymbol})`)
+
+    // Node.js Readable stream
+    if (typeof process !== "undefined" && input instanceof require("stream").Readable) {
+      for await (const chunk of input) {
+        buffer += chunk.toString("utf8")
+
+        while (true) {
+          const breakIndex = buffer.search(breakRegex)
+          if (breakIndex === -1) break
+
+          const block = buffer.slice(0, breakIndex)
+          buffer = buffer.slice(breakIndex + particleBreakSymbol.length)
+
+          await parserPool.createParticleAsync(this, block)
+        }
+      }
+      // Process remaining buffer
+      await parserPool.createParticleAsync(this, buffer)
+    }
+    // Browser ReadableStream
+    else if (typeof ReadableStream !== "undefined" && input instanceof ReadableStream) {
+      const reader = input.getReader()
+      try {
+        while (true) {
+          const { done, value } = await reader.read()
+          if (done) break
+
+          buffer += new TextDecoder().decode(value) // Convert Uint8Array to string
+
+          while (true) {
+            const breakIndex = buffer.search(breakRegex)
+            if (breakIndex === -1) break
+
+            const block = buffer.slice(0, breakIndex)
+            buffer = buffer.slice(breakIndex + particleBreakSymbol.length)
+
+            await parserPool.createParticleAsync(this, block)
+          }
+        }
+        // Process remaining buffer
+        await parserPool.createParticleAsync(this, buffer)
+      } finally {
+        reader.releaseLock()
+      }
+    }
+    // Plain string input (works in both environments)
+    else if (typeof input === "string") {
+      buffer = input
+      while (true) {
+        const breakIndex = buffer.search(breakRegex)
+        if (breakIndex === -1) break
+
+        const block = buffer.slice(0, breakIndex)
+        buffer = buffer.slice(breakIndex + particleBreakSymbol.length)
+
+        await parserPool.createParticleAsync(this, block)
+      }
+      await parserPool.createParticleAsync(this, buffer)
+    } else {
+      throw new Error("Unsupported input type. Expected string, Node.js Readable, or ReadableStream.")
+    }
+  }
+
   protected _getCueIndex() {
     // StringMap<int> {cue: index}
     // When there are multiple tails with the same cue, index stores the last content.

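A note on the buffering in loadFromStream above: stream chunks can split a block anywhere, so the method only cuts the buffer at a particleBreakSymbol that is not followed by edgeSymbol, i.e. at the boundary before the next top-level block; whatever follows the last such boundary stays buffered until more data arrives or the stream ends. A small worked example of that break regex, assuming the default newline break symbol and single-space edge symbol:

// Hypothetical buffer state mid-stream: one complete top-level block ("c" with
// subparticles "b" and "a") followed by the start of the next block ("d").
const breakRegex = /\n(?! )/
let buffer = "c\n b\n a\nd\n e"
const breakIndex = buffer.search(breakRegex) // 7: the newline just before "d"
const block = buffer.slice(0, breakIndex) // "c\n b\n a" is parsed now
buffer = buffer.slice(breakIndex + 1) // "d\n e" waits for the next chunk or end of stream
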
particle/readme.scroll

Lines changed: 3 additions & 2 deletions
@@ -1,7 +1,8 @@
 title Particle
 
-This folder contains the base Particle library. This code is used in both the browser lib and node.js lib.
+This folder contains the base Particle library.
 
-? What are the differences between browser lib and node lib?
+This code is used in both the browser lib and node.js lib.
 
+? What are the differences between browser lib and node lib?
 The Node lib contains a dozen or so extra static methods. The implementations otherwise are almost identical with some slight overloads for the different environments.

perf/stream.js

Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
+#! /usr/bin/env node
+
+const { Particle } = require("../products/Particle.js")
+const { Readable } = require("stream")
+const path = require("path")
+const { Utils } = require("../products/Utils.js")
+const { Timer } = Utils
+
+const randomStrings = Particle.fromDisk(path.join(__dirname, "..", "readme.scroll")).map(block => block.toString() + "\n")
+const getRandomBlock = () => randomStrings[Math.floor(Math.random() * randomStrings.length)]
+
+const timer = new Timer()
+
+function createRandomStream(blockCount = 10000000) {
+  let blocksGenerated = 0
+
+  return new Readable({
+    encoding: "utf8",
+    read(size) {
+      if (blocksGenerated >= blockCount) {
+        this.push(null) // End the stream
+        return
+      }
+
+      // Generate a random block
+      this.push(getRandomBlock())
+      blocksGenerated++
+
+      // Push multiple blocks per read for efficiency
+      const maxBlocksPerChunk = 1000
+      for (let i = 0; i < maxBlocksPerChunk && blocksGenerated < blockCount; i++) {
+        this.push(getRandomBlock())
+        blocksGenerated++
+      }
+    }
+  })
+}
+
+const main = async () => {
+  timer.tick("start")
+  // Arrange
+  const particle = new Particle()
+  const stream = createRandomStream(1e6)
+
+  // Act
+  await particle.loadFromStream(stream)
+
+  timer.tick("loaded")
+
+  // Assert
+  console.log(`Parsed ${particle.length} blocks and ${particle.numberOfLines} lines`)
+  timer.tick("finish")
+
+  const str = particle.toString()
+  timer.tick("toString")
+
+  console.log(`${str.length} bytes`)
+
+  const newPart = new Particle(str)
+  timer.tick("from str")
+}
+
+main()

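The perf script above exercises the Node.js Readable branch of loadFromStream; the new browser branch accepts a WHATWG ReadableStream and decodes each chunk with TextDecoder. A minimal sketch of that path, not part of this commit (the fetch URL is hypothetical), run inside an async function:

const particle = new Particle()
const response = await fetch("/readme.scroll") // hypothetical endpoint serving a .scroll file
await particle.loadFromStream(response.body) // response.body is a ReadableStream of Uint8Array chunks
console.log(particle.numberOfLines)
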
sandbox/SandboxApp.ts

Lines changed: 1 addition & 1 deletion
@@ -107,7 +107,7 @@ This is my content
     }
     const fs = new ScrollFileSystem(files)
     const file = new ScrollFile(particle.toString(), "/main", fs)
-    await file.fuse()
+    await file.singlePassFuse()
     this.file = file
     willowBrowser.setHtmlOfElementWithIdHack("scrollFileSystemConsole", file.fusedCode)
   }

scrollFileSystem/ScrollFileSystem.ts

Lines changed: 25 additions & 1 deletion
@@ -151,6 +151,12 @@ class DiskWriter implements Storage {
     return file.content
   }
 
+  async createReadStream(absolutePath: string) {
+    return fs.createReadStream(absolutePath, {
+      encoding: "utf8"
+    })
+  }
+
   async list(folder: string) {
     if (isUrl(folder)) {
       return [] // URLs don't support directory listing
@@ -299,6 +305,23 @@ class ScrollFile {
     this.codeAtStart = await this.fileSystem.read(filePath)
   }
 
+  async singlePassFuse() {
+    const { fileSystem, filePath, defaultParser, codeAtStart } = this
+    this.scrollProgram = new defaultParser()
+    this.scrollProgram.setFile(this)
+    if (codeAtStart !== undefined) {
+      await this.scrollProgram.loadFromStream(codeAtStart)
+    } else {
+      this.timestamp = await fileSystem.getCTime(filePath)
+      const stream = fileSystem.createReadStream(filePath)
+      await this.scrollProgram.loadFromStream(stream)
+    }
+    // What happens if we encounter a new parser?
+    // Very frequently, if we encounter 1 parser we will encounter a sequence of parsers, so
+    // perhaps on wake, for now, we switch into collecting-parsers mode
+    // and then when we hit a non-parser, only at that moment do we recompile the parsers.
+  }
+
   async fuse() {
     // PASS 1: READ FULL FILE
     await this._readCodeFromStorage()
@@ -321,8 +344,9 @@ class ScrollFile {
     this.fusedCode = fusedCode
     this.parser = fusedFile?.parser || defaultParser
     // PASS 3: PARSE WITH CUSTOM PARSER OR STANDARD SCROLL PARSER
-    this.scrollProgram = new this.parser(fusedCode, filePath)
+    this.scrollProgram = new this.parser(undefined, filePath)
     this.scrollProgram.setFile(this)
+    await this.scrollProgram.loadFromStream(fusedCode)
     return this
   }
 }

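For comparison with the SandboxApp change earlier in this commit, a sketch of how a caller might use the new singlePassFuse, based only on the ScrollFile constructor arguments shown in this diff (codeAtStart, filePath, fileSystem); the no-argument ScrollFileSystem and the disk path are assumptions for illustration:

// In-memory code, as in SandboxApp.ts: codeAtStart is defined, so it is loaded as a string.
const file = new ScrollFile(particle.toString(), "/main", new ScrollFileSystem(files))
await file.singlePassFuse()

// No codeAtStart: the file is read via fileSystem.createReadStream and parsed in one pass.
const diskFile = new ScrollFile(undefined, "/hypothetical/readme.scroll", new ScrollFileSystem())
await diskFile.singlePassFuse()

Judging from the pass comments in fuse(), the intent appears to be to start parsing while the file is still being read, rather than reading the full file before parsing begins.
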
scrollFileSystem/perf.js

Lines changed: 89 additions & 0 deletions
@@ -0,0 +1,89 @@
+#!/usr/bin/env node
+
+/*
+This file contains a simple set of perf tests that can be run manually to keep fusion perf in check.
+*/
+
+// rm perf.cpuprofile; rm perf.heapprofile; node --cpu-prof --cpu-prof-name=perf.cpuprofile --heap-prof --heap-prof-name=perf.heapprofile perf.js
+
+const fs = require("fs")
+const path = require("path")
+const { Utils } = require("../products/Utils.js")
+const { Timer } = Utils
+const { Particle } = require("../products/Particle.js")
+const { Fusion } = require("../products/Fusion.js")
+const { ScrollFile } = require("scroll-cli")
+
+class PerfTest {
+  constructor(folderPath) {
+    this.folderPath = folderPath
+    this.timer = new Timer()
+    this.files = []
+    this.simpleStrings = []
+    this.particles = []
+    this.fusedFiles = []
+    this.scrollFiles = []
+  }
+
+  gatherFiles() {
+    this.files = fs
+      .readdirSync(this.folderPath)
+      .filter(file => file.endsWith(".scroll"))
+      .map(file => path.join(this.folderPath, file))
+    console.log(`Found ${this.files.length} .scroll files`)
+    this.tick("Finding files")
+    return this
+  }
+
+  readToStrings() {
+    this.simpleStrings = this.files.map(file => fs.readFileSync(file, "utf8"))
+    this.tick("Reading files to strings")
+    return this
+  }
+
+  parseToParticles() {
+    this.particles = this.simpleStrings.map(str => new Particle(str))
+    this.tick("Parsing to Particles")
+    return this
+  }
+
+  async fuseFiles() {
+    const fusion = new Fusion()
+    this.fusedFiles = await Promise.all(this.files.map(file => fusion.fuseFile(file)))
+    this.tick("Fusing files")
+    return this
+  }
+
+  parseAsScroll() {
+    this.scrollFiles = this.simpleStrings.map(str => new ScrollFile(str))
+    this.tick("Parsing as Scroll")
+    return this
+  }
+
+  tick(message) {
+    this.printMemoryUsage()
+    console.log("")
+    this.timer.tick(message)
+    console.log("----------")
+  }
+
+  printMemoryUsage() {
+    const used = process.memoryUsage()
+    console.log("\nMemory Usage:")
+    for (let key in used) {
+      console.log(`${key}: ${Math.round((used[key] / 1024 / 1024) * 100) / 100} MB`)
+    }
+  }
+
+  async runAll() {
+    this.tick("Starting performance tests...\n")
+    this.gatherFiles().readToStrings().parseToParticles()
+    await this.fuseFiles()
+    this.parseAsScroll()
+  }
+}
+
+// Run the tests
+const dir = "/Users/breck/pldb.io/concepts"
+const perfTest = new PerfTest(dir)
+perfTest.runAll().catch(console.error)
