-
Notifications
You must be signed in to change notification settings - Fork 1
/
n.js
137 lines (113 loc) · 4.04 KB
/
n.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
const fs = require('fs')
const yargs = require('yargs/yargs')
/**
 * Entry point: runs a Python program under Pyodide inside Node.
 *
 * The program comes either from a positional file path ("n.js prog.py")
 * or an inline command ("n.js -c 'print(1+1)'") — exactly one of the two.
 * The job spec (input/output volume mounts) is read as JSON from the
 * BACALHAU_JOB_SPEC environment variable. Exits with status 1 on any
 * usage or configuration error.
 */
async function main() {
  // Parse command line: positional program path and/or the -c inline flag.
  const argv = yargs(process.argv.slice(2)).argv;
  let programPath = null;
  if (argv._.length === 1) {
    programPath = argv._[0];
  }
  if (programPath === null && !("c" in argv)) {
    console.error("Must specify a program: 'n.js python.py', or an inline command: 'n.js -c \"print(1+1)\"'");
    process.exit(1);
  }
  if (programPath !== null && ("c" in argv)) {
    console.error("Can't specify both program path and inline command");
    process.exit(1);
  }
  let program = "";
  if (programPath !== null) {
    // Read the Python source from disk.
    program = await fs.promises.readFile(programPath, 'utf8');
  }
  if ("c" in argv) {
    program = argv.c;
  }
  // BUG FIX: an unset env var is `undefined`, not "" — the old `== ""` check
  // never fired, and the missing variable surfaced later as a confusing
  // JSON parse error ("Unexpected token u in JSON"). A falsy check catches
  // both unset and empty.
  if (!process.env.BACALHAU_JOB_SPEC) {
    console.error("Must specify a BACALHAU_JOB_SPEC environment variable");
    process.exit(1);
  }
  let jobSpec;
  try {
    jobSpec = JSON.parse(process.env.BACALHAU_JOB_SPEC);
  } catch (e) {
    console.error("Error processing BACALHAU_JOB_SPEC json: " + e.toString());
    process.exit(1);
  }
  // Temporarily silence stdout to hide Pyodide's
  // "Loading distutils" / "Loaded distutils" startup chatter.
  const oldStdoutWrite = process.stdout.write;
  process.stdout.write = function () {};
  const pyodide_pkg = await import("./pyodide/pyodide.js");
  let LOGGING_ON = false;
  const pyodide = await pyodide_pkg.loadPyodide({
    indexURL: "./pyodide/",
    // Forward the Python program's stdout/stderr only after startup,
    // gated by LOGGING_ON which flips to true below.
    stdout: (s) => {
      if (LOGGING_ON) {
        console.log(s);
      }
    },
    stderr: (s) => {
      if (LOGGING_ON) {
        console.log(s);
      }
    },
  });
  LOGGING_ON = true;
  process.stdout.write = oldStdoutWrite;
  // Mount job input/output host directories into the Pyodide (Emscripten)
  // filesystem. A host path like /pyodide_inputs/foo appears in-wasm as /foo.
  mountVolumes(pyodide, jobSpec.inputs, '/pyodide_inputs');
  mountVolumes(pyodide, jobSpec.outputs, '/pyodide_outputs');
  await pyodide.runPythonAsync(program);
  // TODO: support requirements
  // TODO: expose requirements as a webserver, if needed?
}

/**
 * Mount each volume's host directory into the Pyodide FS via NODEFS.
 * No-op when `volumes` is absent or not an array.
 *
 * @param {object} pyodide - loaded Pyodide instance
 * @param {Array<{path: string}>|undefined} volumes - volume entries from the job spec
 * @param {string} prefix - host-path prefix stripped to form the in-wasm mount point
 */
function mountVolumes(pyodide, volumes, prefix) {
  if (!Array.isArray(volumes)) {
    return;
  }
  volumes.forEach((volume) => {
    const hostPath = volume.path;
    const wasmPath = volume.path.replace(prefix, '');
    // NOTE(review): this existence check inspects the HOST filesystem, not
    // the Pyodide FS — presumably adequate because both mirror the same
    // layout, but confirm; pyodide.FS.analyzePath(wasmPath).exists would
    // check the in-wasm tree directly.
    if (!fs.existsSync(wasmPath)) {
      pyodide.FS.mkdir(wasmPath);
    }
    pyodide.FS.mount(pyodide.FS.filesystems.NODEFS, { root: hostPath }, wasmPath);
  });
}

// BUG FIX: main() was a floating promise — any rejection (bad file path,
// Pyodide load failure, Python exception) became an unhandled rejection.
// Surface the error and exit non-zero instead.
main().catch((e) => {
  console.error(e);
  process.exit(1);
});