Skip to content

Commit

Permalink
Integrate --timeline into the rest of the db query subsystem (#1483)
Browse files Browse the repository at this point in the history
* Integrate --timeline into the rest of the db query subsystem

* Fix vscode, cleanup
  • Loading branch information
V-FEXrt authored Dec 6, 2023
1 parent 4fa5f88 commit d9e2a5b
Show file tree
Hide file tree
Showing 10 changed files with 215 additions and 293 deletions.
23 changes: 6 additions & 17 deletions extensions/vscode/lsp-client/src/timelineNode.ts
Original file line number Diff line number Diff line change
Expand Up @@ -104,34 +104,23 @@ function updatePanel(panel: vscode.WebviewPanel): void {

/**
 * Render the full timeline: run wake with `--timeline` (via useWake) and load
 * the HTML it prints on stdout into the webview panel.
 */
function setTimeline(): void {
  timelinePanel.title = 'Timeline';
  useWake((stdout: string) => {
    timelinePanel.webview.html = stdout;
  });
}

/**
 * Refresh the timeline view. Since `wake --timeline --last` regenerates the
 * whole page, a refresh is simply a full re-render via setTimeline().
 */
function refreshTimeline(): void {
  setTimeline();
}

function useWake(option: string, callback: (stdout: string) => void): void {
function useWake(callback: (stdout: string) => void): void {
if (wakeBinary == '') {
vscode.window.showErrorMessage(`Timeline: the path to wake binary is empty. Please provide a valid path in the extension's settings.`);
return;
}

const extraArgs = option === "" ? [] : [option];
// spawn process in directory where the wake executable is and run it with --timeline
const process = spawn(wakeBinary, [`--timeline`, ...extraArgs], { cwd: `${wakeBinary.substring(0, wakeBinary.lastIndexOf('/'))}` });
const process = spawn(wakeBinary, [`--timeline`, `--last`], { cwd: `${wakeBinary.substring(0, wakeBinary.lastIndexOf('/'))}` });

let err = "";
let stdout = "";
Expand Down
31 changes: 12 additions & 19 deletions share/wake/html/timeline_main.js
Original file line number Diff line number Diff line change
Expand Up @@ -178,21 +178,14 @@ class JobNode {
}
}

// Record job-to-job edges implied by file dependencies: each entry names a
// job that writes a file and a job that reads it. For every pair whose jobs
// are both present in jobMap, the writer's id is added to the reader's
// `dependencies` set. Pairs referencing unknown jobs are silently ignored.
function fillAllDependencies(dependencies, jobMap) {
  for (const dep of dependencies) {
    const writer = dep.writer;
    const reader = dep.reader;

    if (jobMap.has(writer) && jobMap.has(reader)) {
      jobMap.get(reader).dependencies.add(writer);
    }
  }
}

Expand Down Expand Up @@ -353,18 +346,18 @@ timeline.on('click', function (properties) {
// Receive refreshed timeline data pushed from the extension host
// (timelinePanel.webview.postMessage) and rebuild the visualization.
window.addEventListener("message", event => {
  const message = event.data;
  const newJobReflections = message.jobReflections;
  const newFileDependencies = message.fileDependencies;
  processChanges(newJobReflections, newFileDependencies);
});

function processChanges(newJobReflections, newFileAccesses) {
function processChanges(newJobReflections, newFileDependencies) {
jobMap.clear();

for (const job of newJobReflections) {
jobMap.set(job.job, new JobNode(job));
}

fillAllDependencies(newFileAccesses, jobMap);
fillAllDependencies(newFileDependencies, jobMap);

let topSortedJobs = topSort(jobMap);
assignParents(jobMap, topSortedJobs);
Expand Down Expand Up @@ -407,4 +400,4 @@ function jobsEqual(jobA, jobB) {

// Process initial data embedded in the generated page: the --timeline HTML
// carries the job reflections and file dependencies as JSON script elements.
processChanges(JSON.parse(document.getElementById("jobReflections").textContent),
               JSON.parse(document.getElementById("fileDependencies").textContent));
9 changes: 9 additions & 0 deletions src/json/json5.h
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,15 @@ struct JAST {
static bool parse(const std::string &body, std::ostream &errs, JAST &out);
static bool parse(const char *body, size_t len, std::ostream &errs, JAST &out);

template <class T>
static JAST from_vec(const std::vector<T> &items) {
JAST json(JSON_ARRAY);
for (const T &i : items) {
json.add("", i.to_json());
}
return json;
}

const JAST &get(const std::string &key) const;
JAST &get(const std::string &key);
wcl::optional<const JAST *> get_opt(const std::string &key) const;
Expand Down
147 changes: 109 additions & 38 deletions src/runtime/database.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,7 @@ struct Database::detail {
sqlite3_stmt *get_tags;
sqlite3_stmt *get_all_tags;
sqlite3_stmt *get_edges;
sqlite3_stmt *get_job_visualization;
sqlite3_stmt *get_file_access;
sqlite3_stmt *get_file_dependency;
sqlite3_stmt *get_output_files;
sqlite3_stmt *remove_output_files;
sqlite3_stmt *remove_all_jobs;
Expand Down Expand Up @@ -140,8 +139,7 @@ struct Database::detail {
get_tags(0),
get_all_tags(0),
get_edges(0),
get_job_visualization(0),
get_file_access(0),
get_file_dependency(0),
get_interleaved_output(0) {}
};

Expand Down Expand Up @@ -466,22 +464,12 @@ std::string Database::open(bool wait, bool memory, bool tty) {
"select distinct user.job_id as user, used.job_id as used"
" from filetree user, filetree used"
" where user.access=1 and user.file_id=used.file_id and used.access=2";
const char *sql_get_job_visualization =
"select j.job_id, j.label, j.directory, j.commandline, j.environment, j.stack, j.stdin, "
"j.starttime, j.endtime, j.stale, r.time, r.cmdline, s.status, s.runtime, s.cputime, "
"s.membytes, s.ibytes, s.obytes"
" from jobs j left join stats s on j.stat_id=s.stat_id join runs r on j.run_id=r.run_id"
" where substr(cast(j.commandline as varchar), 1, 8) != '<source>'"
" and substr(cast(j.commandline as varchar), 1, 7) != '<claim>'"
" and substr(cast(j.commandline as varchar), 1, 7) != '<mkdir>'"
" and substr(cast(j.commandline as varchar), 1, 7) != '<write>'"
" and substr(cast(j.commandline as varchar), 1, 6) != '<hash>'"
" and substr(cast(j.label as varchar), 1, 6) != '<hash>'";
const char *sql_get_file_access =
"select access, job_id"
" from filetree"
" where access != 1"
" order by file_id, access desc, job_id";
const char *sql_get_file_dependency =
"SELECT l.job_id, r.job_id"
" FROM filetree l"
" INNER JOIN filetree r"
" ON l.file_id = r.file_id"
" WHERE l.access = 2 AND r.access = 0";
const char *sql_get_output_files =
"select f.path"
" from filetree ft join files f on f.file_id=ft.file_id join jobs j on ft.job_id=j.job_id"
Expand Down Expand Up @@ -554,8 +542,7 @@ std::string Database::open(bool wait, bool memory, bool tty) {
PREPARE(sql_get_tags, get_tags);
PREPARE(sql_get_all_tags, get_all_tags);
PREPARE(sql_get_edges, get_edges);
PREPARE(sql_get_job_visualization, get_job_visualization);
PREPARE(sql_get_file_access, get_file_access);
PREPARE(sql_get_file_dependency, get_file_dependency);
PREPARE(sql_get_output_files, get_output_files);
PREPARE(sql_remove_output_files, remove_output_files);
PREPARE(sql_remove_all_jobs, remove_all_jobs);
Expand Down Expand Up @@ -620,8 +607,7 @@ void Database::close() {
FINALIZE(get_tags);
FINALIZE(get_all_tags);
FINALIZE(get_edges);
FINALIZE(get_job_visualization);
FINALIZE(get_file_access);
FINALIZE(get_file_dependency);
FINALIZE(get_output_files);
FINALIZE(remove_output_files);
FINALIZE(remove_all_jobs);
Expand Down Expand Up @@ -1191,6 +1177,95 @@ std::string Time::as_string() const {
return buf;
}

// Serialize this JobReflection into a JSON object for the --timeline view.
// Collection fields are flattened into display strings (space-joined argv
// words, "<br>"-separated lists) rather than nested JSON arrays — the
// consumer renders these values directly as HTML.
JAST JobReflection::to_json() const {
  JAST json(JSON_OBJECT);
  json.add("job", job);
  json.add("label", label.c_str());
  json.add("stale", stale);
  json.add("directory", directory.c_str());

  // Argv words joined with single spaces (note: a trailing space remains).
  std::stringstream commandline_stream;
  for (const std::string &line : commandline) {
    commandline_stream << line << " ";
  }
  json.add("commandline", commandline_stream.str());

  // Environment entries joined the same way as the command line.
  std::stringstream environment_stream;
  for (const std::string &line : environment) {
    environment_stream << line << " ";
  }
  json.add("environment", environment_stream.str());

  json.add("stack", stack.c_str());

  json.add("stdin_file", stdin_file.c_str());

  // Timestamps exported as raw 64-bit integers.
  json.add("starttime", starttime.as_int64());
  json.add("endtime", endtime.as_int64());
  json.add("wake_start", wake_start.as_int64());

  json.add("wake_cmdline", wake_cmdline.c_str());

  // Demultiplex interleaved job output: each write carries a stream tag,
  // 1 = stdout, 2 = stderr; concatenate each stream's chunks in order.
  std::string out_stream;
  std::string err_stream;
  for (auto &write : std_writes) {
    if (write.second == 1) {
      out_stream += write.first;
    }
    if (write.second == 2) {
      err_stream += write.first;
    }
  }

  json.add("stdout_payload", out_stream.c_str());
  json.add("stderr_payload", err_stream.c_str());

  // Resource-usage summary pre-formatted as an HTML fragment ("<br>" breaks).
  std::stringstream usage_stream;
  usage_stream << "status: " << usage.status << "<br>"
               << "runtime: " << usage.runtime << "<br>"
               << "cputime: " << usage.cputime << "<br>"
               << "membytes: " << std::to_string(usage.membytes) << "<br>"
               << "ibytes: " << std::to_string(usage.ibytes) << "<br>"
               << "obytes: " << std::to_string(usage.obytes);
  json.add("usage", usage_stream.str());

  // File lists below are likewise "<br>"-separated path strings.
  std::stringstream visible_stream;
  for (const auto &visible_file : visible) {
    visible_stream << visible_file.path << "<br>";
  }
  json.add("visible", visible_stream.str());

  std::stringstream inputs_stream;
  for (const auto &input : inputs) {
    inputs_stream << input.path << "<br>";
  }
  json.add("inputs", inputs_stream.str());

  std::stringstream outputs_stream;
  for (const auto &output : outputs) {
    outputs_stream << output.path << "<br>";
  }
  json.add("outputs", outputs_stream.str());

  // Tags rendered as a pseudo-JSON HTML blob, one {job, uri, content} per tag.
  std::stringstream tags_stream;
  for (const auto &tag : tags) {
    tags_stream << "{<br>"
                << " job: " << tag.job << ",<br>"
                << " uri: " << tag.uri << ",<br>"
                << " content: " << tag.content << "<br>},<br>";
  }
  json.add("tags", tags_stream.str());

  return json;
}

// Serialize one writer->reader file dependency as {"writer": id, "reader": id}.
// Key insertion order (writer first) is preserved.
JAST FileDependency::to_json() const {
  JAST node(JSON_OBJECT);
  node.add("writer", writer);
  node.add("reader", reader);
  return node;
}

static JobReflection find_one(const Database *db, sqlite3_stmt *query) {
const char *why = "Could not describe job";
JobReflection desc;
Expand Down Expand Up @@ -1277,17 +1352,17 @@ std::vector<std::string> Database::get_outputs() const {
return out;
}

static std::vector<FileAccess> get_all_file_accesses(const Database *db, sqlite3_stmt *query) {
const char *why = "Could not get file access";
std::vector<FileAccess> out;
static std::vector<FileDependency> get_all_file_dependencies(const Database *db,
sqlite3_stmt *query) {
const char *why = "Could not get file dependencies";
std::vector<FileDependency> out;

db->begin_txn();
while (sqlite3_step(query) == SQLITE_ROW) {
FileAccess access;
// grab flat values
access.type = sqlite3_column_int(query, 0);
access.job = sqlite3_column_int64(query, 1);
out.emplace_back(access);
FileDependency dep;
dep.writer = sqlite3_column_int64(query, 0);
dep.reader = sqlite3_column_int64(query, 1);
out.emplace_back(dep);
}
finish_stmt(why, query, db->imp->debugdb);
db->end_txn();
Expand Down Expand Up @@ -1372,10 +1447,6 @@ std::vector<std::pair<std::string, int>> Database::get_interleaved_output(long j
return out;
}

std::vector<JobReflection> Database::get_job_visualization() const {
return find_all(this, imp->get_job_visualization);
}

std::vector<FileAccess> Database::get_file_accesses() const {
return get_all_file_accesses(this, imp->get_file_access);
std::vector<FileDependency> Database::get_file_dependencies() const {
return get_all_file_dependencies(this, imp->get_file_dependency);
}
15 changes: 10 additions & 5 deletions src/runtime/database.h
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@
#include <string>
#include <vector>

#include "json/json5.h"

struct FileReflection {
std::string path;
std::string hash;
Expand Down Expand Up @@ -78,6 +80,8 @@ struct JobReflection {
std::vector<FileReflection> inputs;
std::vector<FileReflection> outputs;
std::vector<JobTag> tags;

JAST to_json() const;
};

struct JobEdge {
Expand All @@ -86,9 +90,11 @@ struct JobEdge {
JobEdge(long user_, long used_) : user(user_), used(used_) {}
};

// A single file-mediated dependency between two jobs, derived from wake.db's
// filetree table: `reader` consumed a file that `writer` produced.
struct FileDependency {
  long writer;  // The job that writes a file
  long reader;  // The job that reads said file

  JAST to_json() const;
};

struct Database {
Expand Down Expand Up @@ -172,8 +178,7 @@ struct Database {
std::vector<JobEdge> get_edges();
std::vector<JobTag> get_tags();

std::vector<JobReflection> get_job_visualization() const;
std::vector<FileAccess> get_file_accesses() const;
std::vector<FileDependency> get_file_dependencies() const;

std::vector<std::pair<std::string, int>> get_interleaved_output(long job_id) const;
};
Expand Down
Loading

0 comments on commit d9e2a5b

Please sign in to comment.