Skip to content

Commit

Permalink
feat: added support for nvidia container toolkit
Browse files Browse the repository at this point in the history
  • Loading branch information
ABeltramo committed Jul 2, 2024
1 parent 1d8f181 commit ef9f735
Show file tree
Hide file tree
Showing 6 changed files with 79 additions and 15 deletions.
14 changes: 14 additions & 0 deletions src/moonlight-server/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,20 @@ else ()
endif ()
FetchContent_MakeAvailable(cpptrace)

# Fetch Boost.JSON (standalone repo) so the moonlight server can parse and
# patch the Docker container-create JSON (NVIDIA Container Toolkit support).
FetchContent_Declare(
boost_json
GIT_REPOSITORY https://github.com/boostorg/json.git
GIT_TAG "boost-1.75.0")

# Manual Populate instead of FetchContent_MakeAvailable: we only want the
# headers, not Boost.JSON's own CMake build.
# NOTE(review): presumably MakeAvailable is avoided because the upstream
# CMakeLists expects a full Boost tree — confirm; also note that
# FetchContent_Populate is deprecated in newer CMake releases.
FetchContent_GetProperties(boost_json)
if (NOT boost_json_POPULATED)
FetchContent_Populate(boost_json)

# Header-only consumption: an INTERFACE target that just exposes the
# include directory. NOTE(review): header-only Boost.JSON requires
# including <boost/json/src.hpp> in exactly one translation unit (as
# docker.hpp does in this commit) — verify it is not pulled into
# multiple TUs, which would cause ODR violations.
add_library(boost_json INTERFACE)
target_include_directories(boost_json INTERFACE ${boost_json_SOURCE_DIR}/include)
endif ()
# Linked outside the guard so the dependency is applied on every configure,
# even when the content was populated by a previous run.
# (target_link_libraries_system is a project-provided wrapper — marks the
# includes as SYSTEM to silence third-party warnings; TODO confirm.)
target_link_libraries_system(wolf_runner PRIVATE boost_json)

##############################

find_package(Threads REQUIRED)
Expand Down
56 changes: 53 additions & 3 deletions src/moonlight-server/runners/docker.hpp
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
#pragma once
#include <boost/json/src.hpp>
#include <boost/thread/thread.hpp>
#include <chrono>
#include <control/control.hpp>
Expand Down Expand Up @@ -94,7 +95,8 @@ class RunDocker : public state::Runner {
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) override;
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) override;

toml::value serialise() override {
return {{"type", "docker"},
Expand Down Expand Up @@ -140,7 +142,8 @@ void RunDocker::run(std::size_t session_id,
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) {
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) {

std::vector<std::string> full_env;
full_env.insert(full_env.end(), this->container.env.begin(), this->container.env.end());
Expand Down Expand Up @@ -186,6 +189,53 @@ void RunDocker::run(std::size_t session_id,
"[DOCKER] Unable to use fake-udev, check the env variable WOLF_DOCKER_FAKE_UDEV_PATH and the file at {}",
fake_udev_cli_path);
}

// Add equivalent of --gpu=all if on NVIDIA without the custom driver volume
auto final_json_opts = this->base_create_json;
if (get_vendor(render_node) == NVIDIA && !utils::get_env("NVIDIA_DRIVER_VOLUME_NAME")) {
logs::log(logs::info, "NVIDIA_DRIVER_VOLUME_NAME not set, assuming nvidia driver toolkit is installed..");
boost::json::error_code ec;
auto parsed_json = boost::json::parse({final_json_opts.data(), final_json_opts.size()}, ec).as_object();
if (!ec) {
auto default_gpu_config = boost::json::array{ // [
boost::json::object{// {
{"Driver", "nvidia"},
{"DeviceIDs", {"all"}},
{"Capabilities", boost::json::array{{"gpu"}}}}};
if (auto host_config_ptr = parsed_json.if_contains("HostConfig")) {
auto host_config = host_config_ptr->as_object();
if (host_config.find("DeviceRequests") == host_config.end()) {
host_config["DeviceRequests"] = default_gpu_config;
parsed_json["HostConfig"] = host_config;
final_json_opts = boost::json::serialize(parsed_json);
} else {
logs::log(logs::debug, "DeviceRequests manually set in base_create_json, skipping..");
}
} else {
logs::log(logs::warning, "HostConfig not found in base_create_json.");
parsed_json["HostConfig"] = boost::json::object{{"DeviceRequests", default_gpu_config}};
final_json_opts = boost::json::serialize(parsed_json);
}
}

// Setup -e NVIDIA_VISIBLE_DEVICES=all -e NVIDIA_DRIVER_CAPABILITIES=all if not present
{
auto nvd_env = std::find_if(full_env.begin(), full_env.end(), [](const std::string &env) {
return env.find("NVIDIA_VISIBLE_DEVICES") != std::string::npos;
});
if (nvd_env == full_env.end()) {
full_env.push_back("NVIDIA_VISIBLE_DEVICES=all");
}

auto nvd_caps_env = std::find_if(full_env.begin(), full_env.end(), [](const std::string &env) {
return env.find("NVIDIA_DRIVER_CAPABILITIES") != std::string::npos;
});
if (nvd_caps_env == full_env.end()) {
full_env.push_back("NVIDIA_DRIVER_CAPABILITIES=all");
}
}
}

Container new_container = {.id = "",
.name = fmt::format("{}_{}", this->container.name, session_id),
.image = this->container.image,
Expand All @@ -195,7 +245,7 @@ void RunDocker::run(std::size_t session_id,
.devices = devices,
.env = full_env};

if (auto docker_container = docker_api.create(new_container, this->base_create_json)) {
if (auto docker_container = docker_api.create(new_container, final_json_opts)) {
auto container_id = docker_container->id;
docker_api.start_by_id(container_id);

Expand Down
3 changes: 2 additions & 1 deletion src/moonlight-server/runners/process.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ void RunProcess::run(std::size_t session_id,
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) {
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) {
logs::log(logs::debug, "[PROCESS] Starting process: {}", this->run_cmd);

std::future<std::string> std_out, err_out;
Expand Down
3 changes: 2 additions & 1 deletion src/moonlight-server/runners/process.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ class RunProcess : public state::Runner {
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) override;
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) override;

toml::value serialise() override {
return {{"type", "process"}, {"run_cmd", this->run_cmd}};
Expand Down
3 changes: 2 additions & 1 deletion src/moonlight-server/state/data-structures.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,8 @@ struct Runner {
std::shared_ptr<devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) = 0;
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) = 0;

virtual toml::value serialise() = 0;
};
Expand Down
15 changes: 6 additions & 9 deletions src/moonlight-server/wolf.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -270,21 +270,17 @@ auto setup_sessions_handlers(const immer::box<state::AppState> &app_state,
/* Adding custom state folder */
mounted_paths.push_back({session->app_state_folder, "/home/retro"});

/* Additional GPU devices */
/* GPU specific adjustments */
auto additional_devices = linked_devices(render_node);
std::copy(additional_devices.begin(), additional_devices.end(), std::back_inserter(all_devices));

/* nvidia needs some extra paths */
if (get_vendor(render_node) == NVIDIA) {
auto gpu_vendor = get_vendor(render_node);
if (gpu_vendor == NVIDIA) {
if (auto driver_volume = utils::get_env("NVIDIA_DRIVER_VOLUME_NAME")) {
logs::log(logs::info, "Mounting nvidia driver {}:/usr/nvidia", driver_volume);
mounted_paths.push_back({driver_volume, "/usr/nvidia"});
} else {
logs::log(logs::info, "NVIDIA_DRIVER_VOLUME_NAME not set, assuming nvidia driver toolkit is installed..");
}
}

if (get_vendor(render_node) == INTEL) {
} else if (gpu_vendor == INTEL) {
full_env.set("INTEL_DEBUG", "norbc"); // see: https://github.com/games-on-whales/wolf/issues/50
}

Expand Down Expand Up @@ -319,7 +315,8 @@ auto setup_sessions_handlers(const immer::box<state::AppState> &app_state,
session_devices_queue,
all_devices.persistent(),
mounted_paths.persistent(),
full_env.persistent());
full_env.persistent(),
render_node);

/* App exited, cleanup */
logs::log(logs::debug, "[STREAM_SESSION] Remove virtual audio sink");
Expand Down

0 comments on commit ef9f735

Please sign in to comment.