Merge branch 'refs/heads/dev-nvidia-ct' into stable
# Conflicts:
#	src/core/src/platforms/all/helpers/helpers/utils.hpp
ABeltramo committed Jul 24, 2024
2 parents f2d2b7a + 1beab40 commit c8fdfd2
Showing 12 changed files with 98 additions and 52 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/linux-build-test.yml
@@ -24,7 +24,7 @@ jobs:
g++ \
cmake \
ninja-build \
libboost-locale-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev \
libboost-locale-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev libboost-container-dev \
libssl-dev \
libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
libwayland-dev wayland-protocols libinput-dev libxkbcommon-dev libgbm-dev \
@@ -175,7 +175,7 @@ jobs:
sudo apt-get install -y libunwind-dev
sudo apt-get install -y \
ninja-build \
libboost-locale-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev \
libboost-locale-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev libboost-container-dev \
libssl-dev \
libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
libwayland-dev libwayland-server0 libinput-dev libxkbcommon-dev libgbm-dev \
2 changes: 1 addition & 1 deletion docker/wolf.Dockerfile
@@ -14,7 +14,7 @@ RUN apt-get update -y && \
ccache \
git \
clang \
libboost-thread-dev libboost-locale-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev \
libboost-thread-dev libboost-locale-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev libboost-container-dev \
libwayland-dev libwayland-server0 libinput-dev libxkbcommon-dev libgbm-dev \
libcurl4-openssl-dev \
libssl-dev \
2 changes: 1 addition & 1 deletion docs/modules/dev/pages/manual_build.adoc
@@ -131,7 +131,7 @@ apt-get install -y --no-install-recommends \
ccache \
git \
clang \
libboost-thread-dev libboost-locale-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev \
libboost-thread-dev libboost-locale-dev libboost-filesystem-dev libboost-log-dev libboost-stacktrace-dev libboost-container-dev \
libwayland-dev libwayland-server0 libinput-dev libxkbcommon-dev libgbm-dev \
libcurl4-openssl-dev \
libssl-dev \
14 changes: 0 additions & 14 deletions src/core/src/platforms/all/docker/CMakeLists.txt
@@ -11,20 +11,6 @@ target_sources(wolf_docker
docker/json_formatters.hpp
docker/docker.cpp)

FetchContent_Declare(
boost_json
GIT_REPOSITORY https://github.com/boostorg/json.git
GIT_TAG "boost-1.75.0")

FetchContent_GetProperties(boost_json)
if (NOT boost_json_POPULATED)
FetchContent_Populate(boost_json)

add_library(boost_json INTERFACE)
target_include_directories(boost_json INTERFACE ${boost_json_SOURCE_DIR}/include)
target_link_libraries_system(wolf_docker PRIVATE boost_json)
endif ()

find_package(CURL) # REQUIRED UnixSockets removed because it doesn't work in Github Actions
target_link_libraries_system(wolf_docker
PRIVATE
26 changes: 8 additions & 18 deletions src/core/src/platforms/all/docker/docker/docker.cpp
@@ -1,14 +1,15 @@
#include <boost/json/src.hpp>
#include <core/docker.hpp>
#include <curl/curl.h>
#include <docker/formatters.hpp>
#include <docker/json_formatters.hpp>
#include <helpers/logger.hpp>
#include <helpers/utils.hpp>
#include <range/v3/view.hpp>
#include <string_view>

namespace wolf::core::docker {
using namespace ranges;
using namespace utils;
namespace json = boost::json;

enum METHOD : int {
@@ -104,23 +105,12 @@ req(CURL *handle,
}
}

json::value parse(std::string_view json) {
json::error_code ec;
auto parsed = json::parse({json.data(), json.size()}, ec);
if (!ec) {
return parsed;
} else {
logs::log(logs::error, "Error while parsing JSON: {} \n {}", ec.message(), json);
return json::object(); // Returning an empty object should allow us to continue most of the times
}
}

std::optional<Container> DockerAPI::get_by_id(std::string_view id) const {
if (auto conn = docker_connect(socket_path)) {
auto url = fmt::format("http://localhost/{}/containers/{}/json", DOCKER_API_VERSION, id);
auto raw_msg = req(conn.value().get(), GET, url);
if (raw_msg && raw_msg->first == 200) {
auto json = parse(raw_msg->second);
auto json = parse_json(raw_msg->second);
return json::value_to<Container>(json);
} else if (raw_msg) {
logs::log(logs::warning, "[CURL] error {} - {}", raw_msg->first, raw_msg->second);
@@ -135,7 +125,7 @@ std::vector<Container> DockerAPI::get_containers(bool all) const {
auto url = fmt::format("http://localhost/{}/containers/json{}", DOCKER_API_VERSION, all ? "?all=true" : "");
auto raw_msg = req(conn.value().get(), GET, url);
if (raw_msg && raw_msg->first == 200) {
auto json = parse(raw_msg->second);
auto json = parse_json(raw_msg->second);
auto containers = json::value_to<std::vector<json::value>>(json);
return containers //
| ranges::views::transform([this](const json::value &container) { //
@@ -173,7 +163,7 @@ std::optional<Container> DockerAPI::create(const Container &container,
exposed_ports[fmt::format("{}/{}", port.public_port, port.type == docker::TCP ? "tcp" : "udp")] = json::object();
}

auto post_params = parse(custom_params).as_object();
auto post_params = parse_json(custom_params).as_object();
post_params["Image"] = container.image;
merge_array(&post_params, "Env", json::value_from(container.env).as_array());

@@ -190,7 +180,7 @@ std::optional<Container> DockerAPI::create(const Container &container,
auto json_payload = json::serialize(post_params);
auto raw_msg = req(conn.value().get(), POST, url, json_payload);
if (raw_msg && raw_msg->first == 201) {
auto json = parse(raw_msg->second);
auto json = parse_json(raw_msg->second);
auto created_id = json.at("Id").as_string();
return get_by_id(std::string_view{created_id.data(), created_id.size()});
} else if (raw_msg && raw_msg->first == 404) { // 404 returned when the image is not present
@@ -335,7 +325,7 @@ bool DockerAPI::exec(std::string_view id, const std::vector<std::string_view> &c
auto raw_msg = req(conn.value().get(), POST, api_url, json_payload);
if (raw_msg && raw_msg->first == 201) {
// Exec request created, start it
auto json = parse(raw_msg->second);
auto json = parse_json(raw_msg->second);
std::string exec_id = json.at("Id").as_string().data();
api_url = fmt::format("http://localhost/{}/exec/{}/start", DOCKER_API_VERSION, exec_id);
post_params = json::object{{"Detach", false}, {"Tty", false}};
@@ -347,7 +337,7 @@ bool DockerAPI::exec(std::string_view id, const std::vector<std::string_view> &c
api_url = fmt::format("http://localhost/{}/exec/{}/json", DOCKER_API_VERSION, exec_id);
raw_msg = req(conn.value().get(), GET, api_url);
if (raw_msg && raw_msg->first == 200) {
json = parse(raw_msg->second);
json = parse_json(raw_msg->second);
auto exit_code = json.at("ExitCode").as_int64();
if (exit_code != 0) {
logs::log(logs::warning, "Docker exec failed ({}), {}", exit_code, console);
10 changes: 9 additions & 1 deletion src/core/src/platforms/all/helpers/CMakeLists.txt
@@ -24,9 +24,17 @@ FetchContent_MakeAvailable(fmtlib)
target_link_libraries_system(wolf_helpers INTERFACE fmt::fmt-header-only)

# Boost for logging
find_package(Boost REQUIRED COMPONENTS log)
find_package(Boost REQUIRED COMPONENTS log container)
include_directories(${Boost_INCLUDE_DIRS})
target_link_libraries(wolf_helpers INTERFACE ${Boost_LIBRARIES})

FetchContent_Declare(
boost_json
GIT_REPOSITORY https://github.com/boostorg/json.git
GIT_TAG "boost-1.75.0")
set(BOOST_JSON_BUILD_TESTS OFF)
FetchContent_MakeAvailable(boost_json)
target_link_libraries(wolf_helpers INTERFACE Boost::json)

# All users of this library will need at least C++17
target_compile_features(wolf_helpers INTERFACE cxx_std_17)
14 changes: 14 additions & 0 deletions src/core/src/platforms/all/helpers/helpers/utils.hpp
@@ -2,6 +2,8 @@
#include <algorithm>
#include <boost/endian.hpp>
#include <optional>
#include <boost/json.hpp>
#include <helpers/logger.hpp>
#include <range/v3/view.hpp>
#include <sstream>
#include <stdlib.h>
@@ -121,4 +123,16 @@ template <typename T, typename F> T lazy_value_or(const std::optional<T> &opt, F
return fn();
}

namespace json = boost::json;
inline json::value parse_json(std::string_view json) {
json::error_code ec;
auto parsed = json::parse({json.data(), json.size()}, ec);
if (!ec) {
return parsed;
} else {
logs::log(logs::error, "Error while parsing JSON: {} \n {}", ec.message(), json);
return json::object(); // Returning an empty object should allow us to continue most of the times
}
}

} // namespace utils
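
For context, a minimal usage sketch of the new shared helper (not part of the commit; it assumes the wolf helpers headers and their Boost dependencies are on the include path, and the function name example() is purely illustrative):

// Hypothetical caller of utils::parse_json(), mirroring how docker.cpp now uses it.
// parse_json() never throws: malformed input is logged and an empty JSON object is
// returned, so callers can continue with sensible defaults.
#include <helpers/utils.hpp>

void example() {
  auto ok = utils::parse_json(R"({"Id":"abc123"})");
  auto id = ok.at("Id").as_string();           // "abc123"
  auto bad = utils::parse_json("{not json");   // logs an error, yields {}
  bool empty = bad.as_object().empty();        // true
}
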
54 changes: 51 additions & 3 deletions src/moonlight-server/runners/docker.hpp
@@ -94,7 +94,8 @@ class RunDocker : public state::Runner {
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) override;
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) override;

toml::value serialise() override {
return {{"type", "docker"},
@@ -140,7 +141,8 @@ void RunDocker::run(std::size_t session_id,
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) {
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) {

std::vector<std::string> full_env;
full_env.insert(full_env.end(), this->container.env.begin(), this->container.env.end());
@@ -186,6 +188,52 @@ void RunDocker::run(std::size_t session_id,
"[DOCKER] Unable to use fake-udev, check the env variable WOLF_DOCKER_FAKE_UDEV_PATH and the file at {}",
fake_udev_cli_path);
}

// Add equivalent of --gpu=all if on NVIDIA without the custom driver volume
auto final_json_opts = this->base_create_json;
if (get_vendor(render_node) == NVIDIA && !utils::get_env("NVIDIA_DRIVER_VOLUME_NAME")) {
logs::log(logs::info, "NVIDIA_DRIVER_VOLUME_NAME not set, assuming nvidia driver toolkit is installed..");
{
auto parsed_json = utils::parse_json(final_json_opts).as_object();
auto default_gpu_config = boost::json::array{ // [
boost::json::object{// {
{"Driver", "nvidia"},
{"DeviceIDs", {"all"}},
{"Capabilities", boost::json::array{{"gpu"}}}}};
if (auto host_config_ptr = parsed_json.if_contains("HostConfig")) {
auto host_config = host_config_ptr->as_object();
if (host_config.find("DeviceRequests") == host_config.end()) {
host_config["DeviceRequests"] = default_gpu_config;
parsed_json["HostConfig"] = host_config;
final_json_opts = boost::json::serialize(parsed_json);
} else {
logs::log(logs::debug, "DeviceRequests manually set in base_create_json, skipping..");
}
} else {
logs::log(logs::warning, "HostConfig not found in base_create_json.");
parsed_json["HostConfig"] = boost::json::object{{"DeviceRequests", default_gpu_config}};
final_json_opts = boost::json::serialize(parsed_json);
}
}

// Setup -e NVIDIA_VISIBLE_DEVICES=all -e NVIDIA_DRIVER_CAPABILITIES=all if not present
{
auto nvd_env = std::find_if(full_env.begin(), full_env.end(), [](const std::string &env) {
return env.find("NVIDIA_VISIBLE_DEVICES") != std::string::npos;
});
if (nvd_env == full_env.end()) {
full_env.push_back("NVIDIA_VISIBLE_DEVICES=all");
}

auto nvd_caps_env = std::find_if(full_env.begin(), full_env.end(), [](const std::string &env) {
return env.find("NVIDIA_DRIVER_CAPABILITIES") != std::string::npos;
});
if (nvd_caps_env == full_env.end()) {
full_env.push_back("NVIDIA_DRIVER_CAPABILITIES=all");
}
}
}

Container new_container = {.id = "",
.name = fmt::format("{}_{}", this->container.name, session_id),
.image = this->container.image,
@@ -195,7 +243,7 @@ void RunDocker::run(std::size_t session_id,
.devices = devices,
.env = full_env};

if (auto docker_container = docker_api.create(new_container, this->base_create_json)) {
if (auto docker_container = docker_api.create(new_container, final_json_opts)) {
auto container_id = docker_container->id;
docker_api.start_by_id(container_id);

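
For context, a short sketch (not part of the commit) of what the default GPU request built in RunDocker::run above serializes to; per the comment in the diff, the HostConfig.DeviceRequests entry is the create-container API counterpart of the --gpu=all CLI flag, and the field names below follow the Docker Engine API:

#include <boost/json/src.hpp> // header-only mode of Boost.JSON; alternatively include <boost/json.hpp> and link the compiled library
#include <iostream>

int main() {
  // Same shape as default_gpu_config in the diff above.
  boost::json::object gpu_request{{"Driver", "nvidia"},
                                  {"DeviceIDs", {"all"}},
                                  {"Capabilities", boost::json::array{{"gpu"}}}};
  boost::json::object host_config{{"DeviceRequests", boost::json::array{gpu_request}}};
  // Prints: {"DeviceRequests":[{"Driver":"nvidia","DeviceIDs":["all"],"Capabilities":[["gpu"]]}]}
  std::cout << boost::json::serialize(host_config) << "\n";
}
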
3 changes: 2 additions & 1 deletion src/moonlight-server/runners/process.cpp
@@ -15,7 +15,8 @@ void RunProcess::run(std::size_t session_id,
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) {
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) {
logs::log(logs::debug, "[PROCESS] Starting process: {}", this->run_cmd);

std::future<std::string> std_out, err_out;
3 changes: 2 additions & 1 deletion src/moonlight-server/runners/process.hpp
@@ -23,7 +23,8 @@ class RunProcess : public state::Runner {
std::shared_ptr<state::devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) override;
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) override;

toml::value serialise() override {
return {{"type", "process"}, {"run_cmd", this->run_cmd}};
3 changes: 2 additions & 1 deletion src/moonlight-server/state/data-structures.hpp
@@ -46,7 +46,8 @@ struct Runner {
std::shared_ptr<devices_atom_queue> plugged_devices_queue,
const immer::array<std::string> &virtual_inputs,
const immer::array<std::pair<std::string, std::string>> &paths,
const immer::map<std::string, std::string> &env_variables) = 0;
const immer::map<std::string, std::string> &env_variables,
std::string_view render_node) = 0;

virtual toml::value serialise() = 0;
};
15 changes: 6 additions & 9 deletions src/moonlight-server/wolf.cpp
@@ -292,21 +292,17 @@ auto setup_sessions_handlers(const immer::box<state::AppState> &app_state,
/* Adding custom state folder */
mounted_paths.push_back({session->app_state_folder, "/home/retro"});

/* Additional GPU devices */
/* GPU specific adjustments */
auto additional_devices = linked_devices(render_node);
std::copy(additional_devices.begin(), additional_devices.end(), std::back_inserter(all_devices));

/* nvidia needs some extra paths */
if (get_vendor(render_node) == NVIDIA) {
auto gpu_vendor = get_vendor(render_node);
if (gpu_vendor == NVIDIA) {
if (auto driver_volume = utils::get_env("NVIDIA_DRIVER_VOLUME_NAME")) {
logs::log(logs::info, "Mounting nvidia driver {}:/usr/nvidia", driver_volume);
mounted_paths.push_back({driver_volume, "/usr/nvidia"});
} else {
logs::log(logs::info, "NVIDIA_DRIVER_VOLUME_NAME not set, assuming nvidia driver toolkit is installed..");
}
}

if (get_vendor(render_node) == INTEL) {
} else if (gpu_vendor == INTEL) {
full_env.set("INTEL_DEBUG", "norbc"); // see: https://github.com/games-on-whales/wolf/issues/50
}

@@ -321,7 +317,7 @@ auto setup_sessions_handlers(const immer::box<state::AppState> &app_state,
devices_q,
all_devices.persistent(),
mounted_paths.persistent(),
full_env.persistent());
full_env.persistent(),
render_node);

/* App exited, cleanup */
logs::log(logs::debug, "[STREAM_SESSION] Remove virtual audio sink");
