3
0
Fork 0
forked from mirrors/nixpkgs

Add primus and extend bumblebee to support 32bit/64bit multilib architectures.

Using primusrun will work as expected in a multilib environment. Even if the initial program
executes another program of the other architecture, assuming the program does not modify
LD_LIBRARY_PATH inappropriately.

This does not update virtualgl for seamless multilib. I was unable to get a mixed 64/32 bit
environment to work with VirtualGL. The mechanism VirtualGL uses to inject the fake GL library would
fail if both 32bit and 64 bit libraries were in the environment. Instead the bumblebee package
creates an optirun32 executable that can be used to run a 32bit executable with optimus on a 64 bit
host. This is not created if the host is 32bit.

For my usage, gaming under wine, the primusrun executable works as expected regardless of
32bit/64bit.
This commit is contained in:
Corey O'Connor 2014-05-03 07:28:21 -07:00
parent 20487919b2
commit b2f3e10a35
6 changed files with 160 additions and 26 deletions

View file

@ -30,7 +30,7 @@ with lib;
boot.kernelModules = [ "bbswitch" ];
boot.extraModulePackages = [ kernel.bbswitch kernel.nvidia_x11 ];
environment.systemPackages = [ pkgs.bumblebee ];
environment.systemPackages = [ pkgs.bumblebee pkgs.primus ];
systemd.services.bumblebeed = {
description = "Bumblebee Hybrid Graphics Switcher";

View file

@ -1,54 +1,82 @@
# The bumblebee package allows a program to be rendered on a
# dedicated video card by spawning an additional X11 server
# and streaming the results via VirtualGL to the primary server.
# dedicated video card by spawning an additional X11 server and
# streaming the results via VirtualGL or primus to the primary server.
# The package is rather chaotic; it's also quite recent.
# As it may change a lot, some of the hacks in this nix expression
# will hopefully not be needed in the future anymore.
# To test: make sure that the 'bbswitch' kernel module is installed,
# then run 'bumblebeed' as root and 'optirun glxgears' as user.
# To test:
# 1. make sure that the 'bbswitch' kernel module is installed,
# 2. then run 'bumblebeed' as root
# 3. Then either 'optirun glxinfo' or 'primusrun glxinfo' as user.
#
# The glxinfo output should indicate the NVidia driver is being used
# and all expected extensions are supported.
#
# To use at startup, see hardware.bumblebee options.
# For now, this Nix expression supports only the native nvidia driver.
# It should not be hard to generalize this approach to support the
# nouveau driver as well (parameterize commonEnv over the module
# package, and parameterize the two wrappers as well)
# nouveau driver as well (parameterize hostEnv, i686Env over the
# module package, and parameterize the two wrappers as well)
{ stdenv, fetchurl, pkgconfig, help2man
, libX11, glibc, glib, libbsd
, makeWrapper, buildEnv, module_init_tools
, linuxPackages, virtualgl, xorg, xkeyboard_config
, xorg, xkeyboard_config
, nvidia_x11, virtualgl
# The below should only be non-null on an x86_64 system. On an i686
# system the above nvidia_x11 and virtualgl will be the i686 packages.
# TODO: Confusing. Perhaps use "SubArch" instead of i686?
, nvidia_x11_i686 ? null
, virtualgl_i686 ? null
}:
with stdenv.lib;
let
version = "3.2.1";
name = "bumblebee-${version}";
# isolated X11 environment with the nvidia module
# it should include all components needed for bumblebeed and
# Isolated X11 environment without the acceleration driver module.
# Includes the rest of the components needed for bumblebeed and
# optirun to spawn the second X server and to connect to it.
commonEnv = buildEnv {
x11Env = buildEnv {
name = "bumblebee-env";
paths = [
module_init_tools
linuxPackages.nvidia_x11
xorg.xorgserver
xorg.xrandr
xorg.xrdb
xorg.setxkbmap
xorg.libX11
xorg.libXext
xorg.xf86inputevdev
];
};
# The environment for the host architecture.
hostEnv = buildEnv {
name = "bumblebee-x64-env";
paths = [
nvidia_x11
virtualgl
];
# the nvidia GLX module overwrites the one of xorgserver,
# thus nvidia_x11 must be before xorgserver in the paths.
ignoreCollisions = true;
};
# The environment for the sub architecture, i686, if there is one
i686Env = if virtualgl_i686 != null
then buildEnv {
name = "bumblebee-i686-env";
paths = [
nvidia_x11_i686
virtualgl_i686
];
}
else null;
allEnvs = [hostEnv] ++ optional (i686Env != null) i686Env;
ldPathString = makeLibraryPath allEnvs;
in stdenv.mkDerivation {
inherit name;
@ -63,6 +91,7 @@ in stdenv.mkDerivation {
# Substitute the path to the actual modinfo program in module.c.
# Note: module.c also calls rmmod and modprobe, but those just have to
# be in PATH, and thus no action for them is required.
substituteInPlace src/module.c \
--replace "/sbin/modinfo" "${module_init_tools}/sbin/modinfo"
@ -75,26 +104,38 @@ in stdenv.mkDerivation {
# Note that it has several runtime dependencies.
buildInputs = [ stdenv makeWrapper pkgconfig help2man libX11 glib libbsd ];
# The order of LDPATH is very specific: First X11 then the host
# environment then the optional sub architecture paths.
#
# The order for MODPATH is the opposite: First the environment that
# includes the acceleration driver. As this is used for the X11
# server, which runs under the host architecture, this does not
# include the sub architecture components.
configureFlags = [
"--with-udev-rules=$out/lib/udev/rules.d"
"CONF_DRIVER=nvidia"
"CONF_DRIVER_MODULE_NVIDIA=nvidia"
"CONF_LDPATH_NVIDIA=${commonEnv}/lib"
"CONF_MODPATH_NVIDIA=${commonEnv}/lib/xorg/modules"
"CONF_LDPATH_NVIDIA=${x11Env}/lib:${ldPathString}"
"CONF_MODPATH_NVIDIA=${hostEnv}/lib/xorg/modules,${x11Env}/lib/xorg/modules"
];
# create a wrapper environment for bumblebeed and optirun
postInstall = ''
wrapProgram "$out/sbin/bumblebeed" \
--prefix PATH : "${commonEnv}/sbin:${commonEnv}/bin:\$PATH" \
--prefix LD_LIBRARY_PATH : "${commonEnv}/lib:\$LD_LIBRARY_PATH" \
--prefix PATH : "${x11Env}/sbin:${x11Env}/bin:${hostEnv}/bin:\$PATH" \
--prefix LD_LIBRARY_PATH : "${x11Env}/lib:${hostEnv}/lib:\$LD_LIBRARY_PATH" \
--set FONTCONFIG_FILE "/etc/fonts/fonts.conf" \
--set XKB_BINDIR "${xorg.xkbcomp}/bin" \
--set XKB_DIR "${xkeyboard_config}/etc/X11/xkb"
wrapProgram "$out/bin/optirun" \
--prefix PATH : "${commonEnv}/sbin:${commonEnv}/bin" \
--prefix LD_LIBRARY_PATH : "${commonEnv}/lib" \
'';
--prefix PATH : "${hostEnv}/bin"
'' + (if i686Env == null
then ""
else ''
makeWrapper "$out/bin/.optirun-wrapped" "$out/bin/optirun32" \
--prefix PATH : "${i686Env}/bin"
'');
meta = {
homepage = http://github.com/Bumblebee-Project/Bumblebee;

View file

@ -0,0 +1,12 @@
source $stdenv/setup

# Copy the sources into the build directory. The store copy is read-only,
# so make our working copy writable before running make.
cp -r $src src
chmod -R u+w src
cd src

# Tell the primus Makefile where to install and which real libGL
# implementations to dispatch between at runtime:
#   PRIMUS_libGLa — the accelerated (nvidia) libGL
#   PRIMUS_libGLd — the displaying (mesa) libGL
# ($nvidia and $mesa are passed through from lib.nix via `inherit`.)
export LIBDIR=$out/lib
export PRIMUS_libGLa=$nvidia/lib/libGL.so
export PRIMUS_libGLd=$mesa/lib/libGL.so
export PRIMUS_LOAD_GLOBAL=$mesa/lib/libglapi.so

make

# Provide an unversioned libGL.so symlink for consumers that look for it.
ln -s $LIBDIR/libGL.so.1 $LIBDIR/libGL.so

View file

@ -0,0 +1,40 @@
# For a 64bit + 32bit system the LD_LIBRARY_PATH must contain both the 32bit and 64bit primus
# libraries. Providing a different primusrun for each architecture would not work as expected:
# e.g. running Steam under Wine can involve both 32bit and 64bit processes, all of which
# inherit the same LD_LIBRARY_PATH.
# Other distributions do the same.
{ stdenv
, primusLib
, writeScript
# Only non-null on a multilib (x86_64) host; provides the 32bit primus library.
, primusLib_i686 ? null
}:
with stdenv.lib;
let
  version = "1.0.0748176";
  # Search path with the host-arch primus library and, when available, the i686 one.
  ldPath = makeLibraryPath ([primusLib] ++ optional (primusLib_i686 != null) primusLib_i686);
  primusrun = writeScript "primusrun"
    ''
      #!${stdenv.shell}
      # Prepend to (not replace) any existing LD_LIBRARY_PATH so programs that
      # rely on it keep working. Note: a plain $ needs no escaping in a Nix
      # indented string; writing \$ here would emit a literal backslash-dollar,
      # which the shell would treat as an escaped (never-expanded) dollar.
      export LD_LIBRARY_PATH=${ldPath}:$LD_LIBRARY_PATH
      # see: https://github.com/amonakov/primus/issues/138
      # On some systems (observed with the intel driver as of 3.16.6) the driver
      # dies when the pixel buffers read from the source memory directly.
      # Setting PRIMUS_UPLOAD causes an indirection through textures which
      # avoids this issue.
      export PRIMUS_UPLOAD=1
      exec "$@"
    '';
in
stdenv.mkDerivation {
  name = "primus-${version}";
  # Trivial builder: just install the wrapper script.
  builder = writeScript "builder"
    ''
      #!${stdenv.shell}
      source $stdenv/setup
      mkdir -p $out/bin
      cp ${primusrun} $out/bin/primusrun
    '';
  meta = {
    homepage = "https://github.com/amonakov/primus";
    description = "Faster OpenGL offloading for Bumblebee";
    # meta.maintainers is a list; a singular `maintainer` attribute is ignored.
    maintainers = [ maintainers.coconnor ];
  };
}

View file

@ -0,0 +1,20 @@
# Builds libGL.so.1 from the primus sources; consumed by ../primus/default.nix.
{ stdenv, fetchgit
, x11, mesa
, nvidia
}:
let
  version = "1.0.0748176";
in
stdenv.mkDerivation {
  name = "primus-lib-${version}";
  src = fetchgit {
    # Use https: GitHub no longer serves the unauthenticated git:// protocol,
    # and bare (unquoted) URLs are deprecated Nix syntax anyway.
    url = "https://github.com/amonakov/primus.git";
    rev = "074817614c014e3a99259388cb18fd54648b659a";
    sha256 = "0mrh432md6zrm16avxyk57mgszrqpgwdjahspchvlaccqxp3x82v";
  };
  # Exposed to builder.sh as $nvidia and $mesa so it can point primus at the
  # real libGL implementations it dispatches between.
  inherit nvidia mesa;
  buildInputs = [ x11 mesa ];
  builder = ./builder.sh;
}

View file

@ -10993,7 +10993,28 @@ let
virtualgl = callPackage ../tools/X11/virtualgl { };
bumblebee = callPackage ../tools/X11/bumblebee { };
primus = callPackage ../tools/X11/primus {
primusLib = callPackage ../tools/X11/primus/lib.nix {
nvidia = linuxPackages.nvidia_x11;
};
primusLib_i686 = if system == "x86_64-linux"
then callPackage_i686 ../tools/X11/primus/lib.nix {
nvidia = pkgsi686Linux.linuxPackages.nvidia_x11.override { libsOnly = true; };
}
else null;
};
bumblebee = callPackage ../tools/X11/bumblebee {
nvidia_x11 = linuxPackages.nvidia_x11;
nvidia_x11_i686 = if system == "x86_64-linux"
then pkgsi686Linux.linuxPackages.nvidia_x11.override { libsOnly = true; }
else null;
virtualgl = virtualgl;
virtualgl_i686 = if system == "x86_64-linux"
then pkgsi686Linux.virtualgl
else null;
};
vkeybd = callPackage ../applications/audio/vkeybd {
inherit (xlibs) libX11;