glx/swrast: Do more GLX extension setup
This gets you nice things like core contexts when using Xvfb. Also, no, MESA_copy_sub_buffer is not enabled automatically.

Reviewed-by: James Jones <jajones@nvidia.com>
Reviewed-by: Jon Turney <jon.turney@dronecode.org.uk>
Signed-off-by: Adam Jackson <ajax@redhat.com>
This commit is contained in:
parent
cbd3cfbad3
commit
2d7194334a
|
@@ -71,6 +71,8 @@ struct __GLXDRIscreen {
|
|||
const __DRIcopySubBufferExtension *copySubBuffer;
|
||||
const __DRItexBufferExtension *texBuffer;
|
||||
const __DRIconfig **driConfigs;
|
||||
|
||||
unsigned char glx_enable_bits[__GLX_EXT_BYTES];
|
||||
};
|
||||
|
||||
struct __GLXDRIcontext {
|
||||
|
@@ -394,21 +396,34 @@ initializeExtensions(__GLXDRIscreen * screen)
|
|||
const __DRIextension **extensions;
|
||||
int i;
|
||||
|
||||
if (screen->swrast->base.version >= 3) {
|
||||
__glXEnableExtension(screen->glx_enable_bits,
|
||||
"GLX_ARB_create_context");
|
||||
__glXEnableExtension(screen->glx_enable_bits,
|
||||
"GLX_ARB_create_context_profile");
|
||||
__glXEnableExtension(screen->glx_enable_bits,
|
||||
"GLX_EXT_create_context_es2_profile");
|
||||
}
|
||||
|
||||
/* these are harmless to enable unconditionally */
|
||||
__glXEnableExtension(screen->glx_enable_bits, "GLX_EXT_framebuffer_sRGB");
|
||||
__glXEnableExtension(screen->glx_enable_bits, "GLX_ARB_fbconfig_float");
|
||||
__glXEnableExtension(screen->glx_enable_bits, "GLX_SGI_make_current_read");
|
||||
|
||||
extensions = screen->core->getExtensions(screen->driScreen);
|
||||
|
||||
for (i = 0; extensions[i]; i++) {
|
||||
if (strcmp(extensions[i]->name, __DRI_COPY_SUB_BUFFER) == 0) {
|
||||
screen->copySubBuffer =
|
||||
(const __DRIcopySubBufferExtension *) extensions[i];
|
||||
/* GLX_MESA_copy_sub_buffer is always enabled. */
|
||||
__glXEnableExtension(screen->glx_enable_bits,
|
||||
"GLX_MESA_copy_sub_buffer");
|
||||
}
|
||||
|
||||
if (strcmp(extensions[i]->name, __DRI_TEX_BUFFER) == 0) {
|
||||
screen->texBuffer = (const __DRItexBufferExtension *) extensions[i];
|
||||
/* GLX_EXT_texture_from_pixmap is always enabled. */
|
||||
}
|
||||
|
||||
/* Ignore unknown extensions */
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -420,6 +435,7 @@ __glXDRIscreenProbe(ScreenPtr pScreen)
|
|||
{
|
||||
const char *driverName = "swrast";
|
||||
__GLXDRIscreen *screen;
|
||||
size_t buffer_size;
|
||||
|
||||
screen = calloc(1, sizeof *screen);
|
||||
if (screen == NULL)
|
||||
|
@@ -431,6 +447,8 @@ __glXDRIscreenProbe(ScreenPtr pScreen)
|
|||
screen->base.swapInterval = NULL;
|
||||
screen->base.pScreen = pScreen;
|
||||
|
||||
__glXInitExtensionEnableBits(screen->glx_enable_bits);
|
||||
|
||||
screen->driver = glxProbeDriver(driverName,
|
||||
(void **) &screen->core,
|
||||
__DRI_CORE, 1,
|
||||
|
@@ -459,6 +477,19 @@ __glXDRIscreenProbe(ScreenPtr pScreen)
|
|||
|
||||
__glXScreenInit(&screen->base, pScreen);
|
||||
|
||||
/* The first call simply determines the length of the extension string.
|
||||
* This allows us to allocate some memory to hold the extension string,
|
||||
* but it requires that we call __glXGetExtensionString a second time.
|
||||
*/
|
||||
buffer_size = __glXGetExtensionString(screen->glx_enable_bits, NULL);
|
||||
if (buffer_size > 0) {
|
||||
free(screen->base.GLXextensions);
|
||||
|
||||
screen->base.GLXextensions = xnfalloc(buffer_size);
|
||||
(void) __glXGetExtensionString(screen->glx_enable_bits,
|
||||
screen->base.GLXextensions);
|
||||
}
|
||||
|
||||
screen->base.GLXmajor = 1;
|
||||
screen->base.GLXminor = 4;
|
||||
|
||||
|
|
Loading…
Reference in New Issue