glx: Remove default server glx extension string
This existed only to be strdup'd and then immediately freed.

Reviewed-by: Eric Anholt <eric@anholt.net>
Reviewed-by: Emil Velikov <emil.velikov@collabora.com>
Signed-off-by: Adam Jackson <ajax@redhat.com>
This commit is contained in:
parent
44e1c97ca6
commit
3a21da59e5
|
@ -1003,8 +1003,6 @@ __glXDRIscreenProbe(ScreenPtr pScreen)
|
|||
*/
|
||||
buffer_size = __glXGetExtensionString(screen->glx_enable_bits, NULL);
|
||||
if (buffer_size > 0) {
|
||||
free(screen->base.GLXextensions);
|
||||
|
||||
screen->base.GLXextensions = xnfalloc(buffer_size);
|
||||
(void) __glXGetExtensionString(screen->glx_enable_bits,
|
||||
screen->base.GLXextensions);
|
||||
|
|
|
@ -495,8 +495,6 @@ __glXDRIscreenProbe(ScreenPtr pScreen)
|
|||
*/
|
||||
buffer_size = __glXGetExtensionString(screen->glx_enable_bits, NULL);
|
||||
if (buffer_size > 0) {
|
||||
free(screen->base.GLXextensions);
|
||||
|
||||
screen->base.GLXextensions = xnfalloc(buffer_size);
|
||||
(void) __glXGetExtensionString(screen->glx_enable_bits,
|
||||
screen->base.GLXextensions);
|
||||
|
|
|
@ -154,26 +154,8 @@ static const char GLServerExtensions[] =
|
|||
"GL_SGIX_shadow_ambient "
|
||||
"GL_SUN_slice_accum ";
|
||||
|
||||
/*
|
||||
** We have made the simplifying assuption that the same extensions are
|
||||
** supported across all screens in a multi-screen system.
|
||||
*/
|
||||
unsigned glxMajorVersion = SERVER_GLX_MAJOR_VERSION;
|
||||
unsigned glxMinorVersion = SERVER_GLX_MINOR_VERSION;
|
||||
static char GLXServerExtensions[] =
|
||||
"GLX_ARB_multisample "
|
||||
"GLX_EXT_visual_info "
|
||||
"GLX_EXT_visual_rating "
|
||||
"GLX_EXT_import_context "
|
||||
"GLX_EXT_texture_from_pixmap "
|
||||
"GLX_OML_swap_method "
|
||||
"GLX_SGI_make_current_read "
|
||||
#ifndef __APPLE__
|
||||
"GLX_SGIS_multisample "
|
||||
#endif
|
||||
"GLX_SGIX_fbconfig "
|
||||
"GLX_SGIX_pbuffer "
|
||||
"GLX_MESA_copy_sub_buffer ";
|
||||
|
||||
static Bool
|
||||
glxCloseScreen(ScreenPtr pScreen)
|
||||
|
@ -329,7 +311,7 @@ __glXScreenInit(__GLXscreen * pGlxScreen, ScreenPtr pScreen)
|
|||
|
||||
pGlxScreen->pScreen = pScreen;
|
||||
pGlxScreen->GLextensions = strdup(GLServerExtensions);
|
||||
pGlxScreen->GLXextensions = strdup(GLXServerExtensions);
|
||||
pGlxScreen->GLXextensions = NULL;
|
||||
|
||||
/* All GLX providers must support all of the functionality required for at
|
||||
* least GLX 1.2. If the provider supports a higher version, the GLXminor
|
||||
|
|
|
@ -566,8 +566,6 @@ __glXAquaScreenProbe(ScreenPtr pScreen)
|
|||
unsigned int buffer_size =
|
||||
__glXGetExtensionString(screen->glx_enable_bits, NULL);
|
||||
if (buffer_size > 0) {
|
||||
free(screen->base.GLXextensions);
|
||||
|
||||
screen->base.GLXextensions = xnfalloc(buffer_size);
|
||||
__glXGetExtensionString(screen->glx_enable_bits,
|
||||
screen->base.GLXextensions);
|
||||
|
|
|
@ -743,8 +743,6 @@ glxWinScreenProbe(ScreenPtr pScreen)
|
|||
unsigned int buffer_size =
|
||||
__glXGetExtensionString(screen->glx_enable_bits, NULL);
|
||||
if (buffer_size > 0) {
|
||||
free(screen->base.GLXextensions);
|
||||
|
||||
screen->base.GLXextensions = xnfalloc(buffer_size);
|
||||
__glXGetExtensionString(screen->glx_enable_bits,
|
||||
screen->base.GLXextensions);
|
||||
|
|
Loading…
Reference in New Issue