Xdmx: Initialize DMX extension even if not built with GLX support

dmxAddExtensions takes an argument to determine if it should just
initialize the DMX extension, or both DMX & GLX, but if GLX wasn't
compiled in, the entire call was #ifdef'ed out, leaving the DMX
extension unavailable.

Signed-off-by: Alan Coopersmith <alan.coopersmith@oracle.com>
Reviewed-by: Julien Cristau <jcristau@debian.org>
Signed-off-by: Keith Packard <keithp@keithp.com>
commit d695cb7fb6
parent bd70def077

@@ -612,6 +612,8 @@ InitOutput(ScreenInfo * pScreenInfo, int argc, char *argv[])
 #ifdef GLXEXT
     static Bool glxSupported = TRUE;
+#else
+    const Bool glxSupported = FALSE;
 #endif
 
     if (dmxGeneration != serverGeneration) {
@@ -740,10 +742,10 @@ InitOutput(ScreenInfo * pScreenInfo, int argc, char *argv[])
     /* Check if GLX extension exists on all back-end servers */
     for (i = 0; i < dmxNumScreens; i++)
         glxSupported &= (dmxScreens[i].glxMajorOpcode > 0);
+#endif
 
     if (serverGeneration == 1)
         dmxAddExtensions(glxSupported);
-#endif
 
     /* Tell dix layer about the backend displays */
     for (i = 0; i < dmxNumScreens; i++) {
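
The net effect: glxSupported is now defined in every build, so the
dmxAddExtensions() call can sit outside the GLXEXT guard and the DMX
extension is registered whether or not GLX was compiled in. The
stand-alone C sketch below mirrors that control flow; the body of
dmxAddExtensions() here is a hypothetical stand-in for illustration,
not the actual Xdmx implementation. Compile with and without -DGLXEXT
to exercise both paths.

#include <stdio.h>

typedef int Bool;
#define TRUE  1
#define FALSE 0

/* Hypothetical stand-in for Xdmx's dmxAddExtensions(): the argument only
 * controls whether GLX is set up in addition to DMX; the DMX extension
 * itself is always registered. */
static void dmxAddExtensions(Bool glxSupported)
{
    puts("DMX extension initialized");
    if (glxSupported)
        puts("GLX extension initialized");
}

int main(void)
{
#ifdef GLXEXT
    static Bool glxSupported = TRUE;   /* writable: cleared if any back-end
                                          server lacks the GLX opcode */
#else
    const Bool glxSupported = FALSE;   /* GLX support not compiled in */
#endif

    /* After the patch this call is reached in both configurations; before,
     * the whole call sat inside #ifdef GLXEXT and never ran without GLX. */
    dmxAddExtensions(glxSupported);
    return 0;
}

The static/const split in the first hunk follows from usage: with GLXEXT
the flag must be writable because the back-end loop may clear it, while
without GLXEXT it is simply a compile-time constant FALSE.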