pizzaboy150
Posts: 18
Joined: Sat Sep 05, 2015 11:03 am

Where's my Triangle??

Sat Sep 28, 2019 10:36 am

Hi All,

Done this code a few times and never had this problem before. I know I am overlooking something simple but been messing with the code for 3 days and can't seem to make it work?

Feels to me like the shader is not loading the source code and i am getting no errors?

All ideas welcome!

Code: Select all

#include <iostream>
#include <bcm_host.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>

int main(int argc, char *argv[]) {

	/////////////// 
	// BCM SETUP //
	///////////////
	EGL_DISPMANX_WINDOW_T nativewindow;
	uint32_t display_width;
	uint32_t display_height;

	//get the bcm setup
	bcm_host_init();

	//the variables
	DISPMANX_ELEMENT_HANDLE_T dispman_element;
	DISPMANX_DISPLAY_HANDLE_T dispman_display;
	DISPMANX_UPDATE_HANDLE_T dispman_update;
	DISPMANX_TRANSFORM_T dispman_trans;
	VC_RECT_T dst_rect;
	VC_RECT_T src_rect;

	//create an EGL window surface, passing context width/height
	int success = graphics_get_display_size(0 /* LCD */,
                        &display_width, &display_height);
	if ( success < 0 )
	{
		return 1;
	}

	//You can hardcode the resolution here:
	//display_width = 640;
	//display_height = 480;

	dst_rect.x = 0;
	dst_rect.y = 0;
	dst_rect.width = display_width;
	dst_rect.height = display_height;

	src_rect.x = 0;
	src_rect.y = 0;
	src_rect.width = display_width << 16;
	src_rect.height = display_height << 16;

	dispman_display = vc_dispmanx_display_open( 0 /* LCD */);
	dispman_update = vc_dispmanx_update_start( 0 );

	dispman_element = vc_dispmanx_element_add ( dispman_update,
	dispman_display, 0/*layer*/, &dst_rect, 0/*src*/,
			&src_rect, DISPMANX_PROTECTION_NONE, 0 /*alpha*/,
			0/*clamp*/, dispman_trans/*transform*/);

	nativewindow.element = dispman_element;
	nativewindow.width = display_width;
	nativewindow.height = display_height;
	vc_dispmanx_update_submit_sync( dispman_update );

	///////////////
	// EGL SETUP //
	///////////////
	
	EGLDisplay eglDisplay;
	EGLSurface eglSurface;
	EGLContext eglContext;

	eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
	if(eglDisplay == EGL_NO_DISPLAY) {
		return 2;
	}

	//initialise EGL
	if(eglInitialize(eglDisplay, NULL, NULL) != EGL_TRUE) {
		return 3;
	}

	//Create an EGL config for what we want
	EGLConfig config;
	EGLint numConfigs;
	EGLint configs[] = {
		EGL_RED_SIZE, 8,
		EGL_GREEN_SIZE, 8,
		EGL_BLUE_SIZE, 8,
		EGL_ALPHA_SIZE, 8,
		EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
		EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
		EGL_NONE
	};

	//Choose the config to use
	if(eglChooseConfig(eglDisplay, configs, &config, 1, &numConfigs) == EGL_FALSE) {
		return 4;
	}

	//tell EGL we are using OpenVG / OpenGL ES / OpenGL
	//IMPORTANT THIS MUST BE DONE BEFORE CREATING THE CONTEXT!
	if(eglBindAPI(EGL_OPENGL_ES_API) != EGL_TRUE) {
		return 5;
	}

	//Now lets create the rendering context
	eglContext = eglCreateContext(eglDisplay, config, EGL_NO_CONTEXT, NULL);
	if(eglContext == EGL_NO_CONTEXT) {
		return 6;
	}

	//Create the window surface
	eglSurface = eglCreateWindowSurface(eglDisplay, config, &nativewindow, NULL);
	if(eglSurface == EGL_NO_CONTEXT) {
		return 7;
	}

	//make context current
	if(eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext) != EGL_TRUE) {
		return 8;
	}

	////////////////////
	// CREATE SHADERS //
	////////////////////
	
	//create the shader objects
	GLuint VShader = glCreateShader(GL_VERTEX_SHADER);
	GLuint FShader = glCreateShader(GL_FRAGMENT_SHADER);

	//our shader code later we will load this from a file
	const GLchar *VCode = {"attribute vec2 position;void main() {gl_Positon = position;}"};
	const GLchar *FCode = {"void main() {gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);}"};

	//load shader source code
	glShaderSource(VShader, 1, &VCode, NULL);
	glShaderSource(FShader, 1, &FCode, NULL);

	//debug print shader code to see it loaded?
	GLchar *VSource = NULL;
	GLchar *FSource = NULL;
	glGetShaderSource(VShader, 1024, NULL, VSource);
	glGetShaderSource(FShader, 1024, NULL, FSource);
	std::cout << VSource << std::endl;
	std::cout << FSource << std::endl;

	//compile the shaders
	glCompileShader(VShader);
	glCompileShader(FShader);

	//check for errors in both shaders
	GLint status;
	GLint logSize;
	GLchar *log = NULL;
	GLint size;
	glGetShaderiv(VShader, GL_COMPILE_STATUS, &status);
	if(status == GL_FALSE) {
		std::cout << "Vertex Shader ERROR!" << std::endl;
		glGetShaderiv(VShader, GL_INFO_LOG_LENGTH, &logSize);
		std::cout << logSize << std::endl;
		glGetShaderInfoLog(VShader, logSize, &size, log);
		if(logSize > 1) {
			std::cout << log << std::endl;
		}
	}
	glGetShaderiv(FShader, GL_COMPILE_STATUS, &status);
	if(status == GL_FALSE) {
		std::cout << "Fragment Shader ERROR!" << std::endl;
		glGetShaderiv(FShader, GL_INFO_LOG_LENGTH, &logSize);
		log = new char[logSize];
		glGetShaderInfoLog(FShader, logSize, &size, log);
		if(logSize > 1) {
			std::cout << log << std::endl;
		}
	}
	//create shader program
	GLuint program = glCreateProgram();

	//attach the shaders
	glAttachShader(program, VShader);
	glAttachShader(program, FShader);

	//now link the program
	glLinkProgram(program);

	//check program linked correctly
	GLint proLog = 0;
	glGetProgramiv(program, GL_LINK_STATUS, &proLog);
	if(proLog == GL_FALSE) {
		std::cout << "Error linking shader program!" << std::endl;
		GLint logLen = 0;
		GLchar *logInfo = NULL;
		glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLen);
		glGetProgramInfoLog(program, logLen, &logLen, logInfo);
		if(logLen > 0) {
			std::cout << logInfo << std::endl;
		}
	}

	//set the program to be used
	glUseProgram(program);

	//finished with shades so now we can delete
	glDeleteShader(VShader);
	glDeleteShader(FShader);

	///////////////////
	// TRIANGLE DATA //
	///////////////////
	
	//the vertex data
	GLfloat triangle[] = {-0.5, -0.5, 0.0, 0.5, 0.5, -0.5};

	//create a buffer to store the data in GPU memory
	GLuint triBuffer;
	glGenBuffers(1, &triBuffer);
	
	//tell GL which buffer we would like to work with?
	glBindBuffer(GL_ARRAY_BUFFER, triBuffer);

	//assign the data to the buffer
	glBufferData(GL_ARRAY_BUFFER, 6*sizeof(GLfloat), triangle, GL_STATIC_DRAW);

	//bind the attribute in vertex shader to index 0
	glBindAttribLocation(program, 0, "position");

	//tell GL how the data is stored?
	glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, triangle);
	
	//tell GL which attrib we would like to use
	glEnableVertexAttribArray(0);
	
	////////////////////
	// DRAWING & LOOP //
	////////////////////
	
	//Program loop
	int count = 0;

	//set the clear color
	glClearColor(0.5, 0.5, 0.5, 1.0);

	//assuming a vsync of 60hz here
	while(count < 600) {
			
		//clear the screen
		glClear(GL_COLOR_BUFFER_BIT);
	
		//lets draw our triangle
		glDrawArrays(GL_TRIANGLES, 0, 3);

		//swap the buffers
		eglSwapBuffers(eglDisplay, eglSurface);

		//Increment timer thinking 60hz refresh here
		count++;
	}
	


	//////////////
	// CLEAN UP //
	//////////////
	
	eglDestroySurface(eglDisplay, eglSurface);
	eglDestroyContext(eglDisplay, eglContext);
	eglTerminate(eglDisplay);

	return 0;
}
Thanks in advance! :)

User avatar
PeterO
Posts: 5456
Joined: Sun Jul 22, 2012 4:14 pm

Re: Where's my Triangle??

Sat Sep 28, 2019 10:49 am

Lots of things have changed on Buster and Pi4s :-)

No need to use PI specific code anymore, generic GL and GLES code should work.

Try this.... https://wiki.maemo.org/SimpleGL_example

PeterO
Discoverer of the PI2 XENON DEATH FLASH!
Interests: C,Python,PIC,Electronics,Ham Radio (G0DZB),1960s British Computers.
"The primary requirement (as we've always seen in your examples) is that the code is readable. " Dougie Lawson

pizzaboy150
Posts: 18
Joined: Sat Sep 05, 2015 11:03 am

Re: Where's my Triangle??

Sat Sep 28, 2019 11:00 am

Oh blimey, it's been a while since I picked up the Pi, and it seems a lot has changed. Does that now mean I can create a context using GLEW and GLFW instead of X11 or the funky BCM stuff?

User avatar
PeterO
Posts: 5456
Joined: Sun Jul 22, 2012 4:14 pm

Re: Where's my Triangle??

Sat Sep 28, 2019 11:09 am

pizzaboy150 wrote:
Sat Sep 28, 2019 11:00 am
Oh blimey, been a while since I picked up the PI, it seems a lot has changed. Does that now mean I can create a context using GLEW and GLFW instead of X11 or the funcky BCM stuff?
All I know is that you can't use the "funky BCM stuff" for sure as it is no longer supported. TBH the changes have not been well communicated and the whole alphabet soup of acronyms and abbreviations around 3D graphics is now even more confusing !

PeterO
Discoverer of the PI2 XENON DEATH FLASH!
Interests: C,Python,PIC,Electronics,Ham Radio (G0DZB),1960s British Computers.
"The primary requirement (as we've always seen in your examples) is that the code is readable. " Dougie Lawson

pizzaboy150
Posts: 18
Joined: Sat Sep 05, 2015 11:03 am

Re: Where's my Triangle??

Sat Sep 28, 2019 11:46 am

Thanks for the info — I will lose BCM and add Xlib, then see where that takes me.

pizzaboy150
Posts: 18
Joined: Sat Sep 05, 2015 11:03 am

Re: Where's my Triangle??

Sun Sep 29, 2019 11:08 am

Okay so dumped the BCM stuff and added Xlib, code now compiles but crashes when creating the EGL window surface but compiles okay.

I have dumbed down my Xlib code, so I am worried that EGL may need a window attribute set in Xlib to function correctly.

Though while playing I get a different response depending on which libs i use.

/opt/vc/lib/libEGL.so gives no feedback and just crashes while using;
eglSurface = eglCreateWindowSurface(eglDisplay, config, &win, NULL);

when using the /usr/lib/arm-linux-gnueabihf/libEGL.so with the code
eglSurface = eglCreateWindowSurface(eglDisplay, config, win, NULL);
gives me the error;

Code: Select all

[email protected]:~/OpenGL-ES2 $ ./program 
libEGL warning: DRI2: failed to authenticate
Segmentation fault
So I am using a Raspberry Pi3 not the 4, which lib should i be using and what am i missing in my code below?

Code: Select all

#include <iostream>
#include <X11/Xlib.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>

int main(int argc, char *argv[]) {

	/////////////// 
	// X11 STUFF //
	///////////////
	
	///create the display
	Display *display = XOpenDisplay(NULL);
	if(display == NULL) {
		std::cout << "Error creating X display!" << std::endl;
		return 1;
	}
	
	//get information about out display
	int screen_num = DefaultScreen(display);
	
	//now lets create the window
	Window win;
	int win_width = 640;
	int win_height = 480;
	int win_x = 0;
	int win_y = 0;

	win = XCreateSimpleWindow(display,
                          RootWindow(display, screen_num),
                          win_x, win_y,
                          win_width, win_height,
                          0, BlackPixel(display, screen_num),
                          WhitePixel(display, screen_num));                                           
	
	//Window name
	XStoreName(display, win, "Testing OpenGL ES 2.0");
	                         
	//make the window visable
	XMapWindow(display, win);
		
	///////////////
	// EGL SETUP //
	///////////////
	
	EGLDisplay eglDisplay;
	EGLSurface eglSurface;
	EGLContext eglContext;

	eglDisplay = eglGetDisplay((EGLNativeDisplayType)display);
	if(eglDisplay == EGL_NO_DISPLAY) {
		return 2;
	}

	//initialise EGL
	if(eglInitialize(eglDisplay, NULL, NULL) != EGL_TRUE) {
		return 3;
	}

	//Create an EGL config for what we want
	EGLConfig config;
	EGLint numConfigs;
	EGLint configs[] = {
		EGL_BUFFER_SIZE, 24,
		EGL_RENDERABLE_TYPE, 
		EGL_OPENGL_ES2_BIT,
		EGL_NONE
	};

	//Choose the config to use
	if(eglChooseConfig(eglDisplay, configs, &config, 1, &numConfigs) == EGL_FALSE) {
		return 4;
	}

	//Create the window surface
	eglSurface = eglCreateWindowSurface(eglDisplay, config, win, NULL);
	if(eglSurface == EGL_NO_SURFACE) {
		return 5;
	}
	
	//more attributes
	EGLint contextAttribs[] = {
		EGL_CONTEXT_CLIENT_VERSION, 2,
		EGL_NONE
	};	

	//Now lets create the rendering context
	eglContext = eglCreateContext(eglDisplay, config, EGL_NO_CONTEXT, contextAttribs);
	if(eglContext == EGL_NO_CONTEXT) {
		return 6;
	}

	//make context current
	if(eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext) != EGL_TRUE) {
		return 7;
	}

	////////////////////
	// CREATE SHADERS //
	////////////////////
	
	//create the shader objects
	GLuint VShader = glCreateShader(GL_VERTEX_SHADER);
	GLuint FShader = glCreateShader(GL_FRAGMENT_SHADER);

	//our shader code later we will load this from a file
	const GLchar *VCode = {"attribute vec2 position;void main() {gl_Positon = position;}"};
	const GLchar *FCode = {"void main() {gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);}"};

	//load shader source code
	glShaderSource(VShader, 1, &VCode, NULL);
	glShaderSource(FShader, 1, &FCode, NULL);

	//debug print shader code to see it loaded?
	GLchar *VSource = NULL;
	GLchar *FSource = NULL;
	glGetShaderSource(VShader, 1024, NULL, VSource);
	glGetShaderSource(FShader, 1024, NULL, FSource);
	std::cout << VSource << std::endl;
	std::cout << FSource << std::endl;

	//compile the shaders
	glCompileShader(VShader);
	glCompileShader(FShader);

	//check for errors in both shaders
	GLint status;
	GLint logSize;
	GLchar *log = NULL;
	GLint size;
	glGetShaderiv(VShader, GL_COMPILE_STATUS, &status);
	if(status == GL_FALSE) {
		std::cout << "Vertex Shader ERROR!" << std::endl;
		glGetShaderiv(VShader, GL_INFO_LOG_LENGTH, &logSize);
		std::cout << logSize << std::endl;
		glGetShaderInfoLog(VShader, logSize, &size, log);
		if(logSize > 1) {
			std::cout << log << std::endl;
		}
	}
	glGetShaderiv(FShader, GL_COMPILE_STATUS, &status);
	if(status == GL_FALSE) {
		std::cout << "Fragment Shader ERROR!" << std::endl;
		glGetShaderiv(FShader, GL_INFO_LOG_LENGTH, &logSize);
		log = new char[logSize];
		glGetShaderInfoLog(FShader, logSize, &size, log);
		if(logSize > 1) {
			std::cout << log << std::endl;
		}
	}
	//create shader program
	GLuint program = glCreateProgram();

	//attach the shaders
	glAttachShader(program, VShader);
	glAttachShader(program, FShader);

	//now link the program
	glLinkProgram(program);

	//check program linked correctly
	GLint proLog = 0;
	glGetProgramiv(program, GL_LINK_STATUS, &proLog);
	if(proLog == GL_FALSE) {
		std::cout << "Error linking shader program!" << std::endl;
		GLint logLen = 0;
		GLchar *logInfo = NULL;
		glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLen);
		glGetProgramInfoLog(program, logLen, &logLen, logInfo);
		if(logLen > 0) {
			std::cout << logInfo << std::endl;
		}
	}

	//set the program to be used
	glUseProgram(program);

	//finished with shades so now we can delete
	glDeleteShader(VShader);
	glDeleteShader(FShader);

	///////////////////
	// TRIANGLE DATA //
	///////////////////
	
	//the vertex data
	GLfloat triangle[] = {-0.5, -0.5, 0.0, 0.5, 0.5, -0.5};

	//create a buffer to store the data in GPU memory
	GLuint triBuffer;
	glGenBuffers(1, &triBuffer);
	
	//tell GL which buffer we would like to work with?
	glBindBuffer(GL_ARRAY_BUFFER, triBuffer);

	//assign the data to the buffer
	glBufferData(GL_ARRAY_BUFFER, 6*sizeof(GLfloat), triangle, GL_STATIC_DRAW);

	//bind the attribute in vertex shader to index 0
	glBindAttribLocation(program, 0, "position");

	//tell GL how the data is stored?
	glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, triangle);
	
	//tell GL which attrib we would like to use
	glEnableVertexAttribArray(0);
	
	////////////////////
	// DRAWING & LOOP //
	////////////////////
	
	//set the clear color
	glClearColor(0.5, 0.5, 0.5, 1.0);
	
	//main program loop
	bool session = true;
	
	while(session) {
			
		//clear the screen
		glClear(GL_COLOR_BUFFER_BIT);
		
		//lets draw our triangle
		glDrawArrays(GL_TRIANGLES, 0, 3);

		//swap the buffers
		eglSwapBuffers(eglDisplay, eglSurface);
	}

	//////////////
	// CLEAN UP //
	//////////////
	
	eglDestroySurface(eglDisplay, eglSurface);
	eglDestroyContext(eglDisplay, eglContext);
	eglTerminate(eglDisplay);
	XDestroyWindow(display, win);
	XCloseDisplay(display);

	return 0;
}

User avatar
PeterO
Posts: 5456
Joined: Sun Jul 22, 2012 4:14 pm

Re: Where's my Triangle??

Sun Sep 29, 2019 12:13 pm

I've never seen a clear explanation of what to use on Pi3 vs Pi4.
PeterO
Discoverer of the PI2 XENON DEATH FLASH!
Interests: C,Python,PIC,Electronics,Ham Radio (G0DZB),1960s British Computers.
"The primary requirement (as we've always seen in your examples) is that the code is readable. " Dougie Lawson

pizzaboy150
Posts: 18
Joined: Sat Sep 05, 2015 11:03 am

Re: Where's my Triangle??

Sun Sep 29, 2019 2:31 pm

Well i just compiled the example in the post you linked and that runs using the /usr/lib/arm-linux-gnueabihf/libEGL.so but with the DRI2: failed to authenticate error still being present.

Running it with the /opt/vc/lib/libEGL.so ends in no compilation and an egl bad surface exception.

So thinking I am getting closer.

May try using the OpenGL driver and seeing what difference that makes but still not sure my source is correct.

pizzaboy150
Posts: 18
Joined: Sat Sep 05, 2015 11:03 am

Re: Where's my Triangle??

Sun Sep 29, 2019 3:20 pm

Okay all sorted!!

I finally have my triangle. I am putting it down to a naff driver or an error in the EGL library I am using for the DRI2 error.

Here is the code that's working...

UPDATED 29/09/19 19:38

Code: Select all

#include <iostream>
#include <fstream>
#include <X11/Xlib.h>
#include <X11/XKBlib.h>
#include <EGL/egl.h>
#include <GLES2/gl2.h>

int main(int argc, char *argv[]) {

	/////////////// 
	// X11 STUFF //
	///////////////
	
	///create the display
	Display *display = XOpenDisplay(NULL);
	if(display == NULL) {
		std::cout << "Error creating X display!" << std::endl;
		return 1;
	}
	
	//get information about out display
	int screen_num = DefaultScreen(display);
	
	//now lets create the window
	Window win;
	int win_width = 640;
	int win_height = 480;
	int win_x = 0;
	int win_y = 0;

	win = XCreateSimpleWindow(display,
                          RootWindow(display, screen_num),
                          win_x, win_y,
                          win_width, win_height,
                          0, BlackPixel(display, screen_num),
                          WhitePixel(display, screen_num));                                           
	
	//Window name
	XStoreName(display, win, "Testing OpenGL ES 2.0");
	
	//register for events
	XSelectInput(display, win, ExposureMask | KeyPressMask);
	                         
	//make the window visable
	XMapWindow(display, win);
		
	///////////////
	// EGL SETUP //
	///////////////
	
	EGLDisplay eglDisplay;
	EGLSurface eglSurface;
	EGLContext eglContext;

	eglDisplay = eglGetDisplay((EGLNativeDisplayType)display);
	if(eglDisplay == EGL_NO_DISPLAY) {
		return 2;
	}

	//initialise EGL
	if(eglInitialize(eglDisplay, NULL, NULL) != EGL_TRUE) {
		return 3;
	}

	//Create an EGL config for what we want
	EGLConfig config;
	EGLint numConfigs;
	EGLint configs[] = {
		EGL_BUFFER_SIZE, 32,
		EGL_RENDERABLE_TYPE, 
		EGL_OPENGL_ES2_BIT,
		EGL_NONE
	};
	//Choose the config to use
	if(eglChooseConfig(eglDisplay, configs, &config, 1, 
					   &numConfigs) == EGL_FALSE) {
		return 4;
	}

	//Create the window surface
	eglSurface = eglCreateWindowSurface(eglDisplay, config, win, NULL);
	if(eglSurface == EGL_NO_SURFACE) {
		return 5;
	}
	
	//more attributes
	EGLint contextAttribs[] = {
		EGL_CONTEXT_CLIENT_VERSION, 2,
		EGL_NONE
	};	

	//Now lets create the rendering context
	eglContext = eglCreateContext(eglDisplay, config, EGL_NO_CONTEXT,
								  contextAttribs);
	if(eglContext == EGL_NO_CONTEXT) {
		return 6;
	}

	//make context current
	if(eglMakeCurrent(eglDisplay, eglSurface, eglSurface, 
					  eglContext) != EGL_TRUE) {
		return 7;
	}
	
	////////////////////
	// CREATE SHADERS //
	////////////////////
	
	//open shader files and read contents
	
	std::fstream vFile;
	std::fstream fFile;
	
	vFile.open("vShader", std::fstream::in);
	fFile.open("fShader", std::fstream::in);
	
	//check files opened
	if(!vFile.is_open()) {
		std::cout << "Unable to open vertex shader file!" << std::endl;
	}
	if(!fFile.is_open()) {
		std::cout << "Unable to open fragment shader file!" << std::endl;
	}
	
	//find file lengths
	long unsigned int vLen, fLen;
	
	vFile.seekg(0, vFile.end);
	vLen = vFile.tellg();
	vFile.seekg(0, vFile.beg);
	
	fFile.seekg(0, fFile.end);
	fLen = fFile.tellg();
	fFile.seekg(0, fFile.beg);
	
	//now read the data into a buffer
	GLchar *vSrc = new GLchar[vLen];
	GLchar *fSrc = new GLchar[fLen];
	vFile.read(vSrc, vLen);
	fFile.read(fSrc, fLen);
	
	//close files when finished
	vFile.close();
	fFile.close();
	
	//create the shader objects
	GLuint VShader = glCreateShader(GL_VERTEX_SHADER);
	GLuint FShader = glCreateShader(GL_FRAGMENT_SHADER);

	//load shader source code
	glShaderSource(VShader, 1, &vSrc, NULL);
	glShaderSource(FShader, 1, &fSrc, NULL);

	//compile the shaders
	glCompileShader(VShader);
	glCompileShader(FShader);

	//check for errors in both shaders
	GLint status;
	GLint logSize;
	GLchar *log = NULL;
	GLint size;
	glGetShaderiv(VShader, GL_COMPILE_STATUS, &status);
	if(status == GL_FALSE) {
		std::cout << "Vertex Shader ERROR!" << std::endl;
		glGetShaderiv(VShader, GL_INFO_LOG_LENGTH, &logSize);
		log = new char[logSize];
		glGetShaderInfoLog(VShader, logSize, &size, log);
		if(logSize > 1) {
			std::cout << log << std::endl;
		}
	}
	glGetShaderiv(FShader, GL_COMPILE_STATUS, &status);
	if(status == GL_FALSE) {
		std::cout << "Fragment Shader ERROR!" << std::endl;
		glGetShaderiv(FShader, GL_INFO_LOG_LENGTH, &logSize);
		log = new char[logSize];
		glGetShaderInfoLog(FShader, logSize, &size, log);
		if(logSize > 1) {
			std::cout << log << std::endl;
		}
	}
	//create shader program
	GLuint program = glCreateProgram();

	//attach the shaders
	glAttachShader(program, VShader);
	glAttachShader(program, FShader);

	//now link the program
	glLinkProgram(program);

	//check program linked correctly
	GLint proLog = 0;
	glGetProgramiv(program, GL_LINK_STATUS, &proLog);
	if(proLog == GL_FALSE) {
		std::cout << "Error linking shader program!" << std::endl;
		GLint logLen = 0;
		GLchar *logInfo = NULL;
		glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLen);
		logInfo = new char[logLen];
		glGetProgramInfoLog(program, logLen, &logLen, logInfo);
		if(logLen > 0) {
			std::cout << logInfo << std::endl;
		}
	}

	//set the program to be used
	glUseProgram(program);

	//finished with shades so now we can delete
	glDeleteShader(VShader);
	glDeleteShader(FShader);

	///////////////////
	// TRIANGLE DATA //
	///////////////////
	
	//the vertex coordinates
	GLfloat verticies[] = {-0.5, -0.5, 0.0, 
						   0.0,  0.5, 0.0, 
						   0.5, -0.5, 0.0};
						   
	//the index for the vertices to make triangles						
	GLushort index[] = {0, 1, 2};

	//create a buffer to store the data in GPU memory
	GLuint vertBuffer;
	GLuint indxBuffer;
	glGenBuffers(1, &vertBuffer);
	glGenBuffers(1, &indxBuffer);
	
	//now bind to the elements buffer for the index's
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indxBuffer);
	
	//assign index data to the buffer
	glBufferData(GL_ELEMENT_ARRAY_BUFFER, 3*sizeof(GLushort),
				 index, GL_STATIC_DRAW);
	
	//tell GL which buffer we would like to work with?
	glBindBuffer(GL_ARRAY_BUFFER, vertBuffer);

	//assign the data to the buffer
	glBufferData(GL_ARRAY_BUFFER, 9*sizeof(GLfloat),
				 verticies, GL_STATIC_DRAW);

	//bind the attribute in vertex shader to index 0
	glBindAttribLocation(program, 0, "position");

	//tell GL how the data is stored?
	//carefull NULL to use bound buffer or pointer to data not on gpu!!!
	glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
	
	//tell GL which attrib we would like to use
	glEnableVertexAttribArray(0);
	
	////////////////////
	// DRAWING & LOOP //
	////////////////////
	
	//set the clear color
	glClearColor(0.1, 0.1, 0.1, 1.0);
	
	//events
	XEvent event;
	
	//main program loop
	bool session = true;
	
	while(session) {
		
		XNextEvent(display, &event);
		
		//draws to the screen when we need too
		if(event.type == Expose) {
			//clear the screen
			glClear(GL_COLOR_BUFFER_BIT);
			
			//lets draw our triangle
			glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_SHORT, (void*)0);

			//swap the buffers
			eglSwapBuffers(eglDisplay, eglSurface);
		}
		
		//check for Button presses
		if(event.type == KeyPress) {
			//okay which button?
			KeySym ks = XkbKeycodeToKeysym(display, event.xkey.keycode,
										   0, 0);
			
			//go through keys
			switch(ks) {
				//escape key exits loop
				case XK_Escape:
					session = false;
					break;
			}
		}	
	}

	//////////////
	// CLEAN UP //
	//////////////
	
	eglDestroySurface(eglDisplay, eglSurface);
	eglDestroyContext(eglDisplay, eglContext);
	eglTerminate(eglDisplay);
	XDestroyWindow(display, win);
	XCloseDisplay(display);

	return 0;
}

shaymanjohn
Posts: 7
Joined: Tue Nov 12, 2019 8:32 pm

Re: Where's my Triangle??

Tue Nov 12, 2019 8:40 pm

When I compile and run the SimpleGL_example linked earlier in the thread on my Pi4, I'm getting between 20 and 25fps.

That seems very low to me.

It wouldn't build to begin with (GLES2/gl2.h not found), so I did a `sudo apt-get install libgles2-mesa-dev`, after which it built fine.

Does that sound correct? (the install and the frame rate)...

I've got some OpenGL experience, but the linux / X11 stuff is new to me - is this the way to go on the Pi4?

Thanks for any advice you can share.

User avatar
Gavinmc42
Posts: 4297
Joined: Wed Aug 28, 2013 3:31 am

Re: Where's my Triangle??

Fri Nov 15, 2019 4:00 am

That frame rate could be about right, the Pi4/VC6 is not a Nvidia card.

Found this last night
https://aras-p.info/blog/2010/09/29/glsl-optimizer/
https://github.com/aras-p/glsl-optimizer

I have noticed a tiny speed improvement by using lowp instead of mediump or highp.
Still learning other speed tricks.

The Pi4 is very mainstream now, lots of Linux OpenGL methods work.
The limitations are OpenGL2.1 and OpenGLES 3.0.
OpenGLES 3.1 is in the works and maybe one day 3.2 as the hardware is capable of ES 3.2.

So far I have used C, C++, Rust, Go, Pascal , BBC Basic and Python to test OpenGL.
Most of the mesa3d demos examples work.

Spent the morning compiling and playing with the Khronos GLSLvalidator.
It works :D

My library preferences so far are OpenGLUT and Pyglet/PyOpenGL.
But I am having too much fun with GLSL shaders to do much more.
Wishing the VC6 was OpenGL 3.3, so many more examples.

Making obj files Blender 2.79/Blender for Artists 1.0 works.
Newer versions won't as they need OpenGL 3.3.
I'm dancing on Rainbows.
Raspberries are not Apples or Oranges

shaymanjohn
Posts: 7
Joined: Tue Nov 12, 2019 8:32 pm

Re: Where's my Triangle??

Fri Nov 15, 2019 8:39 am

Thanks for the reply!

Any thoughts on how the emulators manage much higher frame rates with much more complex scenes?

Will check out the links...

User avatar
Gavinmc42
Posts: 4297
Joined: Wed Aug 28, 2013 3:31 am

Re: Where's my Triangle??

Fri Nov 15, 2019 10:49 am

Any thoughts on how the emulators manage much higher frame rates with much more complex scenes?
There is a whole bunch of tricks game makers use, I'm still learning them.
VBO - vertex buffer objects, display lists, texture mapping, texture baking....
Those scenes are not as complex as you think.

Think about Android games, how complex do they render these days?
Games are flat colours mostly textureless and not really 3D but 2.5D, a bit like the trend from 3D UI's to flat UI's.
Then Unity/Unreal fps games, they have more GPU cores than Pi's do.

I found a good idea of what the Pi4 is capable of is the WebGL samples.
The same WebGL examples will run faster on an Android phones that have similar SoC's.
But the Pi browser software is improving on Pi's and is now much closer to Android than last year or even when the Pi4 first came out.

My cheap smartphone does 12fps for the Aquarium sample, now Aurora in Gentoo64 is the same 12fps and Chromium is 22 fps.
My Pi3B+ is slower than my phone and the Pi4 is faster.
Pi Hardware and software is now better?
Desktop PC comparable, if you have an old PC ;)

RetroPie is still not Pi4 ready, but guys have got Dolphin working.
Other emulators are getting closer to going on Pi4.
I'm dancing on Rainbows.
Raspberries are not Apples or Oranges

shaymanjohn
Posts: 7
Joined: Tue Nov 12, 2019 8:32 pm

Re: Where's my Triangle??

Fri Nov 15, 2019 5:14 pm

Thanks - but pretty sure there must be a problem or severe bottleneck somewhere here, when a single shader, simple quad is running at <25fps, but elsewhere (for example) Half Life is running on the Pi4 at 60 to 70fps...

https://www.youtube.com/watch?v=nRBf1TmRKek&t=200

User avatar
Gavinmc42
Posts: 4297
Joined: Wed Aug 28, 2013 3:31 am

Re: Where's my Triangle??

Sat Nov 16, 2019 1:26 am

Yep those game makers know some tricks.

This is now my favourite way to play with shaders, but only the minimal example does 60fps.
Raymarch - are 31 and 23fps
https://github.com/Blakkis/GLSL_Python

But you have convinced me to have a look at the code of games like Half-Life to learn how they do it.
I'm dancing on Rainbows.
Raspberries are not Apples or Oranges

shaymanjohn
Posts: 7
Joined: Tue Nov 12, 2019 8:32 pm

Re: Where's my Triangle??

Sun Nov 17, 2019 4:56 pm

Today I installed libsdl2 (sudo apt-get install libsdl2-dev), and found a basic OpenGL triangle example (using shaders, and SDL2)...which runs at 60fps. Happy days!

Been an interesting journey of understanding to get here - I think some of the problem is that pre-Pi4 things worked a bit differently, so googling brings up a lot of info that maybe doesn't quite fit the Pi4...

Thanks for your help @Gavinmc42. libsdl2 seems to fit my needs for creating retro 2d games (using OpenGL), and the apparent bottleneck I had on the X11 sample isn't there, so will see where it goes from here...

User avatar
Gavinmc42
Posts: 4297
Joined: Wed Aug 28, 2013 3:31 am

Re: Where's my Triangle??

Mon Nov 18, 2019 4:35 am

The drivers are evolving, check out this.
https://github.com/mesa3d/mesa/blob/19. ... 3d_debug.c

Are we getting compute shaders next and then maybe tessellation and geometry shaders?
I'm using 19.2, are compute shaders there already?
Hard to keep up.
I have started seeing Vulkan/Pi4 GitHub files, not working but in progress?

The Pi4 is easier to use now as lots of the normal Linux stuff works.
Found a few games engines, most are too complex for me to understand yet.
Going for a 3D game myself, as I will learn more about OpenGL 3D, especially the shaders.
I'm dancing on Rainbows.
Raspberries are not Apples or Oranges

User avatar
Gavinmc42
Posts: 4297
Joined: Wed Aug 28, 2013 3:31 am

Re: Where's my Triangle??

Mon Nov 18, 2019 7:43 am

Check your SDL2 version, new changes there too.
https://www.phoronix.com/scan.php?page= ... imizations
My Buster is showing 2.0.0 - time to update?
And my Gentoo64 is 2.0.10-rc1

Too early yet for 2.0.11 to show up?
Wonder what fps Pi4 will do?
I'm dancing on Rainbows.
Raspberries are not Apples or Oranges

shaymanjohn
Posts: 7
Joined: Tue Nov 12, 2019 8:32 pm

Re: Where's my Triangle??

Mon Nov 18, 2019 5:22 pm

My SDL2 is showing 2.0.9

bullen
Posts: 315
Joined: Sun Apr 28, 2013 2:52 pm

Re: Where's my Triangle??

Sat Dec 21, 2019 3:26 am

Thanks for this example!

I also have major performance problems. I can only render 3 characters at 60 fps.

Wait now after reboot I get BadAtom...

Maybe that is because I use TWM, uncommenting the _NET_WM_STATE XChangeProperty "fixes" it though...

Now I just need to understand how I can make my characters display (the draw calls are made but nothing renders on screen)...

How do I make sure the binary is calling the right GLES methods and not some emulated layer?

I compile with g++ -lEGL -lGL -lGLU -lGLESv2 -lX11 src/linux.c -o bin/park
https://github.com/tinspin/rupy - A tiny Java async HTTP application server.

Brian Beuken
Posts: 210
Joined: Fri Jan 29, 2016 12:51 pm

Re: Where's my Triangle??

Wed Jan 22, 2020 10:30 pm

I'm having some issues like this also. I just realised that I was able to use X11 to render my OpenGLES2.0 project on Rpi3, (there's a long thread in the C++ programming sub forum) and have been converting my systems to run X11 rather than using DispmanX to make transporting between different SBC's smoother.

But for the life of me I can't get this code to work on a Raspberry Pi 3 or 3+.

Here's the entire initOGL code, currently the FBResX and FBResY don't do anything, as they were part of my DispmanX system to change framebuffer sizes. But other than that the code seems to be a pretty standard set up for X11, it works on all other X11 systems...including the Rpi4. And when combined with a resolution change system I can get a Pi4 to shift quite some number of polys and keep a gui active at 60fps. (about twice as fast as a Pi3b)

But on a Pi 3B+ (and 3B) it fails at eglCreateWindowSurface with a segmentation error and no insight into why. I've got more than enough GPU memory allocated, and my config and attributes are standard.

Code: Select all


// Initialise an X11 window plus an EGL/OpenGLES2.0 context and surface on it.
// On success, state->display / state->surface / state->context /
// state->nativewindow are populated and a handful of sensible GLES default
// states (depth test, viewport, clear colour) are set.
//
// state         - target state struct filled in by this call.
// width/height  - requested window (and viewport) size in pixels.
// FBResX/FBResY - accepted but unused here; they were part of the DispmanX
//                 framebuffer-resize system (kept for interface parity).
//
// On any unrecoverable failure this prints a message and exits the process.
void Graphics::init_ogl(Target_State *state, int width, int height, int FBResX, int FBResY)
{
#define ES_WINDOW_RGB           0
	state->width = width;
	state->height = height;

	EGLint numConfigs;
	EGLint majorVersion;
	EGLint minorVersion;

	EGLDisplay egldisplay;
	EGLContext context;
	EGLSurface surface;
	EGLConfig config;
	Window root;
	XSetWindowAttributes swa;
	XSetWindowAttributes xattr;
	Atom wm_state;
	XWMHints hints;
	XEvent xev;

	/*
	 * X11 native display initialization
	 */
	x_display = XOpenDisplay(NULL);
	if (x_display == NULL)
	{
		printf("Sorry to say we can't open an XDisplay and this will fail\n");
		exit(EXIT_FAILURE);   // BUG FIX: was exit(0), which reports success to the shell
	}
	else
		printf("we got an XDisplay\n");

	root = DefaultRootWindow(x_display);

	// Window attributes: we want input events and an override-redirect
	// (undecorated, window-manager-bypassing) window.
	swa.event_mask  =  ExposureMask | PointerMotionMask | KeyPressMask | KeyReleaseMask;
	swa.background_pixmap = None;
	swa.background_pixel  = 0;
	swa.border_pixel      = 0;
	swa.override_redirect = true;

	win = XCreateWindow(
	    x_display,
		root,
		0, // puts it at the top left of the screen
		0,
		state->width, //set size
		state->height,
		0,
		CopyFromParent,
		InputOutput,
		CopyFromParent,
		CWEventMask,
		&swa);

	if (win == 0)
	{
		printf("Sorry to say we can't create a window and this will fail\n");
		exit(EXIT_FAILURE);   // BUG FIX: was exit(0)
	}
	else
		printf("we got an (Native) XWindow\n");

	state->nativewindow = (EGLNativeWindowType) win;

	XSelectInput(x_display, win, KeyPressMask | KeyReleaseMask);
	xattr.override_redirect = TRUE;
	XChangeWindowAttributes(x_display, win, CWOverrideRedirect, &xattr);

	hints.input = TRUE;
	hints.flags = InputHint;
	XSetWMHints(x_display, win, &hints);

	char* title = (char*)"x11 Demo";
	    // make the window visible on the screen
	XMapWindow(x_display, win);
	XStoreName(x_display, win, title);

	    // get identifiers for the provided atom name strings
	wm_state = XInternAtom(x_display, "_NET_WM_STATE", FALSE);

	// Ask the window manager to apply a _NET_WM_STATE change.
	// NOTE(review): per EWMH, data.l[1] should hold the property atom being
	// added/removed (e.g. _NET_WM_STATE_FULLSCREEN); 0 here means "no
	// property", which some window managers reject with BadAtom (as reported
	// on TWM earlier in this thread) - confirm the intended state atom.
	memset(&xev, 0, sizeof(xev));
	xev.type                 = ClientMessage;
	xev.xclient.window       = win;
	xev.xclient.message_type = wm_state;
	xev.xclient.format       = 32;
	xev.xclient.data.l[0]    = 1;      // 1 == _NET_WM_STATE_ADD
	xev.xclient.data.l[1]    = FALSE;
	XSendEvent(
	  x_display,
		DefaultRootWindow(x_display),
		FALSE,
		SubstructureNotifyMask,
		&xev);

	// Get Display.
	// BUG FIX: was eglGetDisplay(EGL_DEFAULT_DISPLAY). When rendering into an
	// X11 window, the EGL display must be obtained from the X connection;
	// otherwise Mesa can hand back a display that cannot authenticate with
	// DRI2 ("DRI2: failed to authenticate") or whose native-display type does
	// not match the X11 window handed to eglCreateWindowSurface - a plausible
	// cause of the segfault seen there.
	egldisplay = eglGetDisplay((EGLNativeDisplayType)x_display);
	if (egldisplay == EGL_NO_DISPLAY)
	{
		printf("Sorry to say we have an GetDisplay error and this will fail");
		exit(EXIT_FAILURE);
	}else  	printf("we got an EGLDisplay\n");

	// Initialize EGL
	if(!eglInitialize(egldisplay, &majorVersion, &minorVersion))
	{
		printf("Sorry to say we have an EGLinit error and this will fail");
		EGLint err = eglGetError(); // should be getting error values that make sense now
		exit(err);
	}else 	printf("we initialised EGL\n");

	// Bind the OpenGL ES API (not desktop GL) before choosing configs and
	// creating the context. This was previously commented out, which leaves
	// EGL on the implementation's default API.
	eglBindAPI(EGL_OPENGL_ES_API);

	// Get configs
	if(!eglGetConfigs(egldisplay, NULL, 0, &numConfigs))
	{
		printf("Sorry to say we have EGL config errors and this will fail");
		EGLint err = eglGetError();
		exit(err);
	}else 	printf("we got %i Configs\n",numConfigs);

	// Choose config (attribute_list is a class/file-scope EGLint array)
	if(!eglChooseConfig(egldisplay, attribute_list, &config, 1, &numConfigs))
	{
		printf("Sorry to say we have config choice issues, and this will fail");
		EGLint err = eglGetError();
		exit(err);
	} else	printf("we chose our config\n");

	// Create a GL context
	context = eglCreateContext(egldisplay, config, EGL_NO_CONTEXT, context_attributes);
	if (context == EGL_NO_CONTEXT)
	{
		EGLint err = eglGetError();
		exit(err);
	}
	else	printf("Created a context ok\n");

	// Create a surface on the native X11 window
	surface = eglCreateWindowSurface(egldisplay, config, state->nativewindow, NULL);
	if (surface == EGL_NO_SURFACE)
	{
		EGLint err = eglGetError();
		exit(err);
	} else 	printf("we got a Surface\n");

	// Make the context current
	if(!eglMakeCurrent(egldisplay, surface, surface, context))
	{
		EGLint err = eglGetError();
		exit(err);
	}

	state->display = egldisplay;
	state->surface = surface;
	state->context = context;

// just for fun lets see what we can do with this GPU
	printf("This SBC supports version %i.%i of EGL\n", majorVersion, minorVersion);
	printf("This GPU supplied by  :%s\n", glGetString(GL_VENDOR));
	printf("This GPU supports     :%s\n", glGetString(GL_VERSION));
	printf("This GPU Renders with :%s\n", glGetString(GL_RENDERER));
	printf("This GPU supports     :%s\n", glGetString(GL_SHADING_LANGUAGE_VERSION));
	printf("This GPU supports these extensions	:%s\n", glGetString(GL_EXTENSIONS));

	// Some OpenGLES2.0 states that we might need
	glEnable(GL_DEPTH_TEST);
	glDepthFunc(GL_LEQUAL);
	glDepthMask(GL_TRUE);      // was X11's TRUE; same value, correct GL type
	glDepthRangef(0.0f, 1.0f);
	glClearDepthf(1.0f);

	glViewport(0, 0, state->width, state->height);
	glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
	glCullFace(GL_BACK);
	if (glGetError() == GL_NO_ERROR)	return ;
	else
		printf("Oh bugger, Some part of the EGL/OGL graphic init failed\n");
}
Now after tinkering about a bit, I was able to get it to work using non static libs from mesa, not the Buster EGL_static, GLESv2_static etc, but it reported that DRI2 error libEGL warning: DRI2: failed to authenticate when eglGetDisplay was called.
However... it did go on to create a surface and didn't throw the segmentation error and went on to run...very very slowly, as if emulated rather than direct to the GPU..
I can normally get my project to run comfortably at 60fps with many 10K's of polys flying around with a DispmanX render surface.

It's a curious problem; for now I think I will continue with my use of DispmanX as my render surface, and make specific builds for the older Raspberries.

If anyone can see anything obviously wrong, please let me know.
Very old computer game programmer, now teaching very young computer game programmers, some very bad habits.
Wrote some book about coding Pi's and SBC's, it's out now...go get it!
http://www.scratchpadgames.net/

User avatar
Gavinmc42
Posts: 4297
Joined: Wed Aug 28, 2013 3:31 am

Re: Where's my Triangle??

Thu Jan 23, 2020 1:48 am

I just notice mesa 19.3.2 is released.
Was curious and checked 19.3.0, I don't remember seeing so many v3d changes before.
What version are you using?
I'm dancing on Rainbows.
Raspberries are not Apples or Oranges

User avatar
PeterO
Posts: 5456
Joined: Sun Jul 22, 2012 4:14 pm

Re: Where's my Triangle??

Thu Jan 23, 2020 7:05 am

I've not looked back to my 3B+ since I got a 4B, but it is my understanding that the EGL interface is only available on the 4B and that on earlier models you still have to use DispmanX to get a rendering surface. But I could be wrong :shock:

But I've often said that the documentation on the whole graphics stack is lacking and there is no single place that clearly explains the relationships between parts and what is/is not available on different models and with different bits of software and firmware enabled.

PeterO
Discoverer of the PI2 XENON DEATH FLASH!
Interests: C,Python,PIC,Electronics,Ham Radio (G0DZB),1960s British Computers.
"The primary requirement (as we've always seen in your examples) is that the code is readable. " Dougie Lawson

Brian Beuken
Posts: 210
Joined: Fri Jan 29, 2016 12:51 pm

Re: Where's my Triangle??

Thu Jan 23, 2020 9:43 am

Gavinmc42 wrote:
Thu Jan 23, 2020 1:48 am
I just notice mesa 19.3.2 is released.
Was curious and checked 19.3.0, I don't remember seeing so many v3d changes before.
What version are you using?
I installed it yesterday so I assume it was the latest version of Mesa; I didn't check the version number.
Very old computer game programmer, now teaching very young computer game programmers, some very bad habits.
Wrote some book about coding Pi's and SBC's, it's out now...go get it!
http://www.scratchpadgames.net/

Brian Beuken
Posts: 210
Joined: Fri Jan 29, 2016 12:51 pm

Re: Where's my Triangle??

Thu Jan 23, 2020 9:52 am

PeterO wrote:
Thu Jan 23, 2020 7:05 am
I've not looked back to my 3B+ since I got a 4B, but it is my understanding that the EGL interface is only available on the 4B and that on earlier models you still have to use DispmanX to get a rendering surface. But I could be wrong :shock:

But I've often said that the documentation on the whole graphics stack is lacking and there is no single place that clearly explains the relationships between parts and what is/is not available on different models and with different bits of software and firmware enabled.

PeterO
No, egl is available, that was never an issue, only the means of getting on screen was confusing on 3B, I used DispmanX (we discussed why before) and it works perfectly well.
here for example is my usual RPi (not 4) setup which works fine

Code: Select all


// Initialise EGL/OpenGLES2.0 on a DispmanX native window (the pre-Pi4 path).
// Fills in state->display / state->context / state->surface /
// state->nativewindow and sets basic GLES defaults.
//
// state         - target state struct filled in by this call.
// width/height  - on-screen destination rectangle size (the display size).
// FBResX/FBResY - GPU framebuffer resolution; DispmanX scales this up to the
//                 destination rectangle, so lowering it reduces GPU load.
//
// Failures are trapped with assert() (consistent with the rest of this path).
void Graphics::init_ogl(Target_State *state, int width, int height, int FBResX, int FBResY)
{
	EGLBoolean result;
	EGLint num_config;

//RPI setup is a little different to normal EGL
	DISPMANX_ELEMENT_HANDLE_T DispmanElementH;
	DISPMANX_DISPLAY_HANDLE_T DispmanDisplayH;
	DISPMANX_UPDATE_HANDLE_T DispmanUpdateH;
	VC_RECT_T dest_rect;
	VC_RECT_T src_rect;
	EGLConfig config;
	EGLint majorVersion;
	EGLint minorVersion;

// get an EGL display connection
	state->display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
	assert(state->display != EGL_NO_DISPLAY);  // BUG FIX: previously unchecked

// initialize the EGL display connection
	result = eglInitialize(state->display, &majorVersion, &minorVersion);
	assert(EGL_FALSE != result);  // BUG FIX: result was assigned but never checked

// get an appropriate EGL frame buffer configuration
	result = eglChooseConfig(state->display, attribute_list, &config, 1, &num_config);
	assert(EGL_FALSE != result);

// bind the OpenGL ES rendering API
	result = eglBindAPI(EGL_OPENGL_ES_API);
	assert(EGL_FALSE != result);

// create an EGL rendering context
	state->context = eglCreateContext(state->display, config, EGL_NO_CONTEXT, context_attributes);
	assert(state->context != EGL_NO_CONTEXT);

// create an EGL window surface: the GPU renders at FBResX x FBResY and
// DispmanX scales that buffer onto a width x height destination rectangle.
	state->width = FBResX;
	state->height = FBResY;

	dest_rect.x = 0; // (1920/2)-FBResX/2; // if using a non display size you can center it here
	dest_rect.y = 0; // (1080/2)+(FBResY/2);
	dest_rect.width = width; // it needs to know our window size
	dest_rect.height = height; //state->height;

	src_rect.x = 0;
	src_rect.y = 0;
	// DispmanX source rects are 16.16 fixed point, hence the shifts
	src_rect.width = (FBResX) << 16;
	src_rect.height = (FBResY) << 16;

	DispmanDisplayH = vc_dispmanx_display_open(0);
	DispmanUpdateH = vc_dispmanx_update_start(0);

	DispmanElementH = 	vc_dispmanx_element_add(
		DispmanUpdateH,
		DispmanDisplayH,
		1/*layer*/,
		&dest_rect,
		0/*source*/,
		&src_rect,
		DISPMANX_PROTECTION_NONE,
		0 /*alpha value*/,
		0/*clamp*/,
		(DISPMANX_TRANSFORM_T) 0/*transform*/);

	state->nativewindow.element = DispmanElementH;
	state->nativewindow.width = FBResX; //state->width;
	state->nativewindow.height = FBResY; //state->height;
	vc_dispmanx_update_submit_sync(DispmanUpdateH);

// normal EGL updates are different
	state->surface = eglCreateWindowSurface(state->display, config, &(state->nativewindow), NULL);
	assert(state->surface != EGL_NO_SURFACE);

	   // connect the context to the surface
	result = eglMakeCurrent(state->display, state->surface, state->surface, state->context);
	assert(EGL_FALSE != result);

// Some OpenGLES2.0 states that we might need
	glEnable(GL_DEPTH_TEST);
	glDepthFunc(GL_LEQUAL);

	glViewport(0, 0, state->width, state->height);
	glClearColor(0.0f, 0.0f, 0.0f, 1.0f);

	glCullFace(GL_BACK);

// stops the targets desktop showing through if we have alpha (but at a frame cost, you can remove if you are sure there are no transparencies)
//	glEnable(GL_BLEND);
//	glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_ZERO, GL_ONE);

	// glMapBufferOES is an extension, so it must be fetched at runtime
	glMapBuffer =  (PFNGLMAPBUFFEROESPROC)eglGetProcAddress("glMapBufferOES");

	eglSwapInterval(state->display,1); // 1 to lock speed to 60fps (assuming we are able to maintain it), 0 for immediate swap (may cause tearing) which will indicate actual frame rate
	printf("This SBC supports version %i.%i of EGL\n", majorVersion, minorVersion);
	printf("This GPU supplied by  :%s\n", glGetString(GL_VENDOR));
	printf("This GPU supports     :%s\n", glGetString(GL_VERSION));
	printf("This GPU Renders with :%s\n", glGetString(GL_RENDERER));
	printf("This GPU supports     :%s\n", glGetString(GL_SHADING_LANGUAGE_VERSION));
	printf("This GPU supports these extensions	:%s\n", glGetString(GL_EXTENSIONS));
}
I realise now that effectively X11 is pumping that DispmanX bitmap to the screen every frame, but that's fine.
Its fast and allows me to have full screen display, but with variable size frame buffers, so for less powerful Pi's I can reduce the GPU load to get frame rates up.
Very old computer game programmer, now teaching very young computer game programmers, some very bad habits.
Wrote some book about coding Pi's and SBC's, it's out now...go get it!
http://www.scratchpadgames.net/

Return to “OpenGLES”