How do I get BGL to work?

So I have a script that (should) make a HUD using BGL, but it doesn’t show up. I have the entire script right here:

from __future__ import division
import math
from math import ceil
import ZotA
from ZotA import Player
import BGL
from BGL import *
import bitstring
from bitstring import *
import GameLogic as Logic
import Rasterizer
import os
import sys

#Constants
BarLength = 64
ByteBarLength = int(ceil(BarLength/8))*8#A bit more complicated than it should be, but whatever
BarHeight = 4
SW = Rasterizer.getWindowWidth()
SH = Rasterizer.getWindowHeight()
FBack = Buffer(GL_BYTE, Bits(length=ByteBarLength*BarHeight).__invert__().hex)
TemplateBand = BitString(length=ByteBarLength)
Screens = 0

# Configure polygon antialiasing from the user's video settings.
# Antialiasing setting: 0 = off, -1 = favour speed, -2 = favour quality.
if Logic.globalDict["Game Settings"]["Hardware"]["Video"]["Antialiasing"] != 0:
	if Logic.globalDict["Game Settings"]["Hardware"]["Video"]["Antialiasing"] == -1:
		Hint = GL_FASTEST
	elif Logic.globalDict["Game Settings"]["Hardware"]["Video"]["Antialiasing"] == -2:
		Hint = GL_NICEST
	else:
		# Any other non-zero value: let the driver decide.  The original
		# code left Hint unassigned here, so glHint() raised a NameError.
		Hint = GL_DONT_CARE
	glEnable(GL_BLEND)
	glEnable(GL_POLYGON_SMOOTH)
	glHint(GL_POLYGON_SMOOTH_HINT, Hint)
	glDisable(GL_DEPTH_TEST)


def Mainloop():
	"""Per-frame hook: set the blend function required for polygon
	antialiasing (when it is enabled) and flush pending GL commands."""
	video = Logic.globalDict["Game Settings"]["Hardware"]["Video"]
	if video["Antialiasing"] != 0:
		glBlendFunc(GL_SRC_ALPHA_SATURATE, GL_ONE)
	glFlush()

def DrawHUD():
	"""Draw the fatigue bar in the top-right corner of the screen.

	Renders a black background strip, then a green foreground strip whose
	filled width is proportional to the fatigue value.
	"""
	#Make raster for the Fatigue bar
	Distance = int(BarLength*.5)#int(BarLength*Player.Health["FA"].rDivide())
	# Copy the template rather than aliasing it: BitString.set() mutates in
	# place, so "FBand = TemplateBand" would permanently flip bits in the
	# shared module-level template and the bar could only ever grow.
	FBand = TemplateBand[:]
	FBand.set(xrange(Distance))
	FBar = FBand * BarHeight
	#Do OpenGL stuff
	#Draw Background (solid black strip behind the bar)
	glColor3i(0, 0, 0)
	glRasterPos2i(SW-64, SH-8)
	glBitmap(BarLength, BarHeight, 0, 0, 0, 0, FBack)
	#Draw Foreground (green fill up to Distance pixels)
	glColor3f(0.0, 1.0, 0.0)
	glRasterPos2i(SW-64, SH-8)
	glBitmap(BarLength, BarHeight, 0, 0, 0, 0, Buffer(GL_BYTE, FBar.hex))
	glFlush()#Is it really needed, or does Blender already do it for me?
	
	#Debug
	#print(FBBand)
	#print(FatigueBar)
	#print(BGL.glGetError())
	
	#q = BGL.Buffer(BGL.GL_BYTE, "0x0")
	#BGL.glGetBooleanv(BGL.GL_CURRENT_RASTER_POSITION_VALID, q)
	#print(q)
	#####################################################################################
	#Always spits out [[[0]]], except for the first frame (when it gets imported, and the
	#raster position hasn't been set)
	#If I don't set the raster position it prints [[1]], but still won't draw the bitmap!
	#Why? Is it really out of range, or is it spitting out crap?

def Screencap():
	"""Save a screenshot as Screens/ScreenNNNN.jpg using the next free index.

	Scans the Screens directory for existing "ScreenNNNN.jpg" files and
	uses one more than the highest index found, updating the module-level
	Screens counter.
	"""
	global Screens
	for s in os.listdir(os.path.normpath(sys.path[0]+"/Screens/")):
		try:
			# Filenames look like "Screen0042.jpg"; chars 6..9 hold the index.
			Index = int(s[6:10])
		except ValueError:
			continue  # skip files that don't match the naming scheme
		if Index + 1 > Screens:
			Screens = Index + 1
	print(Screens)
	# Zero-pad the index to four digits.
	s = str(Screens).zfill(4)
	s = os.path.normpath("//Screens/Screen"+s+".jpg")
	Rasterizer.makeScreenshot(s)#Possibly replace with something that doesn't interrupt gameplay
	print("Successfully created "+s)

All the variables are set up correctly in globalDict (just assume they’re right) and bitstring imports correctly (if you’re not familiar with it, it really helps with controlling bits and bytes and whatnot, so assume that’s right too), but it doesn’t show up.

What version of Blender are you using? Up until Blender 2.5, there was no way to run your own OpenGL commands in the BGE. Well, you could call them and actually do some drawing, but then the rasterizer would clear the screen and draw its own stuff over it.

In Blender 2.5 you can register your drawing functions via a callback in KX_Scene:


import bge

# Register the draw callback so the BGE invokes it after rendering each
# frame (post_draw is a list of callables; Blender 2.5+ only).
bge.logic.getCurrentScene().post_draw = [my_draw_func]

2.49 (and that’s what I thought it was doing too :p).