sf::RenderWindow window(sf::VideoMode(800, 600), "Test");

// a quad with per-vertex colours but no texture coordinates
sf::Vertex vertices[] =
{
    sf::Vertex(sf::Vector2f(  0,   0), sf::Color::Blue),
    sf::Vertex(sf::Vector2f(200,   0), sf::Color::Red),
    sf::Vertex(sf::Vector2f(200, 200), sf::Color::Red),
    sf::Vertex(sf::Vector2f(  0, 200), sf::Color::Yellow),
};

sf::Shader shader;
// the example shader from the SDK
shader.loadFromFile("pixelate.frag", sf::Shader::Fragment);
shader.setParameter("pixel_threshold", 0.01f);

window.draw(vertices, 4, sf::Quads, &shader);
I'm probably not understanding something, which is why I'm asking this question. Why doesn't this work? The only thing that happens to the quad is that it's drawn completely black. I'm thinking it has something to do with textures, since:
sf::Texture texture;
texture.create(800, 600);

// draw the quad normally, copy the whole window into the texture,
// then draw that texture as a sprite with the shader applied
window.draw(vertices, 4, sf::Quads);
texture.update(window);

sf::Sprite sprite(texture);
window.draw(sprite, &shader);
This workaround works, but it's probably a bad approach since it captures the whole window. texture.update is overloaded, but I have no idea how to turn a vertex array into an array of pixels or an sf::Image (if that's even the correct way to do this).
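I was also wondering whether something like sf::RenderTexture is the intended way to avoid grabbing the whole window. This is only a rough, untested sketch of what I mean, and the 200x200 size is just my guess at the quad's bounds:

// rough, untested sketch: render the quad into an off-screen
// sf::RenderTexture instead of copying the whole window
// (200x200 is just the quad's bounding size)
sf::RenderTexture renderTexture;
renderTexture.create(200, 200);

renderTexture.clear(sf::Color::Transparent);
renderTexture.draw(vertices, 4, sf::Quads);
renderTexture.display();

sf::Sprite quadSprite(renderTexture.getTexture());
window.draw(quadSprite, &shader);

Is that the right direction, or is there a way to apply the shader to the vertex array directly?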
This is the shader code just in case:
uniform sampler2D texture;
uniform float pixel_threshold;

void main()
{
    float factor = 1.0 / (pixel_threshold + 0.001);
    vec2 pos = floor(gl_TexCoord[0].xy * factor + 0.5) / factor;
    gl_FragColor = texture2D(texture, pos) * gl_Color;
}
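Since the shader samples a texture, my guess is that I need to actually bind one and give the vertices texture coordinates. Something like the untested sketch below, where someTexture and "image.png" are just placeholders for whatever texture I would sample:

// untested guess: give the quad texture coordinates and tell the shader
// which texture to sample (someTexture / "image.png" are placeholders)
sf::Texture someTexture;
someTexture.loadFromFile("image.png");

// texture coordinates are in pixels, assuming a 200x200 texture here
sf::Vertex texturedVertices[] =
{
    sf::Vertex(sf::Vector2f(  0,   0), sf::Color::Blue,   sf::Vector2f(  0,   0)),
    sf::Vertex(sf::Vector2f(200,   0), sf::Color::Red,    sf::Vector2f(200,   0)),
    sf::Vertex(sf::Vector2f(200, 200), sf::Color::Red,    sf::Vector2f(200, 200)),
    sf::Vertex(sf::Vector2f(  0, 200), sf::Color::Yellow, sf::Vector2f(  0, 200)),
};

shader.setParameter("texture", sf::Shader::CurrentTexture);

sf::RenderStates states;
states.texture = &someTexture;
states.shader = &shader;
window.draw(texturedVertices, 4, sf::Quads, states);

Is that what the shader expects, or am I on the wrong track entirely?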