OpenGL: Fix regular depal (shader depal still worked) that broke recently

Henrik Rydgård 2022-08-24 11:01:57 +02:00
parent ff92d7d4b0
commit bd674c47b6
6 changed files with 21 additions and 14 deletions

View file

@@ -1035,15 +1035,17 @@ void GLQueueRunner::PerformRenderPass(const GLRStep &step, bool first, bool last
 				activeSlot = slot;
 			}
 			if (c.bind_fb_texture.aspect == GL_COLOR_BUFFER_BIT) {
-				if (curTex[slot] != &c.bind_fb_texture.framebuffer->color_texture)
+				if (curTex[slot] != &c.bind_fb_texture.framebuffer->color_texture) {
 					glBindTexture(GL_TEXTURE_2D, c.bind_fb_texture.framebuffer->color_texture.texture);
-				curTex[slot] = &c.bind_fb_texture.framebuffer->color_texture;
+					curTex[slot] = &c.bind_fb_texture.framebuffer->color_texture;
+				}
 			} else if (c.bind_fb_texture.aspect == GL_DEPTH_BUFFER_BIT) {
-				if (curTex[slot] != &c.bind_fb_texture.framebuffer->z_stencil_texture)
+				if (curTex[slot] != &c.bind_fb_texture.framebuffer->z_stencil_texture) {
 					glBindTexture(GL_TEXTURE_2D, c.bind_fb_texture.framebuffer->z_stencil_texture.texture);
-				curTex[slot] = &c.bind_fb_texture.framebuffer->z_stencil_texture;
+					curTex[slot] = &c.bind_fb_texture.framebuffer->z_stencil_texture;
+				}
 			} else {
-				// TODO: Stencil texturing?
+				// Can't texture from stencil buffers.
 				curTex[slot] = nullptr;
 			}
 			CHECK_GL_ERROR_IF_DEBUG();
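
For reference, the brace change above scopes the cache update to the same branch as the bind, so curTex only records a texture that was actually bound. A minimal stand-alone sketch of that redundant-bind pattern, with a hypothetical Texture type in place of GLRTexture and a printf standing in for the GL bind call:

#include <cstdio>

struct Texture { unsigned id; };            // hypothetical stand-in for GLRTexture

static const Texture *curTex[8] = {};       // cached binding per texture slot

void BindCached(int slot, const Texture &tex) {
	if (curTex[slot] != &tex) {
		printf("glBindTexture(unit %d, tex %u)\n", slot, tex.id);  // stands in for glBindTexture()
		curTex[slot] = &tex;                // update the cache only when we really bound
	}
}

int main() {
	Texture color{1}, depth{2};
	BindCached(0, color);   // binds
	BindCached(0, color);   // cache hit, no bind
	BindCached(0, depth);   // binds again
	return 0;
}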

View file

@@ -1214,7 +1214,7 @@ bool OpenGLPipeline::LinkShaders() {
 	}
 	std::vector<GLRProgram::Initializer> initialize;
 	for (int i = 0; i < MAX_TEXTURE_SLOTS; ++i) {
-		if (i < queries.size()) {
+		if (i < samplers_.size()) {
 			initialize.push_back({ &samplerLocs_[i], 0, i });
 		} else {
 			samplerLocs_[i] = -1;
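
The queries.size() to samplers_.size() change above is the core of the fix: the loop that emits sampler-uniform initializers is now sized by the pipeline's own sampler list rather than the uniform-query list, so every declared sampler gets an initializer that points it at its matching texture unit. A rough stand-alone sketch of that loop, with simplified stand-ins for GLRProgram::Initializer, MAX_TEXTURE_SLOTS, and the member arrays (the two-sampler list is a hypothetical depal-style setup):

#include <cstdio>
#include <vector>

constexpr int MAX_TEXTURE_SLOTS = 4;

// Simplified stand-in for GLRProgram::Initializer: write `value` into *loc at link time.
struct Initializer { int *loc; int type; int value; };

int main() {
	std::vector<const char *> samplers = { "tex", "pal" };   // hypothetical: main texture + palette
	int samplerLocs[MAX_TEXTURE_SLOTS];
	std::vector<Initializer> initialize;

	for (int i = 0; i < MAX_TEXTURE_SLOTS; ++i) {
		if (i < (int)samplers.size()) {
			// Declared sampler: queue an initializer that points it at texture unit i.
			initialize.push_back({ &samplerLocs[i], 0, i });
		} else {
			samplerLocs[i] = -1;   // unused slot
		}
	}
	printf("queued %zu of %d sampler initializers\n", initialize.size(), MAX_TEXTURE_SLOTS);
	return 0;
}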

View file

@@ -230,7 +230,10 @@ Draw2DPipeline *Draw2D::Create2DPipeline(std::function<Draw2DPipelineInfo (Shade
 		{ draw2DVs_, fs },
 		inputLayout,
 		depthStencil,
-		blend, rasterNoCull, &draw2DUBDesc,
+		blend,
+		rasterNoCull,
+		&draw2DUBDesc,
+		info.samplers.is_empty() ? samplers : info.samplers,
 	};
 	Draw::Pipeline *pipeline = draw_->CreateGraphicsPipeline(pipelineDesc);
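
Create2DPipeline now forwards a sampler list into the pipeline description, falling back to a default set when the Draw2DPipelineInfo doesn't supply one. A small sketch of that fallback pattern, using std::vector in place of the real Slice<SamplerDef> view and invented sampler names:

#include <cstdio>
#include <string>
#include <vector>

struct SamplerDef { std::string name; };   // simplified stand-in

// Prefer the pipeline's own sampler list; otherwise use the shared default set.
const std::vector<SamplerDef> &ChooseSamplers(const std::vector<SamplerDef> &info,
                                              const std::vector<SamplerDef> &defaults) {
	return info.empty() ? defaults : info;
}

int main() {
	std::vector<SamplerDef> defaults = { { "tex" } };
	std::vector<SamplerDef> depal = { { "tex" }, { "pal" } };   // hypothetical depal samplers

	printf("%zu\n", ChooseSamplers({}, defaults).size());      // 1: default set
	printf("%zu\n", ChooseSamplers(depal, defaults).size());   // 2: pipeline-provided set
	return 0;
}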

View file

@@ -1,6 +1,7 @@
 #pragma once
 
 #include "GPU/GPU.h"
+#include "Common/GPU/Shader.h"
 
 // For framebuffer copies and similar things that just require passthrough.
 struct Draw2DVertex {
@@ -32,7 +33,7 @@ inline RasterChannel Draw2DSourceChannel(Draw2DShader shader) {
 struct Draw2DPipelineInfo {
 	RasterChannel readChannel;
 	RasterChannel writeChannel;
-	bool secondTexture;
+	Slice<SamplerDef> samplers;
 };
 
 struct Draw2DPipeline {

View file

@@ -1890,13 +1890,14 @@ void TextureCacheCommon::ApplyTextureFramebuffer(VirtualFramebuffer *framebuffer
 	bool depth = channel == RASTER_DEPTH;
 	bool need_depalettize = CanDepalettize(texFormat, depth ? GE_FORMAT_DEPTH16 : framebuffer->drawnFormat);
 
-	bool useShaderDepal = framebufferManager_->GetCurrentRenderVFB() != framebuffer && !depth && !gstate_c.curTextureIs3D;
+	// Shader depal is not supported during 3D texturing or depth texturing, and requires 32-bit integer instructions in the shader.
+	bool useShaderDepal = framebufferManager_->GetCurrentRenderVFB() != framebuffer &&
+		!depth &&
+		!gstate_c.curTextureIs3D &&
+		draw_->GetDeviceCaps().fragmentShaderInt32Supported;
 
 	// TODO: Implement shader depal in the fragment shader generator for D3D11 at least.
-	if (!draw_->GetDeviceCaps().fragmentShaderInt32Supported) {
-		useShaderDepal = false;
-	}
 
 	switch (draw_->GetShaderLanguageDesc().shaderLanguage) {
 	case ShaderLanguage::HLSL_D3D11:
 	case ShaderLanguage::HLSL_D3D9:
@@ -1915,7 +1916,6 @@
 		smoothedDepal = CanUseSmoothDepal(gstate, framebuffer->drawnFormat, clutTexture.rampLength);
 
 	if (useShaderDepal) {
 		// Very icky conflation here of native and thin3d rendering. This will need careful work per backend in BindAsClutTexture.
 		BindAsClutTexture(clutTexture.texture);
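
The useShaderDepal refactor above folds the fragmentShaderInt32Supported requirement into the boolean itself, so every condition for taking the shader-depal path reads in one place, and anything that fails the check falls back to the regular depal pass this commit repairs. A tiny sketch of that consolidated check, with a hypothetical caps struct standing in for draw_->GetDeviceCaps():

#include <cstdio>

struct Caps { bool fragmentShaderInt32Supported; };   // hypothetical subset of the real device caps

bool UseShaderDepal(bool samplingCurrentRenderTarget, bool depth, bool texture3D, const Caps &caps) {
	// Mirrors the condition above: shader depal is skipped for depth or 3D textures,
	// when sampling the framebuffer currently being rendered to, or without int32 ops in shaders.
	return !samplingCurrentRenderTarget &&
		!depth &&
		!texture3D &&
		caps.fragmentShaderInt32Supported;
}

int main() {
	Caps caps{ true };
	printf("%d\n", UseShaderDepal(false, false, false, caps));   // 1: shader depal path
	printf("%d\n", UseShaderDepal(false, true, false, caps));    // 0: depth -> regular depal path
	return 0;
}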

View file

@@ -213,6 +213,7 @@ Draw2DPipeline *TextureShaderCache::GetDepalettizeShader(uint32_t clutMode, GETe
 		return Draw2DPipelineInfo{
 			config.bufferFormat == GE_FORMAT_DEPTH16 ? RASTER_DEPTH : RASTER_COLOR,
 			RASTER_COLOR,
+			samplers
 		};
 	});
 	delete[] buffer;