diff --git a/GPU/GLES/FragmentShaderGenerator.cpp b/GPU/GLES/FragmentShaderGenerator.cpp
index c105d90c20..3bab172e2f 100644
--- a/GPU/GLES/FragmentShaderGenerator.cpp
+++ b/GPU/GLES/FragmentShaderGenerator.cpp
@@ -253,7 +253,7 @@ ReplaceBlendType ReplaceBlendWithShader(bool allowShaderBlend) {
 	case GE_BLENDMODE_MIN:
 	case GE_BLENDMODE_MAX:
-		if (gl_extensions.EXT_blend_minmax || gl_extensions.GLES3) {
+		if (gstate_c.Supports(GPU_SUPPORTS_BLEND_MINMAX)) {
 			return REPLACE_BLEND_STANDARD;
 		} else {
 			return !allowShaderBlend ? REPLACE_BLEND_STANDARD : REPLACE_BLEND_COPY_FBO;
diff --git a/GPU/GLES/Framebuffer.cpp b/GPU/GLES/Framebuffer.cpp
index d02e0a1145..533649965b 100644
--- a/GPU/GLES/Framebuffer.cpp
+++ b/GPU/GLES/Framebuffer.cpp
@@ -1296,7 +1296,7 @@ void FramebufferManager::BlitFramebuffer(VirtualFramebuffer *dst, int dstX, int
 		useBlit = true;
 	}
 #else
-	if (gl_extensions.GLES3 || (gstate_c..Supports(GPU_SUPPORTS_NV_FRAMEBUFFER_BLIT)) {
+	if (gl_extensions.GLES3 || gstate_c.Supports(GPU_SUPPORTS_NV_FRAMEBUFFER_BLIT)) {
 		useNV = !gl_extensions.GLES3;
 		useBlit = true;
 	}
diff --git a/GPU/GLES/Framebuffer.h b/GPU/GLES/Framebuffer.h
index ee371cbcdc..27b784807f 100644
--- a/GPU/GLES/Framebuffer.h
+++ b/GPU/GLES/Framebuffer.h
@@ -145,6 +145,8 @@ private:
 	void SetNumExtraFBOs(int num);
 
+	inline bool ShouldDownloadUsingCPU(const VirtualFramebuffer *vfb) const;
+
 #ifndef USING_GLES2
 	void PackFramebufferAsync_(VirtualFramebuffer *vfb);
 #endif
diff --git a/GPU/GLES/GLES_GPU.cpp b/GPU/GLES/GLES_GPU.cpp
index cf33f6e4dd..c69fea9df6 100644
--- a/GPU/GLES/GLES_GPU.cpp
+++ b/GPU/GLES/GLES_GPU.cpp
@@ -21,6 +21,7 @@
 #include "Common/ChunkFile.h"
 
+#include "Core/Config.h"
 #include "Core/Debugger/Breakpoints.h"
 #include "Core/MemMapHelpers.h"
 #include "Core/Host.h"
@@ -31,6 +32,7 @@
 #include "GPU/GPUState.h"
 #include "GPU/ge_constants.h"
 #include "GPU/GeDisasm.h"
+#include "GPU/Common/FramebufferCommon.h"
 
 #include "GPU/GLES/ShaderManager.h"
 #include "GPU/GLES/GLES_GPU.h"
@@ -470,13 +472,14 @@ void GLES_GPU::CheckGPUFeatures() {
 	if (gl_extensions.ARB_blend_func_extended /*|| gl_extensions.EXT_blend_func_extended*/)
 		features |= GPU_SUPPORTS_DUALSOURCE_BLEND;
 
-#ifdef USING_GLES2
-	if (gl_extensions.GLES3)
-		features |= GPU_SUPPORTS_GLSL_ES_300;
-#else
-	if (gl_extensions.VersionGEThan(3, 3, 0))
-		features |= GPU_SUPPORTS_GLSL_330;
-#endif
+	if (gl_extensions.IsGLES) {
+		if (gl_extensions.GLES3)
+			features |= GPU_SUPPORTS_GLSL_ES_300;
+	} else {
+		if (gl_extensions.VersionGEThan(3, 3, 0))
+			features |= GPU_SUPPORTS_GLSL_330;
+	}
+
 	// Framebuffer fetch appears to be buggy at least on Tegra 3 devices. So we blacklist it.
 	// Tales of Destiny 2 has been reported to display green.
 	if (gl_extensions.EXT_shader_framebuffer_fetch || gl_extensions.NV_shader_framebuffer_fetch || gl_extensions.ARM_shader_framebuffer_fetch) {
@@ -491,27 +494,45 @@ void GLES_GPU::CheckGPUFeatures() {
 		features |= GPU_SUPPORTS_FBO_ARB;
 	}
 
+	bool useCPU = false;
+	if (!gl_extensions.IsGLES) {
+		// Urrgh, we don't even define FB_READFBOMEMORY_CPU on mobile
 #ifndef USING_GLES2
-	bool useCPU = g_Config.iRenderingMode == FB_READFBOMEMORY_CPU;
-	// We might get here if hackForce04154000Download_ is hit.
-	// Some cards or drivers seem to always dither when downloading a framebuffer to 16-bit.
-	// This causes glitches in games that expect the exact values.
-	// It has not been experienced on NVIDIA cards, so those are left using the GPU (which is faster.)
-	if (g_Config.iRenderingMode == FB_BUFFERED_MODE) {
-		if (gl_extensions.gpuVendor != GPU_VENDOR_NVIDIA || gl_extensions.ver[0] < 3) {
-			useCPU = true;
-		}
-	}
-#else
-	useCPU = true;
+		useCPU = g_Config.iRenderingMode == FB_READFBOMEMORY_CPU;
 #endif
-	if (useCPU) {
-		features |= GPU_PREFER_CPU_DOWNLOAD;
+		// We might get here if hackForce04154000Download_ is hit.
+		// Some cards or drivers seem to always dither when downloading a framebuffer to 16-bit.
+		// This causes glitches in games that expect the exact values.
+		// It has not been experienced on NVIDIA cards, so those are left using the GPU (which is faster.)
+		if (g_Config.iRenderingMode == FB_BUFFERED_MODE) {
+			if (gl_extensions.gpuVendor != GPU_VENDOR_NVIDIA || gl_extensions.ver[0] < 3) {
+				useCPU = true;
+			}
+		}
+	} else {
+		useCPU = true;
 	}
 
+	if (useCPU)
+		features |= GPU_PREFER_CPU_DOWNLOAD;
+
 	if ((gl_extensions.gpuVendor == GPU_VENDOR_NVIDIA) || (gl_extensions.gpuVendor == GPU_VENDOR_AMD))
 		features |= GPU_PREFER_REVERSE_COLOR_ORDER;
 
+	if (gl_extensions.OES_texture_npot)
+		features |= GPU_SUPPORTS_OES_TEXTURE_NPOT;
+
+	if (gl_extensions.EXT_unpack_subimage || !gl_extensions.IsGLES)
+		features |= GPU_SUPPORTS_UNPACK_SUBIMAGE;
+
+	if (gl_extensions.EXT_blend_minmax || gl_extensions.GLES3)
+		features |= GPU_SUPPORTS_BLEND_MINMAX;
+
+#ifdef MOBILE_DEVICE
+	// Arguably, we should turn off GPU_IS_MOBILE on like modern Tegras, etc.
+	features |= GPU_IS_MOBILE;
+#endif
+
 	gstate_c.featureFlags = features;
 }
diff --git a/GPU/GLES/GLES_GPU.h b/GPU/GLES/GLES_GPU.h
index 178770837d..b2a97684e1 100644
--- a/GPU/GLES/GLES_GPU.h
+++ b/GPU/GLES/GLES_GPU.h
@@ -37,6 +37,7 @@ public:
 	GLES_GPU();
 	~GLES_GPU();
 
+	// This gets called on startup and when we get back from settings.
 	void CheckGPUFeatures();
 
 	void InitClear() override;
diff --git a/GPU/GLES/StateMapping.cpp b/GPU/GLES/StateMapping.cpp
index bf46f1a4bc..7c08adc418 100644
--- a/GPU/GLES/StateMapping.cpp
+++ b/GPU/GLES/StateMapping.cpp
@@ -567,7 +567,7 @@ void TransformDrawEngine::ApplyBlendState() {
 			glstate.blendFuncSeparate.set(glBlendFuncA, glBlendFuncB, GL_ZERO, GL_ONE);
 		}
 
-		if (gl_extensions.EXT_blend_minmax || gl_extensions.GLES3) {
+		if (gstate_c.Supports(GPU_SUPPORTS_BLEND_MINMAX)) {
 			glstate.blendEquationSeparate.set(eqLookup[blendFuncEq], alphaEq);
 		} else {
 			glstate.blendEquationSeparate.set(eqLookupNoMinMax[blendFuncEq], alphaEq);
diff --git a/GPU/GLES/TextureCache.cpp b/GPU/GLES/TextureCache.cpp
index d953087a89..ff352ca552 100644
--- a/GPU/GLES/TextureCache.cpp
+++ b/GPU/GLES/TextureCache.cpp
@@ -66,6 +66,7 @@
 #define GL_UNPACK_ROW_LENGTH 0x0CF2
 #endif
 
+// Hack!
 extern int g_iNumVideos;
 
 TextureCache::TextureCache() : cacheSizeEstimate_(0), secondCacheSizeEstimate_(0), clearCacheNextFrame_(false), lowMemoryMode_(false), clutBuf_(NULL), clutMaxBytes_(0), texelsScaledThisFrame_(0) {
@@ -1446,14 +1447,15 @@ void TextureCache::SetTexture(bool force) {
 			scaleFactor = (PSP_CoreParameter().renderWidth + 479) / 480;
 		}
 
-#ifndef MOBILE_DEVICE
-		scaleFactor = std::min(gl_extensions.OES_texture_npot ? 5 : 4, scaleFactor);
-		if (!gl_extensions.OES_texture_npot && scaleFactor == 3) {
-			scaleFactor = 2;
+		// Mobile devices don't get the higher scale factors, too expensive. Very rough way to decide though...
+		if (!gstate_c.Supports(GPU_IS_MOBILE)) {
+			scaleFactor = std::min(gstate_c.Supports(GPU_SUPPORTS_OES_TEXTURE_NPOT) ? 5 : 4, scaleFactor);
+			if (!gl_extensions.OES_texture_npot && scaleFactor == 3) {
+				scaleFactor = 2;
+			}
+		} else {
+			scaleFactor = std::min(gstate_c.Supports(GPU_SUPPORTS_OES_TEXTURE_NPOT) ? 3 : 2, scaleFactor);
 		}
-#else
-		scaleFactor = std::min(gl_extensions.OES_texture_npot ? 3 : 2, scaleFactor);
-#endif
 	} else {
 		scaleFactor = g_Config.iTexScalingLevel;
 	}
@@ -1698,7 +1700,7 @@ void *TextureCache::DecodeTextureLevel(GETextureFormat format, GEPaletteFormat c
 	case GE_TFMT_8888:
 		if (!swizzled) {
 			// Special case: if we don't need to deal with packing, we don't need to copy.
-			if ((g_Config.iTexScalingLevel == 1 && gl_extensions.EXT_unpack_subimage) || w == bufw) {
+			if ((g_Config.iTexScalingLevel == 1 && gstate_c.Supports(GPU_SUPPORTS_UNPACK_SUBIMAGE)) || w == bufw) {
 				if (UseBGRA8888()) {
 					tmpTexBuf32.resize(std::max(bufw, w) * h);
 					finalBuf = tmpTexBuf32.data();
@@ -1791,7 +1793,7 @@ void *TextureCache::DecodeTextureLevel(GETextureFormat format, GEPaletteFormat c
 		ERROR_LOG_REPORT(G3D, "NO finalbuf! Will crash!");
 	}
 
-	if (!(g_Config.iTexScalingLevel == 1 && gl_extensions.EXT_unpack_subimage) && w != bufw) {
+	if (!(g_Config.iTexScalingLevel == 1 && gstate_c.Supports(GPU_SUPPORTS_UNPACK_SUBIMAGE)) && w != bufw) {
 		int pixelSize;
 		switch (dstFmt) {
 		case GL_UNSIGNED_SHORT_4_4_4_4:
@@ -1803,6 +1805,7 @@ void *TextureCache::DecodeTextureLevel(GETextureFormat format, GEPaletteFormat c
 			pixelSize = 4;
 			break;
 		}
 
+		// Need to rearrange the buffer to simulate GL_UNPACK_ROW_LENGTH etc.
 		int inRowBytes = bufw * pixelSize;
 		int outRowBytes = w * pixelSize;
@@ -1868,7 +1871,7 @@ void TextureCache::LoadTextureLevel(TexCacheEntry &entry, int level, bool replac
 		gpuStats.numTexturesDecoded++;
 
 	// Can restore these and remove the fixup at the end of DecodeTextureLevel on desktop GL and GLES 3.
-	if ((g_Config.iTexScalingLevel == 1 && gl_extensions.EXT_unpack_subimage) && w != bufw) {
+	if ((g_Config.iTexScalingLevel == 1 && gstate_c.Supports(GPU_SUPPORTS_UNPACK_SUBIMAGE)) && w != bufw) {
 		glPixelStorei(GL_UNPACK_ROW_LENGTH, bufw);
 		useUnpack = true;
 	}
diff --git a/GPU/GPUCommon.h b/GPU/GPUCommon.h
index cc60d15080..6ead754b81 100644
--- a/GPU/GPUCommon.h
+++ b/GPU/GPUCommon.h
@@ -18,6 +18,8 @@ class GPUCommon : public GPUThreadEventQueue, public GPUDebugInterface {
 public:
 	GPUCommon();
 	virtual ~GPUCommon();
+
+	virtual void CheckGPUFeatures() override {}
 
 	virtual void Reinitialize();
 	virtual void InterruptStart(int listid);
diff --git a/GPU/GPUInterface.h b/GPU/GPUInterface.h
index 15a57fed3c..9af5d02ba0 100644
--- a/GPU/GPUInterface.h
+++ b/GPU/GPUInterface.h
@@ -207,6 +207,8 @@ public:
 	static const int DisplayListMaxCount = 64;
 
+	virtual void CheckGPUFeatures() = 0;
+
 	// Initialization
 	virtual void InitClear() = 0;
 	virtual void Reinitialize() = 0;
diff --git a/GPU/GPUState.h b/GPU/GPUState.h
index 1e639e4f26..e70fbb48c1 100644
--- a/GPU/GPUState.h
+++ b/GPU/GPUState.h
@@ -448,9 +448,13 @@ enum {
 	GPU_SUPPORTS_DUALSOURCE_BLEND = FLAG_BIT(0),
 	GPU_SUPPORTS_GLSL_ES_300 = FLAG_BIT(1),
 	GPU_SUPPORTS_GLSL_330 = FLAG_BIT(2),
+	GPU_SUPPORTS_UNPACK_SUBIMAGE = FLAG_BIT(3),
+	GPU_SUPPORTS_BLEND_MINMAX = FLAG_BIT(4),
 	GPU_SUPPORTS_NV_FRAMEBUFFER_BLIT = FLAG_BIT(10),
 	GPU_SUPPORTS_ANY_FRAMEBUFFER_FETCH = FLAG_BIT(20),
 	GPU_SUPPORTS_FBO_ARB = FLAG_BIT(25),
+	GPU_SUPPORTS_OES_TEXTURE_NPOT = FLAG_BIT(26),
+	GPU_IS_MOBILE = FLAG_BIT(29),
 	GPU_PREFER_CPU_DOWNLOAD = FLAG_BIT(30),
 	GPU_PREFER_REVERSE_COLOR_ORDER = FLAG_BIT(31),
 };
diff --git a/UI/EmuScreen.cpp b/UI/EmuScreen.cpp
index 8b2db8bdc0..2bbc151f21 100644
--- a/UI/EmuScreen.cpp
+++ b/UI/EmuScreen.cpp
@@ -180,6 +180,7 @@ void EmuScreen::dialogFinished(const Screen *dialog, DialogResult result) {
 			quit_ = false;
 		}
 		RecreateViews();
+		gpu->CheckGPUFeatures();
 	}
 }
 
 static void AfterStateLoad(bool success, void *ignored) {
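
For context, a minimal standalone sketch of the feature-flag pattern this diff centralizes: CheckGPUFeatures() computes a bitmask once (at startup and when returning from settings), and hot paths query it via gstate_c.Supports() instead of re-reading gl_extensions. GPUStateCacheSketch and main() below are hypothetical stand-ins for illustration only; the flag values mirror GPUState.h.

// Illustrative sketch only, not part of this diff.
#include <cstdint>
#include <cstdio>

#define FLAG_BIT(n) (1u << (n))

enum : uint32_t {
	GPU_SUPPORTS_BLEND_MINMAX = FLAG_BIT(4),
	GPU_SUPPORTS_OES_TEXTURE_NPOT = FLAG_BIT(26),
	GPU_IS_MOBILE = FLAG_BIT(29),
};

// Hypothetical stand-in for the real GPUStateCache in GPUState.h.
struct GPUStateCacheSketch {
	uint32_t featureFlags = 0;
	bool Supports(uint32_t flag) const { return (featureFlags & flag) != 0; }
};

int main() {
	GPUStateCacheSketch gstate_c;

	// CheckGPUFeatures(): translate extension/config queries into one bitmask, once.
	uint32_t features = 0;
	features |= GPU_SUPPORTS_BLEND_MINMAX;     // e.g. EXT_blend_minmax or GLES3 available
	features |= GPU_SUPPORTS_OES_TEXTURE_NPOT; // e.g. OES_texture_npot available
	gstate_c.featureFlags = features;

	// Hot path: cheap bit tests instead of re-checking gl_extensions every draw.
	if (gstate_c.Supports(GPU_SUPPORTS_BLEND_MINMAX))
		printf("use GL_MIN/GL_MAX blend equations\n");
	if (!gstate_c.Supports(GPU_IS_MOBILE))
		printf("allow higher texture scale factors\n");
	return 0;
}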