The problem briefly.
I'm just starting out with Direct3D 11 and DXGI and I've run into a problem that plagues not only my project but other games as well. When I start my application in full screen mode, for some reason it doesn't choose my monitor's native resolution (2560x1440) but some weird one (1280x1440). It happens in various games and in my project too. I wonder if there's a fix for that, or maybe I'm doing something wrong.
Minimal reproducible example:
// defining INITGUID lets us use the IID_* constants instead of the __uuidof() intrinsic
#define INITGUID
#include <windows.h>
#include <dxgi1_6.h>
#include <d3d11_4.h>
#include <cstdint>
#include <iostream>
#include <vector>
#include <functional>
HINSTANCE g_instance = nullptr;
HWND g_window_handle = nullptr;
IDXGIFactory7* g_dxgi_factory = nullptr;
IDXGIAdapter4* g_dxgi_graphics_card = nullptr;
IDXGIOutput6* g_dxgi_monitor = nullptr;
IDXGIDevice4* g_dxgi_device = nullptr;
IDXGISwapChain1* g_swapchain = nullptr;
ID3D11Device* g_d3d11_device = nullptr;
ID3D11DeviceContext* g_d3d11_device_context = nullptr;
const D3D_FEATURE_LEVEL g_feature_level = D3D_FEATURE_LEVEL_11_1;
bool g_game_is_done = false; // never changes, for demonstration
void loop(std::function<bool()> engine_loop_callback);
bool queue_is_ok(MSG* in_out_message, bool& out_done);
LRESULT CALLBACK window_procedure(HWND window_handle, UINT message_id, WPARAM w_param, LPARAM l_param);
int APIENTRY WinMain(_In_ HINSTANCE instance, _In_opt_ HINSTANCE prev_instance, _In_ char* command_line, _In_ int32_t show_command)
{
// store the application instance
g_instance = instance;
// set up window class
WNDCLASSEXA window_class {};
window_class.cbSize = sizeof(WNDCLASSEXA); // size of the structure
window_class.style = 0; // no special styles required
window_class.lpfnWndProc = window_procedure; // use our static window procedure
window_class.cbClsExtra = 0; // no extra memory for now
window_class.cbWndExtra = 0; // no extra memory for now
window_class.hInstance = g_instance; // the hInstance is the .exe (because zenosys is a static lib)
window_class.hIcon = nullptr; // no icon loaders for now
window_class.hCursor = nullptr; // no cursor loaders for now
window_class.hbrBackground = nullptr; // no background color as we don't use gdi for rendering
window_class.lpszMenuName = nullptr; // don't use menu (games don't have toolbars lmao)
window_class.lpszClassName = "MyApplication"; // class name to identify it in the system
window_class.hIconSm = nullptr; // no icon loaders for now
// use atom to store the class name
ATOM class_atom = RegisterClassExA(&window_class);
// create the window using mostly default params
g_window_handle = CreateWindowExA(
0,
MAKEINTATOM(class_atom), "Title",
0,
CW_USEDEFAULT, CW_USEDEFAULT,
CW_USEDEFAULT, CW_USEDEFAULT,
nullptr, nullptr, g_instance, nullptr
);
// check if window is successfully created
int32_t error;
if (not g_window_handle)
{
error = GetLastError();
return 1;
}
// show it to the user
ShowWindow(g_window_handle, SW_SHOWDEFAULT);
// create the dxgi factory and check for error.
HRESULT result = S_OK;
result = CreateDXGIFactory2(0, IID_IDXGIFactory7, reinterpret_cast<void**>(&g_dxgi_factory));
if (FAILED(result))
{
std::cerr << "Failed on CreateDXGIFactory2!\t" << std::hex << result << std::endl;
return 1;
}
// vector holding all found adapters
std::vector<IDXGIAdapter4*> adapters;
// taken directly and a little rewritten from https://learn.microsoft.com/en-us/windows/win32/api/DXGI/nf-dxgi-idxgifactory1-enumadapters1
{
IDXGIAdapter4* adapter; // used to temporarily store the found adapter
// getting all the adapters
bool could_find; // to check whether there are adapters left
for (uint8_t index = 0;; index++)
{
// get the adapter in the order of "highest performance first"
could_find = g_dxgi_factory->EnumAdapterByGpuPreference(
index, DXGI_GPU_PREFERENCE_HIGH_PERFORMANCE,
IID_IDXGIAdapter4, reinterpret_cast<void**>(&adapter)
) != DXGI_ERROR_NOT_FOUND;
// finish if couldn't find the next adapter
if (not could_find)
break;
adapters.push_back(adapter); // store the found adapter
}
}
// store the first graphics adapter. There's always at least one (generic windows renderer)
g_dxgi_graphics_card = adapters[0];
// get the first monitor of the graphics card (temporary solution)
result = g_dxgi_graphics_card->EnumOutputs(0, reinterpret_cast<IDXGIOutput**>(&g_dxgi_monitor));
if(FAILED(result))
{
std::cerr << "Failed on EnumOutputs!\t" << std::hex << result << std::endl;
return 1;
}
// Create Direct3D device
result = D3D11CreateDevice(
g_dxgi_graphics_card,
D3D_DRIVER_TYPE_UNKNOWN,
nullptr,
0,
&g_feature_level,
1,
D3D11_SDK_VERSION,
reinterpret_cast<ID3D11Device**>(&g_d3d11_device),
nullptr,
reinterpret_cast<ID3D11DeviceContext**>(&g_d3d11_device_context)
);
if(FAILED(result))
{
std::cerr << "Failed to create Direct3D device!\t" << std::hex << result << std::endl;
return 1;
}
// From Direct3D device obtain its DXGI counterpart
result = g_d3d11_device->QueryInterface<IDXGIDevice4>(&g_dxgi_device);
if(FAILED(result))
{
std::cerr << "Failed to obtain DXGI device!\t" << std::hex << result << std::endl;
return 1;
}
// settings for regular, windowed swapchain
DXGI_SWAP_CHAIN_DESC1 swapchain_descriptor {};
swapchain_descriptor.Width = 0; // zero means: take the width from the window's client area
swapchain_descriptor.Height = 0; // zero means: take the height from the window's client area
swapchain_descriptor.Format = DXGI_FORMAT_R16G16B16A16_FLOAT; // linear color format that is supported by flip swap chain
swapchain_descriptor.Stereo = false; // true is for VR-like things (two screens rendering at the same time)
swapchain_descriptor.SampleDesc = {1, 0}; // disable MSAA
swapchain_descriptor.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT | DXGI_USAGE_SHADER_INPUT; // how can the swapchain be used
swapchain_descriptor.BufferCount = 2; // we want the front and the back buffer (sums up to 2)
swapchain_descriptor.Scaling = DXGI_SCALING_NONE; // no scaling: the back buffer is never stretched to fit the target
swapchain_descriptor.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL; // use flip model instead of bitblt model (more in MSDN)
swapchain_descriptor.AlphaMode = DXGI_ALPHA_MODE_UNSPECIFIED; // transparency behavior of the back buffer; other values don't work for this configuration
swapchain_descriptor.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; // allows alt tab functionality and windowed to full screen transition
// get display modes and take the best one (highest resolution and refresh rate)
uint32_t number_of_display_modes = 0;
// take only the number of display modes
g_dxgi_monitor->GetDisplayModeList1(
DXGI_FORMAT_R16G16B16A16_FLOAT, DXGI_ENUM_MODES_SCALING,
&number_of_display_modes, nullptr
);
// use this number to dynamically init the array of modes
DXGI_MODE_DESC1* display_modes = new DXGI_MODE_DESC1 [number_of_display_modes];
// fill the array
g_dxgi_monitor->GetDisplayModeList1(
DXGI_FORMAT_R16G16B16A16_FLOAT, DXGI_ENUM_MODES_SCALING,
&number_of_display_modes, display_modes
);
// pick the best display mode from it
DXGI_MODE_DESC1 best_display_mode = display_modes [number_of_display_modes - 1];
// settings for fullscreen swapchain
DXGI_SWAP_CHAIN_FULLSCREEN_DESC swapchain_fullscreen_descriptor {};
swapchain_fullscreen_descriptor.RefreshRate = best_display_mode.RefreshRate; // get the highest refresh rate from the monitor
swapchain_fullscreen_descriptor.ScanlineOrdering = best_display_mode.ScanlineOrdering; // get scanline ordering from the monitor
swapchain_fullscreen_descriptor.Scaling = DXGI_MODE_SCALING_CENTERED; // never use scaling (msdn docs)
swapchain_fullscreen_descriptor.Windowed = false; // start the application in fullscreen
// create the swapchain
result = g_dxgi_factory->CreateSwapChainForHwnd(
g_d3d11_device, g_window_handle,
&swapchain_descriptor, &swapchain_fullscreen_descriptor,
nullptr, reinterpret_cast<IDXGISwapChain1**>(&g_swapchain)
);
if(FAILED(result))
{
std::cerr << "Couldn't create swapchain for the window\t" << std::hex << result << std::endl;
return 1;
}
// PAY ATTENTION HERE. WITHOUT THIS THE RESOLUTION IS INCORRECT
// create the new mode to force the swapchain to the native res. Otherwise the resolution is incorrect
DXGI_MODE_DESC new_resolution {};
new_resolution.Width = best_display_mode.Width;
new_resolution.Height = best_display_mode.Height;
new_resolution.RefreshRate = best_display_mode.RefreshRate;
new_resolution.Format = best_display_mode.Format;
new_resolution.ScanlineOrdering = best_display_mode.ScanlineOrdering;
new_resolution.Scaling = best_display_mode.Scaling;
// apply the new resolution
g_swapchain->ResizeTarget(&new_resolution);
// END PAY ATTENTION
// delete unused display modes
delete[] display_modes;
// create a callback function. It's not used here, but in my project it is
auto engine_loop = []() -> bool
{
// do nothing for example
return g_game_is_done;
};
loop(engine_loop);
return 0;
}
void loop(std::function<bool()> engine_loop_callback)
{
// create a message
MSG message {};
bool done = false;
// as long as we are not done
while (not done)
{
// check whether we are safe to call engine loop and if we can handle message
if (not queue_is_ok(&message, done))
continue;
// do the engine stuff
done = engine_loop_callback();
}
}
bool queue_is_ok(MSG* in_out_message, bool& out_done)
{
// check the message queue if it's empty
int32_t queue_is_empty = not PeekMessageA(in_out_message, g_window_handle, 0, 0, PM_NOREMOVE); // returns 0 if empty so we negate it
if (queue_is_empty)
return true; // go straight to engine loop if no messages were found
// check the queue for errors
switch (GetMessageA(in_out_message, g_window_handle, 0, 0))
{
case 0: // WM_QUIT -> must quit the application
{
out_done = true; // mark as done
return false; // don't call engine loop
}
case -1: // there was an error
{
int32_t error_code = GetLastError(); // get error
std::cerr << "Error code:\t" << std::hex << error_code << std::endl;
PostQuitMessage(0); // leave the application
return false; // jump to next loop iter to handle WM_QUIT
}
default: // if everything's fine
{
DispatchMessageA(in_out_message); // dispatch the message
return true; // proceed to engine loop
}
}
}
LRESULT CALLBACK window_procedure(HWND window_handle, UINT message_id, WPARAM w_param, LPARAM l_param)
{
switch (message_id)
{
case WM_DESTROY:
PostQuitMessage(0); // post WM_QUIT so the message loop can exit
return 0;
default:
return DefWindowProcA(window_handle, message_id, w_param, l_param);
}
}
Expected behavior
The expected behavior is that DXGI uses my native desktop resolution by default.
What I've tried
- Debugging. It showed that the last enumerated display mode is the best one, and that DXGI uses some weird resolution by default.
- Setting the resolution after creating the swapchain works, but it leaves every other program that was previously opened in full screen resized to an incorrect resolution on the desktop.
- Trying the same code on another PC, where it worked without a problem. My PC has an NVIDIA RTX 2060 graphics card and the test PC has an Intel HD 630.
What I think is the problem
To my mind it seems that for some reason the NVIDIA driver supplies incorrect data to DXGI. This problem seems to happen only on my machine, and I have never been able to find a solution. It also affects some other games/programs (but not all), which supports my theory. I just want to know whether the problem is my code or my hardware.
CodePudding user response:
You create the swap chain with Width and Height set to zero, instructing DXGI to take the size from the window. That size is then a fixed one that is unrelated to any display mode, including the prospective full screen mode: when you switch to full screen, the very same window-sized target is what gets flipped.
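To make that concrete, here is roughly what the zero width/height resolves to at creation time. This is a sketch reusing the question's globals, based on the documented behavior that a zero extent means "use the window's client area":
RECT client_rect {};
GetClientRect(g_window_handle, &client_rect); // the rectangle DXGI effectively falls back to
// with CW_USEDEFAULT this is whatever size the system happened to pick,
// not a display mode (plausibly ~1280 wide here, hence the 1280x1440 match)
UINT implied_width = static_cast<UINT>(client_rect.right - client_rect.left);
UINT implied_height = static_cast<UINT>(client_rect.bottom - client_rect.top);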
Going full screen then uses the current target size plus the non-dimension properties from DXGI_SWAP_CHAIN_FULLSCREEN_DESC, and together these suggest the display mode to choose; that is how you end up with 1280x1440 instead of 2560x1440.
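You can verify this from the application itself. Here is a small diagnostic sketch, assuming the question's g_swapchain, that reads back the size the swap chain actually ended up with:
DXGI_SWAP_CHAIN_DESC1 current_descriptor {};
g_swapchain->GetDesc1(&current_descriptor); // IDXGISwapChain1::GetDesc1
// prints the window-derived buffer size rather than the native 2560x1440
std::cout << "swap chain buffers: " << current_descriptor.Width << "x" << current_descriptor.Height << std::endl;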
If you resize the target immediately, you possibly get excessive rendering because the DXGI target resolution exceeds the window size, but the subsequent switch to full screen gets you the mode you expect (see in particular the remarks on the pTarget parameter of IDXGISwapChain::SetFullscreenState).
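As an aside on pTarget: passing an output explicitly pins full screen to that monitor, while nullptr lets DXGI derive the output from the window's position. A sketch reusing the question's g_dxgi_monitor (IDXGIOutput6 derives from IDXGIOutput, so it converts implicitly):
g_swapchain->SetFullscreenState(TRUE, g_dxgi_monitor); // full screen on this specific output
g_swapchain->SetFullscreenState(FALSE, nullptr); // back to windowed; pTarget must be null when leaving full screen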
You are likely to want to control the target size explicitly to keep it appropriate: have it match your window in windowed mode, and have it match the desired display mode (whether that is the monitor's native resolution or not) before entering full screen.
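Putting it together, one ordering that keeps the target size under control is: create the swap chain windowed, resize the target to the mode you want, switch to full screen, then resize the buffers. Below is a sketch against the question's variables, with error checks omitted; treat the exact sequence as a suggestion rather than the only valid one:
swapchain_fullscreen_descriptor.Windowed = true; // create the swap chain windowed first
result = g_dxgi_factory->CreateSwapChainForHwnd(
g_d3d11_device, g_window_handle,
&swapchain_descriptor, &swapchain_fullscreen_descriptor,
nullptr, &g_swapchain
);
// grow the target to the display mode we actually want (best_display_mode
// is the one picked from GetDisplayModeList1 in the question)
DXGI_MODE_DESC target_mode {};
target_mode.Width = best_display_mode.Width;
target_mode.Height = best_display_mode.Height;
target_mode.RefreshRate = best_display_mode.RefreshRate;
target_mode.Format = best_display_mode.Format;
target_mode.ScanlineOrdering = best_display_mode.ScanlineOrdering;
target_mode.Scaling = best_display_mode.Scaling;
g_swapchain->ResizeTarget(&target_mode);
// the mode switch now starts from a target that already matches the desired mode
g_swapchain->SetFullscreenState(TRUE, nullptr);
// finally bring the buffers up to the new target size (same flags as at creation)
g_swapchain->ResizeBuffers(0, 0, 0, DXGI_FORMAT_UNKNOWN, DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH);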