I have reduced my test code to pretty much the simplest version that still fails:
module sample;

import std.algorithm : min;
import std.array;
import std.c.windows.windows;
import std.conv;
import std.format;
import std.stdio;

alias HANDLE handle;
void main(){
    // Create a fresh text-mode screen buffer to draw into.
    auto buff = CreateConsoleScreenBuffer(GENERIC_READ|GENERIC_WRITE, FILE_SHARE_READ|FILE_SHARE_WRITE, null, CONSOLE_TEXTMODE_BUFFER, null);
    if( buff == INVALID_HANDLE_VALUE )
        throw new Exception("Unable to create new screen buffer. Error: " ~ text( GetLastError() ) );

    COORD c = {60, 30};

    // Query the window rectangle so the requested buffer is never smaller
    // than the visible window — SetConsoleScreenBufferSize fails with
    // ERROR_INVALID_PARAMETER when either buffer dimension is below the
    // corresponding window dimension.
    CONSOLE_SCREEN_BUFFER_INFO csbi;            // stack struct; no need to heap-allocate
    if( !GetConsoleScreenBufferInfo( buff, &csbi ) )
        throw new Exception("Unable to query screen buffer info. Error: " ~ text( GetLastError() ) );

    // srWindow coordinates are INCLUSIVE, so the window spans
    // (Right - Left + 1) columns and (Bottom - Top + 1) rows. The missing
    // +1 here made the clamped buffer one cell too small and caused the
    // "invalid parameter" failure below.
    short width  = cast(short)(csbi.srWindow.Right  - csbi.srWindow.Left + 1);
    short height = cast(short)(csbi.srWindow.Bottom - csbi.srWindow.Top  + 1);
    if(c.X < width)
        c.X = width;
    if(c.Y < height)
        c.Y = height;

    // Resize the buffer now that the dimensions are known to be legal.
    if(SetConsoleScreenBufferSize( buff, c ) == 0){
        auto err = GetLastError();
        throw new Exception("Unable to set buffer dimensions. Error: " ~ text(err) );
    }

    SetConsoleTextAttribute( buff, defineAttr( Color.White|Color.Intensity, Color.Blue ) );
    setCursor( buff, 2, 28 );
    readln();   // keep the console alive until the user presses Enter
}
/// Console colour components. Red/Green/Blue combine bitwise;
/// Intensity brightens whichever component set it is OR'd with.
enum Color{
    Black     = 0,
    Blue      = 1,
    Green     = 2,
    Red       = 4,
    White     = 7, // Red | Green | Blue
    Intensity = 8,
};
/// Translate a foreground/background pair of Color flags into a Win32
/// console character-attribute word built from the FOREGROUND_* and
/// BACKGROUND_* bit constants.
ushort defineAttr(Color fg = Color.Red|Color.Green|Color.Blue, Color bg = Color.Black){
    ushort attr;
    // Nested helper: OR `bit` into the result when `flag` is present in `source`.
    void set(Color source, Color flag, ushort bit){
        if(source & flag)
            attr |= bit;
    }
    set(fg, Color.Red,       FOREGROUND_RED);
    set(fg, Color.Green,     FOREGROUND_GREEN);
    set(fg, Color.Blue,      FOREGROUND_BLUE);
    set(fg, Color.Intensity, FOREGROUND_INTENSITY);
    set(bg, Color.Red,       BACKGROUND_RED);
    set(bg, Color.Green,     BACKGROUND_GREEN);
    set(bg, Color.Blue,      BACKGROUND_BLUE);
    set(bg, Color.Intensity, BACKGROUND_INTENSITY);
    return attr;
}
/// Move the caret of screen buffer `n` to column `x`, row `y` (0-based).
void setCursor(handle n, short x, short y){
    COORD pos;
    pos.X = x;
    pos.Y = y;
    SetConsoleCursorPosition( n, pos );
}
I don't understand which parameter the "invalid parameter" error is referring to, so if anyone could shed some light on this, I would very much appreciate it.
EDIT: I forgot to mention, I have factored out nearly every function here, but the code is otherwise exactly as written. Also, I have tested on both a brand new screen buffer, as well as on the default, both to the same effect.
SOLVED: I ended up stealing the DOSBox code linked by aqrit.
The original link: http://sourceforge.net/p/dosbox/code-0/HEAD/tree/dosbox/trunk/src/debug/debug_win32.cpp
My implementation:
/// Shamelessly ripped from the DOSBox sources and adapted for D
/// Shamelessly ripped from the DOSBox sources and adapted for D.
/// Resizes screen buffer `n` to `w` columns by `h` rows. The buffer may
/// never be smaller than the window it backs, so the order of the two
/// Win32 calls matters: shrink the window before the buffer, grow the
/// buffer before the window.
/// Throws: Exception when the buffer info or window limits can't be read.
void setBufferDims(handle n, short w, short h){
    assert(n != INVALID_HANDLE_VALUE, "invalid handle passed to setBufferDims()");

    CONSOLE_SCREEN_BUFFER_INFO csbi;
    if(!GetConsoleScreenBufferInfo(n, &csbi))
        throw new Exception( "Unable to retrieve console screen buffer data. Error: "~text(GetLastError()) );

    // Largest window the current font/screen allows; clamp the new window to it.
    COORD largest = GetLargestConsoleWindowSize( n );
    if(largest.X == 0 && largest.Y == 0)
        throw new Exception( "Unable to retrieve largest possible window coordinates: "~text(GetLastError()) );

    // Window rectangle coordinates are inclusive, hence the -1.
    SMALL_RECT rect;
    rect.Left = rect.Top = 0;
    rect.Right  = cast(short)(min(w, largest.X)-1);
    rect.Bottom = cast(short)(min(h, largest.Y)-1);

    COORD size = { w, h };

    // NOTE(review): the original compared areas with strict `>` and `<`,
    // so an equal-area reshape (e.g. 60x30 -> 30x60) fell through both
    // branches and did nothing; `>=` folds that case into the shrink path.
    // The dead `window_dims = csbi.dwSize` copy was also removed.
    if(csbi.dwSize.X * csbi.dwSize.Y >= w * h){
        // Shrinking (or equal area): pull the window in first so the
        // buffer is never smaller than the visible window.
        SetConsoleWindowInfo( n, true, &rect );
        SetConsoleScreenBufferSize( n, size );
    } else {
        // Growing: enlarge the buffer first, then the window.
        SetConsoleScreenBufferSize( n, size );
        SetConsoleWindowInfo( n, true, &rect );
    }
}
Is it wrong that I find it really cool that the D code is almost identical to its C/C++ origins?