Common/DebugInterface: Make return value of GetColor() a u32

At its only usage point, its return value is stored into a u32, and the
default implementation returns 0xFFFFFFFF (a literal of unsigned type,
which would read as -1 if interpreted as a signed int). Given all of the
bits are used to determine a color, it makes slightly more sense to
treat this as an unsigned value as opposed to a signed one.
This commit is contained in:
Lioncash
2019-07-08 18:13:27 -04:00
parent a9a9b193bb
commit d4d485b692
6 changed files with 28 additions and 24 deletions

View File

@@ -4,6 +4,7 @@
#include "Core/HW/DSPLLE/DSPDebugInterface.h"
#include <array>
#include <cstddef>
#include <string>
@@ -257,17 +258,8 @@ void DSPDebugInterface::ToggleMemCheck(u32 address, bool read, bool write, bool
// =======================================================
// Separate the blocks with colors.
// -------------
int DSPDebugInterface::GetColor(u32 address)
u32 DSPDebugInterface::GetColor(u32 address)
{
static const int colors[6] = {
0xd0FFFF, // light cyan
0xFFd0d0, // light red
0xd8d8FF, // light blue
0xFFd0FF, // light purple
0xd0FFd0, // light green
0xFFFFd0, // light yellow
};
// Scan backwards so we don't miss it. Hm, actually, let's not - it looks pretty good.
int addr = -1;
for (int i = 0; i < 1; i++)
@@ -284,7 +276,16 @@ int DSPDebugInterface::GetColor(u32 address)
return 0xFFFFFF;
if (symbol->type != Common::Symbol::Type::Function)
return 0xEEEEFF;
return colors[symbol->index % 6];
static constexpr std::array<u32, 6> colors{
0xd0FFFF, // light cyan
0xFFd0d0, // light red
0xd8d8FF, // light blue
0xFFd0FF, // light purple
0xd0FFd0, // light green
0xFFFFd0, // light yellow
};
return colors[symbol->index % colors.size()];
}
// =============

View File

@@ -71,7 +71,7 @@ public:
void SetPC(u32 address) override;
void Step() override {}
void RunToBreakpoint() override;
int GetColor(u32 address) override;
u32 GetColor(u32 address) override;
std::string GetDescription(u32 address) override;
void Clear() override;