player_login() flushes the output queue before receiving input. The
receive obeys max_idle, but the flush doesn't, which means a client
could hog the thread indefinitely.
Broken in commit
08b94556 (v4.3.20) "Reimplement max_idle without a
separate thread". Until then, the idle thread aborted a stuck attempt
to flush output.
Denial of service seems possible.
 void
 player_login(void *ud)
 {
     char buf[128];
     char space[128];
     int ac;

     pr_id(player, C_INIT, "Empire server ready\n");

     for (;;) {
+	deadline = player->curup + minutes(max_idle);
 	if (io_outputwaiting(player->iop)) {
-	    if (io_output(player->iop, (time_t)-1) <= 0)
+	    if (io_output(player->iop, deadline) <= 0)
 		break;
 	    continue;
 	}
 	if (io_gets(player->iop, buf, sizeof(buf)) < 0) {
-	    res = io_input(player->iop, player->curup + minutes(max_idle));
+	    res = io_input(player->iop, deadline);
 	    if (res <= 0) {
 		if (res == 0 && !io_eof(player->iop))
 		    pr_id(player, C_DATA, "idle connection terminated\n");