Bug#240070: g++ generates wrong code
Package: g++-2.95
Version: 2.95.4-11woody1
Package: g++-3.0
Version: 3.0.4-7
Hi,
the bug wich I describe below seems to be in both packages - this is
the reason why I write one report with two packages - sorry.
Problem:
--------
Compiling my test code with and without -O2 compiler option generates
different behaviors. The test code is attached (it is a TCP Server
wich select()s on the different connections - it's really ugly and
unstructured - please tell me if you don't come along with it!).
Reproduction:
-------------
- compile the attached server.cc mit the contributed Makefile; one
time with and one time without -O2 Option
- start the "server" binaries and connect to port 2000 with telnet:
$ telnet localhost 2000
- One time the connection will be established, the other time,
accept() will fail with error: invalid argument
I have reproduced this behavior on different Debian-3.0 boxes. To be
sure I tested it on FreeBSD and (old) Redhat as well: They don't have
this effect.
I have another test code example, wich one time segfaults and the
other time works. Please tell me if you are interested in more code.
Overview:
---------
System: Compiler: with -O2 without -O2
---------------------------------------------------------------------
Debian-3.0 g++-2.954-11woody1 works fails!
Debian-3.0 g++-3.0.4-7 fails! works
FreeBSD-4.9 g++-2.95.4 works works
Redhat-7.0 g++-2.96 20000731 works works
--
Martin
CC=g++-3.0
all:
$(CC) -c server.cc -g -O2
# $(CC) -c server.cc -g
$(CC) -o server server.o -lstdc++
clean:
rm -f *.o server
#include <stdio.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/poll.h>
#include <list.h>
typedef list<int> intlist;
void initFDSet(int *maxfds, fd_set *set, int serverSocket,
intlist *clients)
{
/* Initialize the file descriptor set. */
FD_ZERO (set);
FD_SET (serverSocket, set);
*maxfds = serverSocket;
for (list<int>::iterator i = clients->begin(); i != clients->end(); i++)
{
FD_SET(*i,set);
if (*i > *maxfds)
*maxfds = *i;
}
}
int main()
{
struct sockaddr_in sin;
struct sockaddr_in pin;
socklen_t addrlen;
int i;
struct timeval timeout;
int maxfds;
int _serverSocket;
int client;
list<int> clients;
fd_set set;
if ((_serverSocket = socket(PF_INET, SOCK_STREAM, 0)) == -1) {
perror("socket");
exit(1);
}
i = 1;
setsockopt(_serverSocket,SOL_SOCKET,SO_REUSEADDR, &i, sizeof(i));
/* complete the socket structure */
memset(&sin, 0, sizeof(sin));
sin.sin_family = AF_INET;
sin.sin_addr.s_addr = INADDR_ANY;
sin.sin_port = htons(2000);
/* bind the socket to the port number */
if (bind(_serverSocket, (struct sockaddr *) &sin, sizeof(sin)) == -1) {
perror("bind");
exit(1);
}
/* show that we are willing to listen */
if (listen(_serverSocket, 128) == -1) {
perror("listen");
exit(1);
}
/* Initialize the timeout data structure. */
timeout.tv_sec = 5;
timeout.tv_usec = 0;
initFDSet(&maxfds,&set,_serverSocket,&clients);
while (1)
{
select (maxfds + 1, &set, NULL, NULL, &timeout);
if (FD_ISSET(_serverSocket,&set))
{
// new connection
printf("new connection\n");
if ((client = accept(_serverSocket,
(struct sockaddr *) &pin, &addrlen)) == -1)
{
perror("accept");
exit(1);
}
clients.push_back(client);
}
for (list<int>::iterator i = clients.begin(); i != clients.end(); i++)
{
client = *i;
if (FD_ISSET(client,&set))
{
char buf[512];
int size = recv(client,buf,sizeof(buf),0);
if (! size)
{
clients.remove(client);
close(client);
printf("%X client closed connection\n",client);
break;
}
else
{
buf[size] = 0;
printf("%X recv: %d - %s\n",client,size,buf);
}
}
}
initFDSet(&maxfds,&set,_serverSocket,&clients);
}
}
Reply to: