# File lib/mongrel.rb, line 546
def process_client(client)
  begin
    parser = HttpParser.new
    params = HttpParams.new
    request = nil
    data = client.readpartial(Const::CHUNK_SIZE)
    nparsed = 0

    # Assumption: nparsed will always be less than data.length, because data
    # grows with each read.  If the socket stops producing data, the read
    # below fails or returns nothing and the loop exits.  The effect is to
    # stop processing as soon as the client can't fill the buffer for
    # further parsing.
    while nparsed < data.length
      nparsed = parser.execute(params, data, nparsed)

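      # finished? becomes true once the parser has consumed the complete
      # request header block; until then we keep reading and re-running execute.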
      if parser.finished?
        script_name, path_info, handlers = @classifier.resolve(params[Const::REQUEST_PATH])

        if handlers
          params[Const::PATH_INFO] = path_info
          params[Const::SCRIPT_NAME] = script_name
          params[Const::REMOTE_ADDR] = params[Const::HTTP_X_FORWARDED_FOR] || client.peeraddr.last
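          # Only the first matching handler is checked for request_notify; if
          # it is set, that handler is passed along to receive request progress
          # callbacks (e.g. for tracking large uploads).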
          notifier = handlers[0].request_notify ? handlers[0] : nil

          request = HttpRequest.new(params, client, notifier)

          # During a large upload the client may close the socket mid-request.
          # HttpRequest#initialize signals an aborted request by leaving the
          # body nil, so skip such requests entirely.
          break if request.body == nil

          # The request is good so far, so continue on to building the response.
          response = HttpResponse.new(client)

          # Process each handler in registered order until we run out or one
          # finalizes the response.
          handlers.each do |handler|
            handler.process(request, response)
            break if response.done or client.closed?
          end

          # And finally, if nobody closed the response off, we finalize it.
          unless response.done or client.closed?
            response.finished
          end
        else
          # No handler registered for this URI, so return a stock 404 response.
          client.write(Const::ERROR_404_RESPONSE)
        end

        break # done
      else
        # The parser isn't finished yet, so read more data from the client and
        # keep parsing on the next pass.
        chunk = client.readpartial(Const::CHUNK_SIZE)
        break if !chunk or chunk.length == 0  # read failed, stop processing

        data << chunk
        if data.length >= Const::MAX_HEADER
          raise HttpParserError.new("HEADER is longer than allowed, aborting client early.")
        end
      end
    end
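  # These errors just mean the client hung up or the socket died mid-request;
  # there's nothing useful to do beyond closing the connection quietly.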
  rescue EOFError, Errno::ECONNRESET, Errno::EPIPE, Errno::EINVAL, Errno::EBADF
    client.close rescue Object
  rescue HttpParserError
    if $mongrel_debug_client
      STDERR.puts "#{Time.now}: BAD CLIENT (#{params[Const::HTTP_X_FORWARDED_FOR] || client.peeraddr.last}): #$!"
      STDERR.puts "#{Time.now}: REQUEST DATA: #{data.inspect}\n---\nPARAMS: #{params.inspect}\n---\n"
    end
  # Out of file descriptors: reap dead worker threads to free some up.
  rescue Errno::EMFILE
    reap_dead_workers('too many files')
  rescue Object
    STDERR.puts "#{Time.now}: ERROR: #$!"
    STDERR.puts $!.backtrace.join("\n") if $mongrel_debug_client
  ensure
    # Whatever happened, close the socket and clean up any Tempfile body.
    client.close rescue Object
    request.body.delete if request and request.body.class == Tempfile
  end
end
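
# For context, a minimal sketch of the handler side that this loop drives:
# process_client resolves the handlers registered for the request path, then
# calls process(request, response) on each in order.  The class and URI below
# are hypothetical; they assume only Mongrel's documented HttpHandler and
# HttpServer#register APIs.

require 'mongrel'

class HelloHandler < Mongrel::HttpHandler
  # Invoked once per matching request by the handlers.each loop above.
  def process(request, response)
    response.start(200) do |head, out|
      head["Content-Type"] = "text/plain"
      out.write("hello!\n")
    end
  end
end

server = Mongrel::HttpServer.new("0.0.0.0", "3000")
server.register("/hello", HelloHandler.new)   # found later by @classifier.resolve
server.run.join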