Mirror of https://github.com/puma/puma.git (synced 2022-11-09 13:48:40 -05:00)
Bugfixes for 0.3.13.4 release firming up IO tweaks.
git-svn-id: svn+ssh://rubyforge.org/var/svn/mongrel/trunk@293 19e92222-5c0b-0410-8929-a290d50e31e9
parent ab3c8082de
commit c05069479d
5 changed files with 229 additions and 225 deletions
@@ -33,7 +33,7 @@ class Start < GemPlugin::Plugin "/commands"
['-G', '--generate CONFIG', "Generate a config file for -C", :@generate, nil],
['', '--user USER', "User to run as", :@user, nil],
['', '--group GROUP', "Group to run as", :@group, nil],
['', '--prefix PATH', "URL prefix for Rails app", :@prefix, '/']
['', '--prefix PATH', "URL prefix for Rails app", :@prefix, nil]
]
end

@@ -44,6 +44,7 @@ class Start < GemPlugin::Plugin "/commands"
# change there to start, then we'll have to come back after daemonize
Dir.chdir(@cwd)

valid? @prefix[-1].chr != "/", "Don't put / at the end of your prefix" if @prefix
valid_dir? File.dirname(@log_file), "Path to log file not valid: #@log_file"
valid_dir? File.dirname(@pid_file), "Path to pid file not valid: #@pid_file"
valid_dir? @docroot, "Path to docroot not valid: #@docroot"

@@ -106,8 +107,9 @@ class Start < GemPlugin::Plugin "/commands"
debug "/"
end

log "Starting Rails with #{defaults[:environment]} environment on #{defaults[:prefix]} ..."
uri defaults[:prefix], :handler => rails(:mime => mime, :prefix => @prefix)
log "Starting Rails with #{defaults[:environment]} environment..."
log "Mounting Rails at #{defaults[:prefix]}..." if defaults[:prefix]
uri defaults[:prefix] || "/", :handler => rails(:mime => mime, :prefix => defaults[:prefix])
log "Rails loaded."

log "Loading any Rails specific GemPlugins"

@@ -30,40 +30,40 @@ static int http_parser_error = 1;
#line 110 "ext/http11/http11_parser.rl"

int http_parser_init(http_parser *parser) {
int cs = 0;

int cs = 0;

#line 36 "ext/http11/http11_parser.c"
{
cs = http_parser_start;
}
#line 114 "ext/http11/http11_parser.rl"
parser->cs = cs;
parser->body_start = 0;
parser->content_len = 0;
parser->mark = 0;
parser->nread = 0;
parser->field_len = 0;
parser->field_start = 0;
parser->cs = cs;
parser->body_start = 0;
parser->content_len = 0;
parser->mark = 0;
parser->nread = 0;
parser->field_len = 0;
parser->field_start = 0;

return(1);
return(1);
}

/** exec **/
size_t http_parser_execute(http_parser *parser, const char *buffer, size_t len, size_t off) {
const char *p, *pe;
int cs = parser->cs;
const char *p, *pe;
int cs = parser->cs;

assert(off <= len && "offset past end of buffer");
assert(off <= len && "offset past end of buffer");

p = buffer+off;
pe = buffer+len;
p = buffer+off;
pe = buffer+len;

assert(*pe == '\0' && "pointer does not end on NUL");
assert(pe - p == len - off && "pointers aren't same distance");
assert(*pe == '\0' && "pointer does not end on NUL");
assert(pe - p == len - off && "pointers aren't same distance");

#line 68 "ext/http11/http11_parser.c"
{
p -= 1;

@@ -113,9 +113,9 @@ case 2:
tr18:
#line 33 "ext/http11/http11_parser.rl"
{
if(parser->request_method != NULL)
parser->request_method(parser->data, PTR_TO(mark), LEN(mark, p));
}
if(parser->request_method != NULL)
parser->request_method(parser->data, PTR_TO(mark), LEN(mark, p));
}
goto st3;
st3:
if ( ++p == pe )

@@ -152,16 +152,16 @@ case 4:
tr20:
#line 37 "ext/http11/http11_parser.rl"
{
if(parser->request_uri != NULL)
parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, p));
}
if(parser->request_uri != NULL)
parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, p));
}
goto st5;
tr32:
#line 41 "ext/http11/http11_parser.rl"
{
if(parser->query_string != NULL)
parser->query_string(parser->data, PTR_TO(mark), LEN(mark, p));
}
if(parser->query_string != NULL)
parser->query_string(parser->data, PTR_TO(mark), LEN(mark, p));
}
goto st5;
st5:
if ( ++p == pe )

@@ -239,17 +239,17 @@ case 13:
tr23:
#line 46 "ext/http11/http11_parser.rl"
{
if(parser->http_version != NULL)
parser->http_version(parser->data, PTR_TO(mark), LEN(mark, p));
}
if(parser->http_version != NULL)
parser->http_version(parser->data, PTR_TO(mark), LEN(mark, p));
}
goto st14;
tr35:
#line 28 "ext/http11/http11_parser.rl"
{
if(parser->http_field != NULL) {
parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, p));
}
}
if(parser->http_field != NULL) {
parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, p));
}
}
goto st14;
st14:
if ( ++p == pe )

@@ -297,11 +297,11 @@ case 16:
tr26:
#line 51 "ext/http11/http11_parser.rl"
{
parser->body_start = p - buffer + 1;
if(parser->header_done != NULL)
parser->header_done(parser->data, p, 0);
goto _out51;
}
parser->body_start = p - buffer + 1;
if(parser->header_done != NULL)
parser->header_done(parser->data, p + 1, pe - p - 1);
goto _out51;
}
goto st51;
st51:
if ( ++p == pe )

@@ -345,8 +345,8 @@ case 17:
tr16:
#line 23 "ext/http11/http11_parser.rl"
{
parser->field_len = LEN(field_start, p);
}
parser->field_len = LEN(field_start, p);
}
goto st18;
st18:
if ( ++p == pe )

@@ -500,9 +500,9 @@ case 27:
tr30:
#line 37 "ext/http11/http11_parser.rl"
{
if(parser->request_uri != NULL)
parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, p));
}
if(parser->request_uri != NULL)
parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, p));
}
goto st28;
st28:
if ( ++p == pe )

@@ -963,50 +963,50 @@ case 50:
}
#line 141 "ext/http11/http11_parser.rl"

parser->cs = cs;
parser->nread += p - (buffer + off);
parser->cs = cs;
parser->nread += p - (buffer + off);

assert(p <= pe && "buffer overflow after parsing execute");
assert(parser->nread <= len && "nread longer than length");
assert(parser->body_start <= len && "body starts after buffer end");
assert(parser->mark < len && "mark is after buffer end");
assert(parser->field_len <= len && "field has length longer than whole buffer");
assert(parser->field_start < len && "field starts after buffer end");
assert(p <= pe && "buffer overflow after parsing execute");
assert(parser->nread <= len && "nread longer than length");
assert(parser->body_start <= len && "body starts after buffer end");
assert(parser->mark < len && "mark is after buffer end");
assert(parser->field_len <= len && "field has length longer than whole buffer");
assert(parser->field_start < len && "field starts after buffer end");

if(parser->body_start) {
/* final \r\n combo encountered so stop right here */

if(parser->body_start) {
/* final \r\n combo encountered so stop right here */

#line 980 "ext/http11/http11_parser.c"
#line 155 "ext/http11/http11_parser.rl"
parser->nread++;
}
parser->nread++;
}

return(parser->nread);
return(parser->nread);
}

int http_parser_finish(http_parser *parser)
{
int cs = parser->cs;
int cs = parser->cs;

#line 993 "ext/http11/http11_parser.c"
#line 166 "ext/http11/http11_parser.rl"

parser->cs = cs;
parser->cs = cs;

if (http_parser_has_error(parser) ) {
return -1;
} else if (http_parser_is_finished(parser) ) {
return 1;
} else {
return 0;
}
if (http_parser_has_error(parser) ) {
return -1;
} else if (http_parser_is_finished(parser) ) {
return 1;
} else {
return 0;
}
}

int http_parser_has_error(http_parser *parser) {
return parser->cs == http_parser_error;
return parser->cs == http_parser_error;
}

int http_parser_is_finished(http_parser *parser) {
return parser->cs == http_parser_first_final;
return parser->cs == http_parser_first_final;
}

@@ -15,170 +15,170 @@

/** machine **/
%%{
machine http_parser;
machine http_parser;

action mark {MARK(mark, fpc); }
action mark {MARK(mark, fpc); }

action start_field { MARK(field_start, fpc); }
action write_field {
parser->field_len = LEN(field_start, fpc);
}
action start_field { MARK(field_start, fpc); }
action write_field {
parser->field_len = LEN(field_start, fpc);
}

action start_value { MARK(mark, fpc); }
action write_value {
if(parser->http_field != NULL) {
parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, fpc));
}
}
action request_method {
if(parser->request_method != NULL)
parser->request_method(parser->data, PTR_TO(mark), LEN(mark, fpc));
}
action request_uri {
if(parser->request_uri != NULL)
parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, fpc));
}
action query_string {
if(parser->query_string != NULL)
parser->query_string(parser->data, PTR_TO(mark), LEN(mark, fpc));
}
action start_value { MARK(mark, fpc); }
action write_value {
if(parser->http_field != NULL) {
parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, fpc));
}
}
action request_method {
if(parser->request_method != NULL)
parser->request_method(parser->data, PTR_TO(mark), LEN(mark, fpc));
}
action request_uri {
if(parser->request_uri != NULL)
parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, fpc));
}
action query_string {
if(parser->query_string != NULL)
parser->query_string(parser->data, PTR_TO(mark), LEN(mark, fpc));
}

action http_version {
if(parser->http_version != NULL)
parser->http_version(parser->data, PTR_TO(mark), LEN(mark, fpc));
}
action http_version {
if(parser->http_version != NULL)
parser->http_version(parser->data, PTR_TO(mark), LEN(mark, fpc));
}

action done {
parser->body_start = fpc - buffer + 1;
if(parser->header_done != NULL)
parser->header_done(parser->data, fpc, 0);
fbreak;
}
action done {
parser->body_start = fpc - buffer + 1;
if(parser->header_done != NULL)
parser->header_done(parser->data, fpc + 1, pe - fpc - 1);
fbreak;
}

#### HTTP PROTOCOL GRAMMAR
# line endings
CRLF = "\r\n";
#### HTTP PROTOCOL GRAMMAR
# line endings
CRLF = "\r\n";

# character types
CTL = (cntrl | 127);
safe = ("$" | "-" | "_" | ".");
extra = ("!" | "*" | "'" | "(" | ")" | ",");
reserved = (";" | "/" | "?" | ":" | "@" | "&" | "=" | "+");
unsafe = (CTL | " " | "\"" | "#" | "%" | "<" | ">");
national = any -- (alpha | digit | reserved | extra | safe | unsafe);
unreserved = (alpha | digit | safe | extra | national);
escape = ("%" xdigit xdigit);
uchar = (unreserved | escape);
pchar = (uchar | ":" | "@" | "&" | "=" | "+");
tspecials = ("(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\\" | "\"" | "/" | "[" | "]" | "?" | "=" | "{" | "}" | " " | "\t");
# character types
CTL = (cntrl | 127);
safe = ("$" | "-" | "_" | ".");
extra = ("!" | "*" | "'" | "(" | ")" | ",");
reserved = (";" | "/" | "?" | ":" | "@" | "&" | "=" | "+");
unsafe = (CTL | " " | "\"" | "#" | "%" | "<" | ">");
national = any -- (alpha | digit | reserved | extra | safe | unsafe);
unreserved = (alpha | digit | safe | extra | national);
escape = ("%" xdigit xdigit);
uchar = (unreserved | escape);
pchar = (uchar | ":" | "@" | "&" | "=" | "+");
tspecials = ("(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\\" | "\"" | "/" | "[" | "]" | "?" | "=" | "{" | "}" | " " | "\t");

# elements
token = (ascii -- (CTL | tspecials));
# elements
token = (ascii -- (CTL | tspecials));

# URI schemes and absolute paths
scheme = ( alpha | digit | "+" | "-" | "." )* ;
absolute_uri = (scheme ":" (uchar | reserved )*) >mark %request_uri;
# URI schemes and absolute paths
scheme = ( alpha | digit | "+" | "-" | "." )* ;
absolute_uri = (scheme ":" (uchar | reserved )*) >mark %request_uri;

path = (pchar+ ( "/" pchar* )*) ;
query = ( uchar | reserved )* >mark %query_string ;
param = ( pchar | "/" )* ;
params = (param ( ";" param )*) ;
rel_path = (path? (";" params)?) %request_uri ("?" query)? ;
absolute_path = ("/"+ rel_path) >mark ;

Request_URI = ("*" >mark %request_uri | absolute_uri | absolute_path) ;
Method = (upper | digit | safe){1,20} >mark %request_method;

http_number = (digit+ "." digit+) ;
HTTP_Version = ("HTTP/" http_number) >mark %http_version ;
Request_Line = (Method " " Request_URI " " HTTP_Version CRLF) ;

field_name = (token -- ":")+ >start_field %write_field;
path = (pchar+ ( "/" pchar* )*) ;
query = ( uchar | reserved )* >mark %query_string ;
param = ( pchar | "/" )* ;
params = (param ( ";" param )*) ;
rel_path = (path? (";" params)?) %request_uri ("?" query)? ;
absolute_path = ("/"+ rel_path) >mark ;

field_value = any* >start_value %write_value;
Request_URI = ("*" >mark %request_uri | absolute_uri | absolute_path) ;
Method = (upper | digit | safe){1,20} >mark %request_method;

message_header = field_name ": " field_value :> CRLF;

Request = Request_Line (message_header)* ( CRLF @done);
http_number = (digit+ "." digit+) ;
HTTP_Version = ("HTTP/" http_number) >mark %http_version ;
Request_Line = (Method " " Request_URI " " HTTP_Version CRLF) ;

main := Request;
field_name = (token -- ":")+ >start_field %write_field;

field_value = any* >start_value %write_value;

message_header = field_name ": " field_value :> CRLF;

Request = Request_Line (message_header)* ( CRLF @done);

main := Request;
}%%

/** Data **/
%% write data;

int http_parser_init(http_parser *parser) {
int cs = 0;
%% write init;
parser->cs = cs;
parser->body_start = 0;
parser->content_len = 0;
parser->mark = 0;
parser->nread = 0;
parser->field_len = 0;
parser->field_start = 0;
int cs = 0;
%% write init;
parser->cs = cs;
parser->body_start = 0;
parser->content_len = 0;
parser->mark = 0;
parser->nread = 0;
parser->field_len = 0;
parser->field_start = 0;

return(1);
return(1);
}

/** exec **/
size_t http_parser_execute(http_parser *parser, const char *buffer, size_t len, size_t off) {
const char *p, *pe;
int cs = parser->cs;
const char *p, *pe;
int cs = parser->cs;

assert(off <= len && "offset past end of buffer");
assert(off <= len && "offset past end of buffer");

p = buffer+off;
pe = buffer+len;
p = buffer+off;
pe = buffer+len;

assert(*pe == '\0' && "pointer does not end on NUL");
assert(pe - p == len - off && "pointers aren't same distance");
assert(*pe == '\0' && "pointer does not end on NUL");
assert(pe - p == len - off && "pointers aren't same distance");

%% write exec;
%% write exec;

parser->cs = cs;
parser->nread += p - (buffer + off);
parser->cs = cs;
parser->nread += p - (buffer + off);

assert(p <= pe && "buffer overflow after parsing execute");
assert(parser->nread <= len && "nread longer than length");
assert(parser->body_start <= len && "body starts after buffer end");
assert(parser->mark < len && "mark is after buffer end");
assert(parser->field_len <= len && "field has length longer than whole buffer");
assert(parser->field_start < len && "field starts after buffer end");
assert(p <= pe && "buffer overflow after parsing execute");
assert(parser->nread <= len && "nread longer than length");
assert(parser->body_start <= len && "body starts after buffer end");
assert(parser->mark < len && "mark is after buffer end");
assert(parser->field_len <= len && "field has length longer than whole buffer");
assert(parser->field_start < len && "field starts after buffer end");

if(parser->body_start) {
/* final \r\n combo encountered so stop right here */
%%write eof;
parser->nread++;
}
if(parser->body_start) {
/* final \r\n combo encountered so stop right here */
%%write eof;
parser->nread++;
}

return(parser->nread);
return(parser->nread);
}

int http_parser_finish(http_parser *parser)
{
int cs = parser->cs;
int cs = parser->cs;

%%write eof;
%%write eof;

parser->cs = cs;
parser->cs = cs;

if (http_parser_has_error(parser) ) {
return -1;
} else if (http_parser_is_finished(parser) ) {
return 1;
} else {
return 0;
}
if (http_parser_has_error(parser) ) {
return -1;
} else if (http_parser_is_finished(parser) ) {
return 1;
} else {
return 0;
}
}

int http_parser_has_error(http_parser *parser) {
return parser->cs == http_parser_error;
return parser->cs == http_parser_error;
}

int http_parser_is_finished(http_parser *parser) {
return parser->cs == http_parser_first_final;
return parser->cs == http_parser_first_final;
}

@@ -135,7 +135,7 @@ module Mongrel
ERROR_503_RESPONSE="HTTP/1.1 503 Service Unavailable\r\n\r\nBUSY".freeze

# The basic max request size we'll try to read.
CHUNK_SIZE=(4 * 1024)
CHUNK_SIZE=(16 * 1024)

# This is the maximum header that is allowed before a client is booted. The parser detects
# this, but we'd also like to do this as well.

@@ -195,41 +195,44 @@ module Mongrel
@params = params
@socket = socket
content_length = params[Const::CONTENT_LENGTH].to_i
http_body_len = params.http_body.length
remain = content_length - params.http_body.length

dispatcher.request_begins(params) if dispatcher
STDERR.puts "REQUEST: #{params.inspect}"

# conditions to test:
# * http_body_len == 0 && content_length == 0 -- Nothing to do
# * http_body_len > content_length -- ERROR, abort
# * http_body_len < content_length -- need to read more
# * http_body_len == content_length -- initial body has all of it
if http_body_len == 0 && content_length == 0
# no body to process
if remain == 0
# we've got everything, pack it up
STDERR.puts "everything already read, packing up and done: #{params.http_body.inspect}"
@body = StringIO.new
dispatcher.request_progress(params, 0, 0) if dispatcher
elsif http_body_len > content_length
@body.write params.http_body
dispatcher.request_progress(params, 0, content_length) if dispatcher
elsif remain < 0
STDERR.puts "ERROR: body length larger than content length, stupid client"
# ERROR, they're sending bad requests
raise HttpParserError.new("Sent body size #{http_body_len} but declared Content-Length: #{content_length}")
elsif http_body_len < content_length
raise HttpParserError.new("Sent body size #{params.http_body.length} but declared Content-Length: #{content_length}")
elsif remain > 0
STDERR.puts "need to read #{remain} more of the body #{content_length}"
# must read more data to complete body
clen = content_length - http_body_len
if clen > Const::MAX_BODY
if remain > Const::MAX_BODY
STDERR.puts "Big content, saving to tempfile"
# huge body, put it in a tempfile
@body = Tempfile.new(Const::MONGREL_TMP_BASE)
@body.binmode
else
# small body, just use that
@body = StringIO.new(params.http_body)
STDERR.puts "Small file, using ram"
@body = StringIO.new
end
read_body(clen, dispatcher)
elsif http_body_len == content_length
# we've got everything, pack it up
@body = StringIO.new(params.http_body)
dispatcher.request_progress(params, 0, http_body_len) if dispatcher

@body.write params.http_body
read_body(remain, content_length, dispatcher)
else
STDERR.puts "BAD LOGIC: Tell Zed he's a moron."
end

@body.rewind
STDERR.puts "DONE"
end

@@ -237,26 +240,27 @@ module Mongrel
# small chunks. It expects @body to be an IO object, @socket to be valid,
# and will set @body = nil if the request fails. It also expects any initial
# part of the body that has been read to be in the @body already.
def read_body(clen, dispatcher)
def read_body(remain, total, dispatcher)
STDERR.puts "reading body"
begin
total = clen
# write the odd sized chunk first
clen -= @body.write(@socket.read(clen % Const::CHUNK_SIZE))
dispatcher.request_progress(params, clen, total) if dispatcher
remain -= @body.write(@socket.read(remain % Const::CHUNK_SIZE))
STDERR.puts "first read, remaining: #{remain}"
dispatcher.request_progress(params, remain, total) if dispatcher

# then stream out nothing but perfectly sized chunks
while clen > 0 and !@socket.closed?
until remain <= 0 or @socket.closed?
data = @socket.read(Const::CHUNK_SIZE)
STDERR.puts "read #{data.length} more"
# have to do it this way since @socket.eof? causes it to block
raise "Socket closed or read failure" if not data or data.length != Const::CHUNK_SIZE
clen -= @body.write(data)
remain -= @body.write(data)
# ASSUME: we are writing to a disk and these writes always write the requested amount
dispatcher.request_progress(params, clen, total) if dispatcher
dispatcher.request_progress(params, remain, total) if dispatcher
end

# rewind to keep the world happy
@body.rewind
rescue Object
STDERR.puts "ERROR reading http body: #$!"
$!.backtrace.join("\n")
# any errors means we should delete the file, including if the file is dumped
@socket.close unless @socket.closed?
@body.delete if @body.class == Tempfile

@@ -136,8 +136,6 @@ module Mongrel
ops[:environment] ||= "development"
ops[:docroot] ||= "public"
ops[:mime] ||= {}
ops[:prefix] ||= "/"

$orig_dollar_quote = $".clone
ENV['RAILS_ENV'] = ops[:environment]

@@ -150,7 +148,7 @@ module Mongrel
log "[RAILS] ActionController::Base.allow_concurrency is true. Wow, you're very brave."
end

ActionController::AbstractRequest.relative_url_root = ops[:prefix]
ActionController::AbstractRequest.relative_url_root = ops[:prefix] if ops[:prefix]

@rails_handler = RailsHandler.new(ops[:docroot], ops[:mime])
end