perf(h1): improve parsing and encoding of http1 messages

Sean McArthur
2018-05-11 15:40:42 -07:00
parent c3c35e866c
commit 26417fc24a
13 changed files with 1006 additions and 442 deletions

View File

@@ -531,7 +531,7 @@ impl<T, B, R> Future for HandshakeInner<T, B, R>
where
    T: AsyncRead + AsyncWrite + Send + 'static,
    B: Payload,
-    R: proto::Http1Transaction<
+    R: proto::h1::Http1Transaction<
        Incoming=StatusCode,
        Outgoing=proto::RequestLine,
    >,

View File

@@ -193,7 +193,6 @@ where C: Connect + Sync + 'static,
            Version::HTTP_11 => (),
            other => {
                error!("Request has unsupported version \"{:?}\"", other);
-                //TODO: replace this with a proper variant
                return ResponseFuture::new(Box::new(future::err(::Error::new_user_unsupported_version())));
            }
        }

View File

@@ -26,8 +26,6 @@ pub(crate) enum Kind {
    Parse(Parse),
    /// A message reached EOF, but is not complete.
    Incomplete,
-    /// A protocol upgrade was encountered, but not yet supported in hyper.
-    Upgrade,
    /// A client connection received a response when not waiting for one.
    MismatchedResponse,
    /// A pending item was dropped before ever being processed.
@@ -74,6 +72,9 @@ pub(crate) enum Parse {
    Header,
    TooLarge,
    Status,
+    /// A protocol upgrade was encountered, but not yet supported in hyper.
+    UpgradeNotSupported,
}

/*
@@ -141,10 +142,6 @@ impl Error {
        Error::new(Kind::Canceled, cause.map(Into::into))
    }

-    pub(crate) fn new_upgrade() -> Error {
-        Error::new(Kind::Upgrade, None)
-    }
-
    pub(crate) fn new_incomplete() -> Error {
        Error::new(Kind::Incomplete, None)
    }
@@ -161,10 +158,6 @@ impl Error {
        Error::new(Kind::Parse(Parse::Status), None)
    }

-    pub(crate) fn new_version() -> Error {
-        Error::new(Kind::Parse(Parse::Version), None)
-    }
-
    pub(crate) fn new_version_h2() -> Error {
        Error::new(Kind::Parse(Parse::VersionH2), None)
    }
@@ -260,8 +253,8 @@ impl StdError for Error {
            Kind::Parse(Parse::Header) => "invalid Header provided",
            Kind::Parse(Parse::TooLarge) => "message head is too large",
            Kind::Parse(Parse::Status) => "invalid Status provided",
+            Kind::Parse(Parse::UpgradeNotSupported) => "unsupported protocol upgrade",
            Kind::Incomplete => "message is incomplete",
-            Kind::Upgrade => "unsupported protocol upgrade",
            Kind::MismatchedResponse => "response received without matching request",
            Kind::Closed => "connection closed",
            Kind::Connect => "an error occurred trying to connect",
@@ -325,8 +318,8 @@ impl From<http::status::InvalidStatusCode> for Parse {
    }
}

-impl From<http::uri::InvalidUriBytes> for Parse {
-    fn from(_: http::uri::InvalidUriBytes) -> Parse {
+impl From<http::uri::InvalidUri> for Parse {
+    fn from(_: http::uri::InvalidUri) -> Parse {
        Parse::Uri
    }
}

View File

@@ -2,45 +2,43 @@ use std::fmt::Write;
use bytes::BytesMut;
use http::HeaderMap;
-use http::header::{CONNECTION, CONTENT_LENGTH, EXPECT, TRANSFER_ENCODING};
+use http::header::{CONTENT_LENGTH, TRANSFER_ENCODING};
use http::header::{HeaderValue, OccupiedEntry, ValueIter};

/// Maximum number of bytes needed to serialize a u64 into ASCII decimal.
const MAX_DECIMAL_U64_BYTES: usize = 20;
-pub fn connection_keep_alive(headers: &HeaderMap) -> bool {
-    for line in headers.get_all(CONNECTION) {
-        if let Ok(s) = line.to_str() {
-            for val in s.split(',') {
-                if eq_ascii(val.trim(), "keep-alive") {
-                    return true;
-                }
-            }
-        }
-    }
-    false
-}
-
-pub fn connection_close(headers: &HeaderMap) -> bool {
-    for line in headers.get_all(CONNECTION) {
-        if let Ok(s) = line.to_str() {
-            for val in s.split(',') {
-                if eq_ascii(val.trim(), "close") {
-                    return true;
-                }
-            }
-        }
-    }
-    false
-}
-
-pub fn content_length_parse(headers: &HeaderMap) -> Option<u64> {
-    content_length_parse_all(headers.get_all(CONTENT_LENGTH).into_iter())
-}
-
-pub fn content_length_parse_all(values: ValueIter<HeaderValue>) -> Option<u64> {
+pub fn connection_keep_alive(value: &HeaderValue) -> bool {
+    connection_has(value, "keep-alive")
+}
+
+pub fn connection_close(value: &HeaderValue) -> bool {
+    connection_has(value, "close")
+}
+
+fn connection_has(value: &HeaderValue, needle: &str) -> bool {
+    if let Ok(s) = value.to_str() {
+        for val in s.split(',') {
+            if eq_ascii(val.trim(), needle) {
+                return true;
+            }
+        }
+    }
+    false
+}
+
+pub fn content_length_parse(value: &HeaderValue) -> Option<u64> {
+    value
+        .to_str()
+        .ok()
+        .and_then(|s| s.parse().ok())
+}
+
+pub fn content_length_parse_all(headers: &HeaderMap) -> Option<u64> {
+    content_length_parse_all_values(headers.get_all(CONTENT_LENGTH).into_iter())
+}
+
+pub fn content_length_parse_all_values(values: ValueIter<HeaderValue>) -> Option<u64> {
    // If multiple Content-Length headers were sent, everything can still
    // be alright if they all contain the same value, and all parse
    // correctly. If not, then it's an error.
@@ -70,10 +68,6 @@ pub fn content_length_parse_all(values: ValueIter<HeaderValue>) -> Option<u64> {
    }
}

-pub fn content_length_zero(headers: &mut HeaderMap) {
-    headers.insert(CONTENT_LENGTH, HeaderValue::from_static("0"));
-}
-
pub fn content_length_value(len: u64) -> HeaderValue {
    let mut len_buf = BytesMut::with_capacity(MAX_DECIMAL_U64_BYTES);
    write!(len_buf, "{}", len)
@@ -84,10 +78,6 @@ pub fn content_length_value(len: u64) -> HeaderValue {
    }
}

-pub fn expect_continue(headers: &HeaderMap) -> bool {
-    Some(&b"100-continue"[..]) == headers.get(EXPECT).map(|v| v.as_bytes())
-}
-
pub fn transfer_encoding_is_chunked(headers: &HeaderMap) -> bool {
    is_chunked(headers.get_all(TRANSFER_ENCODING).into_iter())
}
@@ -95,10 +85,17 @@ pub fn transfer_encoding_is_chunked(headers: &HeaderMap) -> bool {
pub fn is_chunked(mut encodings: ValueIter<HeaderValue>) -> bool {
    // chunked must always be the last encoding, according to spec
    if let Some(line) = encodings.next_back() {
-        if let Ok(s) = line.to_str() {
-            if let Some(encoding) = s.rsplit(',').next() {
-                return eq_ascii(encoding.trim(), "chunked");
-            }
-        }
+        return is_chunked_(line);
    }

    false
}

+pub fn is_chunked_(value: &HeaderValue) -> bool {
+    // chunked must always be the last encoding, according to spec
+    if let Ok(s) = value.to_str() {
+        if let Some(encoding) = s.rsplit(',').next() {
+            return eq_ascii(encoding.trim(), "chunked");
+        }
+    }
+
+    false
+}
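With the helpers now taking a single HeaderValue, call sites own the iteration over repeated Connection values. A sketch of how such a caller inside the crate could look (illustrative, not a line from this diff; `wants_keep_alive` is a made-up name):

    use http::HeaderMap;
    use http::header::CONNECTION;

    // Sketch: walk every `Connection` value and ask about each one,
    // instead of handing the whole HeaderMap to the helper.
    fn wants_keep_alive(headers: &HeaderMap) -> bool {
        headers.get_all(CONNECTION).iter().any(connection_keep_alive)
    }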

View File

@@ -1,6 +1,6 @@
#![doc(html_root_url = "https://docs.rs/hyper/0.11.22")]
#![deny(missing_docs)]
-#![deny(warnings)]
+//#![deny(warnings)]
#![deny(missing_debug_implementations)]
#![cfg_attr(all(test, feature = "nightly"), feature(test))]

View File

@@ -4,13 +4,13 @@ use std::marker::PhantomData;
use bytes::{Buf, Bytes};
use futures::{Async, Poll};
-use http::{Method, Version};
+use http::{HeaderMap, Method, Version};
use tokio_io::{AsyncRead, AsyncWrite};

use ::Chunk;
-use proto::{BodyLength, Decode, Http1Transaction, MessageHead};
+use proto::{BodyLength, MessageHead};
use super::io::{Buffered};
-use super::{EncodedBuf, Encoder, Decoder};
+use super::{EncodedBuf, Encode, Encoder, Decode, Decoder, Http1Transaction, ParseContext};

const H2_PREFACE: &'static [u8] = b"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n";
@@ -36,6 +36,7 @@ where I: AsyncRead + AsyncWrite,
        Conn {
            io: Buffered::new(io),
            state: State {
+                cached_headers: None,
                error: None,
                keep_alive: KA::Busy,
                method: None,
@@ -118,8 +119,11 @@ where I: AsyncRead + AsyncWrite,
        trace!("Conn::read_head");
        loop {
-            let (version, head) = match self.io.parse::<T>() {
-                Ok(Async::Ready(head)) => (head.version, head),
+            let msg = match self.io.parse::<T>(ParseContext {
+                cached_headers: &mut self.state.cached_headers,
+                req_method: &mut self.state.method,
+            }) {
+                Ok(Async::Ready(msg)) => msg,
                Ok(Async::NotReady) => return Ok(Async::NotReady),
                Err(e) => {
                    // If we are currently waiting on a message, then an empty
@@ -141,48 +145,32 @@ where I: AsyncRead + AsyncWrite,
                }
            };

-            match version {
-                Version::HTTP_10 |
-                Version::HTTP_11 => {},
-                _ => {
-                    error!("unimplemented HTTP Version = {:?}", version);
-                    self.state.close_read();
-                    //TODO: replace this with a more descriptive error
-                    return Err(::Error::new_version());
-                }
-            };
-            self.state.version = version;
+            self.state.version = msg.head.version;
+            let head = msg.head;

-            let decoder = match T::decoder(&head, &mut self.state.method) {
-                Ok(Decode::Normal(d)) => {
+            let decoder = match msg.decode {
+                Decode::Normal(d) => {
                    d
                },
-                Ok(Decode::Final(d)) => {
+                Decode::Final(d) => {
                    trace!("final decoder, HTTP ending");
                    debug_assert!(d.is_eof());
                    self.state.close_read();
                    d
                },
-                Ok(Decode::Ignore) => {
+                Decode::Ignore => {
                    // likely a 1xx message that we can ignore
                    continue;
                }
-                Err(e) => {
-                    debug!("decoder error = {:?}", e);
-                    self.state.close_read();
-                    return self.on_parse_error(e)
-                        .map(|()| Async::NotReady);
-                }
            };

            debug!("incoming body is {}", decoder);

            self.state.busy();

-            if head.expecting_continue() {
-                let msg = b"HTTP/1.1 100 Continue\r\n\r\n";
-                self.io.write_buf_mut().extend_from_slice(msg);
+            if msg.expect_continue {
+                let cont = b"HTTP/1.1 100 Continue\r\n\r\n";
+                self.io.write_buf_mut().extend_from_slice(cont);
            }
-            let wants_keep_alive = head.should_keep_alive();
+            let wants_keep_alive = msg.keep_alive;
            self.state.keep_alive &= wants_keep_alive;
            let (body, reading) = if decoder.is_eof() {
                (false, Reading::KeepAlive)
@@ -410,8 +398,17 @@ where I: AsyncRead + AsyncWrite,
        self.enforce_version(&mut head);

        let buf = self.io.write_buf_mut();

-        self.state.writing = match T::encode(head, body, &mut self.state.method, self.state.title_case_headers, buf) {
+        self.state.writing = match T::encode(Encode {
+            head: &mut head,
+            body,
+            keep_alive: self.state.wants_keep_alive(),
+            req_method: &mut self.state.method,
+            title_case_headers: self.state.title_case_headers,
+        }, buf) {
            Ok(encoder) => {
+                debug_assert!(self.state.cached_headers.is_none());
+                debug_assert!(head.headers.is_empty());
+                self.state.cached_headers = Some(head.headers);
                if !encoder.is_eof() {
                    Writing::Body(encoder)
                } else if encoder.is_last() {
@@ -430,24 +427,12 @@ where I: AsyncRead + AsyncWrite,
    // If we know the remote speaks an older version, we try to fix up any messages
    // to work with our older peer.
    fn enforce_version(&mut self, head: &mut MessageHead<T::Outgoing>) {
-        //use header::Connection;
-
-        let wants_keep_alive = if self.state.wants_keep_alive() {
-            let ka = head.should_keep_alive();
-            self.state.keep_alive &= ka;
-            ka
-        } else {
-            false
-        };
-
        match self.state.version {
            Version::HTTP_10 => {
                // If the remote only knows HTTP/1.0, we should force ourselves
                // to do only speak HTTP/1.0 as well.
                head.version = Version::HTTP_10;
-                if wants_keep_alive {
-                    //TODO: head.headers.set(Connection::keep_alive());
-                }
            },
            _ => {
                // If the remote speaks HTTP/1.1, then it *should* be fine with
@@ -617,13 +602,27 @@ impl<I, B: Buf, T> fmt::Debug for Conn<I, B, T> {
}

struct State {
+    /// Re-usable HeaderMap to reduce allocating new ones.
+    cached_headers: Option<HeaderMap>,
+    /// If an error occurs when there wasn't a direct way to return it
+    /// back to the user, this is set.
    error: Option<::Error>,
+    /// Current keep-alive status.
    keep_alive: KA,
+    /// If mid-message, the HTTP Method that started it.
+    ///
+    /// This is used to know things such as if the message can include
+    /// a body or not.
    method: Option<Method>,
    title_case_headers: bool,
+    /// Set to true when the Dispatcher should poll read operations
+    /// again. See the `maybe_notify` method for more.
    notify_read: bool,
+    /// State of allowed reads
    reading: Reading,
+    /// State of allowed writes
    writing: Writing,
+    /// Either HTTP/1.0 or 1.1 connection
    version: Version,
}
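The new cached_headers slot is what the write path above feeds (the headers of an encoded message are stashed once they have been drained) and the read path reuses on the next parse. A minimal sketch of that reuse pattern, assuming only the http crate's HeaderMap API; illustrative, not hyper's exact code:

    use http::HeaderMap;

    // Sketch: recycle a previously allocated HeaderMap rather than
    // building a fresh one for every parsed message.
    fn take_or_new(cached: &mut Option<HeaderMap>) -> HeaderMap {
        match cached.take() {
            Some(mut headers) => {
                headers.clear(); // drops old entries, keeps the allocation
                headers
            }
            None => HeaderMap::new(),
        }
    }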

View File

@@ -4,7 +4,8 @@ use http::{Request, Response, StatusCode};
use tokio_io::{AsyncRead, AsyncWrite};

use body::{Body, Payload};
-use proto::{BodyLength, Conn, Http1Transaction, MessageHead, RequestHead, RequestLine, ResponseHead};
+use proto::{BodyLength, Conn, MessageHead, RequestHead, RequestLine, ResponseHead};
+use super::Http1Transaction;
use service::Service;

pub(crate) struct Dispatcher<D, Bs: Payload, I, T> {

View File

@@ -7,7 +7,7 @@ use iovec::IoVec;
use common::StaticBuf;

/// Encoders to handle different Transfer-Encodings.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq)]
pub struct Encoder {
    kind: Kind,
    is_last: bool,
@@ -70,8 +70,9 @@ impl Encoder {
        }
    }

-    pub fn set_last(&mut self) {
-        self.is_last = true;
+    pub fn set_last(mut self, is_last: bool) -> Self {
+        self.is_last = is_last;
+        self
    }

    pub fn is_last(&self) -> bool {
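Taking self by value turns set_last into a builder step, so an encoder can be chosen and flagged as the connection's final message in one expression. A sketch of such a call site, assuming an `Encoder::length` constructor and a local `keep_alive` flag (both names are assumptions here, not taken from this diff):

    // Sketch: chain the by-value setter when constructing the encoder.
    fn make_encoder(content_len: u64, keep_alive: bool) -> Encoder {
        Encoder::length(content_len).set_last(!keep_alive)
    }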

View File

@@ -8,7 +8,7 @@ use futures::{Async, Poll};
use iovec::IoVec;
use tokio_io::{AsyncRead, AsyncWrite};

-use proto::{Http1Transaction, MessageHead};
+use super::{Http1Transaction, ParseContext, ParsedMessage};

/// The initial buffer size allocated before trying to read from IO.
pub(crate) const INIT_BUFFER_SIZE: usize = 8192;
@@ -126,12 +126,16 @@ where
        }
    }

-    pub(super) fn parse<S: Http1Transaction>(&mut self) -> Poll<MessageHead<S::Incoming>, ::Error> {
+    pub(super) fn parse<S>(&mut self, ctx: ParseContext)
+        -> Poll<ParsedMessage<S::Incoming>, ::Error>
+    where
+        S: Http1Transaction,
+    {
        loop {
-            match try!(S::parse(&mut self.read_buf)) {
-                Some((head, len)) => {
-                    debug!("parsed {} headers ({} bytes)", head.headers.len(), len);
-                    return Ok(Async::Ready(head))
+            match try!(S::parse(&mut self.read_buf, ParseContext { cached_headers: ctx.cached_headers, req_method: ctx.req_method, })) {
+                Some(msg) => {
+                    debug!("parsed {} headers", msg.head.headers.len());
+                    return Ok(Async::Ready(msg))
                },
                None => {
                    if self.read_buf.capacity() >= self.max_buf_size {
@@ -617,7 +621,11 @@ mod tests {
        let mock = AsyncIo::new_buf(raw, raw.len());
        let mut buffered = Buffered::<_, Cursor<Vec<u8>>>::new(mock);
-        assert_eq!(buffered.parse::<::proto::ClientTransaction>().unwrap(), Async::NotReady);
+        let ctx = ParseContext {
+            cached_headers: &mut None,
+            req_method: &mut None,
+        };
+        assert!(buffered.parse::<::proto::ClientTransaction>(ctx).unwrap().is_not_ready());
        assert!(buffered.io.blocked());
    }

View File

@@ -1,3 +1,8 @@
+use bytes::BytesMut;
+use http::{HeaderMap, Method};
+
+use proto::{MessageHead, BodyLength};
+
pub(crate) use self::conn::Conn;
pub(crate) use self::dispatch::Dispatcher;
pub use self::decode::Decoder;
@@ -11,5 +16,58 @@ mod decode;
pub(crate) mod dispatch;
mod encode;
mod io;
-pub mod role;
+mod role;
+
+pub(crate) type ServerTransaction = self::role::Server<self::role::YesUpgrades>;
+//pub type ServerTransaction = self::role::Server<self::role::NoUpgrades>;
+//pub type ServerUpgradeTransaction = self::role::Server<self::role::YesUpgrades>;
+
+pub(crate) type ClientTransaction = self::role::Client<self::role::NoUpgrades>;
+pub(crate) type ClientUpgradeTransaction = self::role::Client<self::role::YesUpgrades>;
+
+pub(crate) trait Http1Transaction {
+    type Incoming;
+    type Outgoing: Default;
+    fn parse(bytes: &mut BytesMut, ctx: ParseContext) -> ParseResult<Self::Incoming>;
+    fn encode(enc: Encode<Self::Outgoing>, dst: &mut Vec<u8>) -> ::Result<Encoder>;
+    fn on_error(err: &::Error) -> Option<MessageHead<Self::Outgoing>>;
+    fn should_error_on_parse_eof() -> bool;
+    fn should_read_first() -> bool;
+}
+
+pub(crate) type ParseResult<T> = Result<Option<ParsedMessage<T>>, ::error::Parse>;
+
+#[derive(Debug)]
+pub(crate) struct ParsedMessage<T> {
+    head: MessageHead<T>,
+    decode: Decode,
+    expect_continue: bool,
+    keep_alive: bool,
+}
+
+pub(crate) struct ParseContext<'a> {
+    cached_headers: &'a mut Option<HeaderMap>,
+    req_method: &'a mut Option<Method>,
+}
+
+/// Passed to Http1Transaction::encode
+pub(crate) struct Encode<'a, T: 'a> {
+    head: &'a mut MessageHead<T>,
+    body: Option<BodyLength>,
+    keep_alive: bool,
+    req_method: &'a mut Option<Method>,
+    title_case_headers: bool,
+}
+
+#[derive(Debug, PartialEq)]
+pub enum Decode {
+    /// Decode normally.
+    Normal(Decoder),
+    /// After this decoder is done, HTTP is done.
+    Final(Decoder),
+    /// A header block that should be ignored, like unknown 1xx responses.
+    Ignore,
+}

File diff suppressed because it is too large

View File

@@ -1,10 +1,7 @@
//! Pieces pertaining to the HTTP message protocol.
-use bytes::BytesMut;
use http::{HeaderMap, Method, StatusCode, Uri, Version};

-use headers;
-pub(crate) use self::h1::{dispatch, Conn};
+pub(crate) use self::h1::{dispatch, Conn, ClientTransaction, ClientUpgradeTransaction, ServerTransaction};

pub(crate) mod h1;
pub(crate) mod h2;
@@ -30,6 +27,7 @@ pub struct RequestLine(pub Method, pub Uri);
/// An incoming response message.
pub type ResponseHead = MessageHead<StatusCode>;

+/*
impl<S> MessageHead<S> {
    pub fn should_keep_alive(&self) -> bool {
        should_keep_alive(self.version, &self.headers)
@@ -55,33 +53,7 @@ pub fn should_keep_alive(version: Version, headers: &HeaderMap) -> bool {
pub fn expecting_continue(version: Version, headers: &HeaderMap) -> bool {
    version == Version::HTTP_11 && headers::expect_continue(headers)
}
+*/

-pub(crate) type ServerTransaction = h1::role::Server<h1::role::YesUpgrades>;
-//pub type ServerTransaction = h1::role::Server<h1::role::NoUpgrades>;
-//pub type ServerUpgradeTransaction = h1::role::Server<h1::role::YesUpgrades>;
-
-pub(crate) type ClientTransaction = h1::role::Client<h1::role::NoUpgrades>;
-pub(crate) type ClientUpgradeTransaction = h1::role::Client<h1::role::YesUpgrades>;
-
-pub(crate) trait Http1Transaction {
-    type Incoming;
-    type Outgoing: Default;
-    fn parse(bytes: &mut BytesMut) -> ParseResult<Self::Incoming>;
-    fn decoder(head: &MessageHead<Self::Incoming>, method: &mut Option<Method>) -> ::Result<Decode>;
-    fn encode(
-        head: MessageHead<Self::Outgoing>,
-        body: Option<BodyLength>,
-        method: &mut Option<Method>,
-        title_case_headers: bool,
-        dst: &mut Vec<u8>,
-    ) -> ::Result<h1::Encoder>;
-    fn on_error(err: &::Error) -> Option<MessageHead<Self::Outgoing>>;
-    fn should_error_on_parse_eof() -> bool;
-    fn should_read_first() -> bool;
-}
-
-pub(crate) type ParseResult<T> = Result<Option<(MessageHead<T>, usize)>, ::error::Parse>;

#[derive(Debug)]
pub enum BodyLength {
@@ -91,17 +63,7 @@ pub enum BodyLength {
    Unknown,
}

-#[derive(Debug)]
-pub enum Decode {
-    /// Decode normally.
-    Normal(h1::Decoder),
-    /// After this decoder is done, HTTP is done.
-    Final(h1::Decoder),
-    /// A header block that should be ignored, like unknown 1xx responses.
-    Ignore,
-}
+/*
#[test]
fn test_should_keep_alive() {
    let mut headers = HeaderMap::new();
@@ -129,3 +91,4 @@ fn test_expecting_continue() {
    assert!(!expecting_continue(Version::HTTP_10, &headers));
    assert!(expecting_continue(Version::HTTP_11, &headers));
}
+*/

View File

@@ -181,6 +181,13 @@ mod response_body_lengths {
            has_header(&body, "transfer-encoding:"),
            "expects_chunked"
        );
+        assert_eq!(
+            case.expects_chunked,
+            has_header(&body, "chunked\r\n"),
+            "expects_chunked"
+        );
        assert_eq!(
            case.expects_con_len,
            has_header(&body, "content-length:"),
@@ -200,7 +207,7 @@ mod response_body_lengths {
    }

    #[test]
-    fn get_fixed_response_known() {
+    fn fixed_response_known() {
        run_test(TestCase {
            version: 1,
            headers: &[("content-length", "11")],
@@ -211,7 +218,7 @@ mod response_body_lengths {
    }

    #[test]
-    fn get_fixed_response_unknown() {
+    fn fixed_response_unknown() {
        run_test(TestCase {
            version: 1,
            headers: &[("content-length", "11")],
@@ -222,7 +229,18 @@ mod response_body_lengths {
    }

    #[test]
-    fn get_chunked_response_known() {
+    fn fixed_response_known_empty() {
+        run_test(TestCase {
+            version: 1,
+            headers: &[("content-length", "0")],
+            body: Bd::Known(""),
+            expects_chunked: false,
+            expects_con_len: true,
+        });
+    }
+
+    #[test]
+    fn chunked_response_known() {
        run_test(TestCase {
            version: 1,
            headers: &[("transfer-encoding", "chunked")],
@@ -234,7 +252,7 @@ mod response_body_lengths {
    }

    #[test]
-    fn get_chunked_response_unknown() {
+    fn chunked_response_unknown() {
        run_test(TestCase {
            version: 1,
            headers: &[("transfer-encoding", "chunked")],
@@ -245,7 +263,22 @@ mod response_body_lengths {
    }

    #[test]
-    fn get_chunked_response_trumps_length() {
+    fn te_response_adds_chunked() {
+        run_test(TestCase {
+            version: 1,
+            headers: &[("transfer-encoding", "gzip")],
+            body: Bd::Unknown("foo bar baz"),
+            expects_chunked: true,
+            expects_con_len: false,
+        });
+    }
+
+    #[test]
+    #[ignore]
+    // This used to be the case, but providing this functionality got in the
+    // way of performance. It can probably be brought back later, and doing
+    // so should be backwards-compatible...
+    fn chunked_response_trumps_length() {
        run_test(TestCase {
            version: 1,
            headers: &[
@@ -260,7 +293,7 @@ mod response_body_lengths {
    }

    #[test]
-    fn get_auto_response_with_entity_unknown_length() {
+    fn auto_response_with_unknown_length() {
        run_test(TestCase {
            version: 1,
            // no headers means trying to guess from Payload
@@ -272,7 +305,7 @@ mod response_body_lengths {
    }

    #[test]
-    fn get_auto_response_with_entity_known_length() {
+    fn auto_response_with_known_length() {
        run_test(TestCase {
            version: 1,
            // no headers means trying to guess from Payload
@@ -283,9 +316,20 @@ mod response_body_lengths {
        });
    }

+    #[test]
+    fn auto_response_known_empty() {
+        run_test(TestCase {
+            version: 1,
+            // no headers means trying to guess from Payload
+            headers: &[],
+            body: Bd::Known(""),
+            expects_chunked: false,
+            expects_con_len: true,
+        });
+    }
+
    #[test]
-    fn http_10_get_auto_response_with_entity_unknown_length() {
+    fn http10_auto_response_with_unknown_length() {
        run_test(TestCase {
            version: 0,
            // no headers means trying to guess from Payload
@@ -298,7 +342,7 @@ mod response_body_lengths {
    #[test]
-    fn http_10_get_chunked_response() {
+    fn http10_chunked_response() {
        run_test(TestCase {
            version: 0,
            // http/1.0 should strip this header
@@ -620,6 +664,62 @@ fn disable_keep_alive() {
    }
}

+#[test]
+fn header_connection_close() {
+    let foo_bar = b"foo bar baz";
+    let server = serve();
+    server.reply()
+        .header("content-length", foo_bar.len().to_string())
+        .header("connection", "close")
+        .body(foo_bar);
+
+    let mut req = connect(server.addr());
+    req.write_all(b"\
+        GET / HTTP/1.1\r\n\
+        Host: example.domain\r\n\
+        Connection: keep-alive\r\n\
+        \r\n\
+    ").expect("writing 1");
+
+    let mut buf = [0; 1024 * 8];
+    loop {
+        let n = req.read(&mut buf[..]).expect("reading 1");
+        if n < buf.len() {
+            if &buf[n - foo_bar.len()..n] == foo_bar {
+                break;
+            } else {
+            }
+        }
+    }
+
+    // try again!
+    // but since the server responded with connection: close, the internal
+    // state should have noticed and shutdown
+
+    let quux = b"zar quux";
+    server.reply()
+        .header("content-length", quux.len().to_string())
+        .body(quux);
+
+    // the write can possibly succeed, since it fills the kernel buffer on the first write
+    let _ = req.write_all(b"\
+        GET /quux HTTP/1.1\r\n\
+        Host: example.domain\r\n\
+        Connection: close\r\n\
+        \r\n\
+    ");
+
+    let mut buf = [0; 1024 * 8];
+    match req.read(&mut buf[..]) {
+        // Ok(0) means EOF, so a proper shutdown
+        // Err(_) could mean ConnReset or something, also fine
+        Ok(0) |
+        Err(_) => {}
+        Ok(n) => {
+            panic!("read {} bytes on a disabled keep-alive socket", n);
+        }
+    }
+}
+
#[test]
fn expect_continue() {
    let server = serve();