2021-01-27 12:52:38 +01:00
|
|
|
|
extern crate httparse;
|
|
|
|
|
extern crate netencode;
|
|
|
|
|
extern crate arglib_netencode;
|
2021-01-29 16:35:02 +01:00
|
|
|
|
extern crate ascii;
|
2021-01-31 16:38:21 +01:00
|
|
|
|
extern crate exec_helpers;
|
2021-01-27 12:52:38 +01:00
|
|
|
|
|
|
|
|
|
use std::os::unix::io::FromRawFd;
|
|
|
|
|
use std::io::Read;
|
|
|
|
|
use std::io::Write;
|
2021-02-06 22:33:39 +01:00
|
|
|
|
use std::collections::HashMap;
|
2021-01-31 16:38:21 +01:00
|
|
|
|
use exec_helpers::{die_user_error, die_expected_error, die_temporary};
|
2021-01-27 12:52:38 +01:00
|
|
|
|
|
|
|
|
|
use netencode::{U, T};
|
|
|
|
|
|
|
|
|
|
/// Which side of an HTTP exchange to read and parse from stdin.
enum What {
    /// Parse an HTTP request (method, path, headers).
    Request,
    /// Parse an HTTP response (status code, reason phrase, headers).
    Response,
}
|
|
|
|
|
|
2021-02-06 22:33:39 +01:00
|
|
|
|
// reads a http request (stdin), and writes all headers to stdout, as netencoded record.
|
|
|
|
|
// The keys are text, but can be lists of text iff headers appear multiple times, so beware.
|
2021-01-27 12:52:38 +01:00
|
|
|
|
fn main() -> std::io::Result<()> {
|
|
|
|
|
|
2021-02-08 03:23:53 +01:00
|
|
|
|
let what : What = match arglib_netencode::arglib_netencode("read-http", None) {
|
2021-01-27 12:52:38 +01:00
|
|
|
|
T::Record(rec) => match rec.get("what") {
|
|
|
|
|
Some(T::Text(t)) => match t.as_str() {
|
|
|
|
|
"request" => What::Request,
|
|
|
|
|
"response" => What::Response,
|
2021-02-08 03:23:53 +01:00
|
|
|
|
_ => die_user_error("read-http", "`what` should be either t:request or t:response"),
|
2021-01-27 12:52:38 +01:00
|
|
|
|
},
|
2021-02-08 03:23:53 +01:00
|
|
|
|
Some(o) => die_user_error("read-http", format!("expected a record of text, got {:#?}", o)),
|
2021-01-27 12:52:38 +01:00
|
|
|
|
None => {
|
|
|
|
|
eprintln!("read-http arglib: no `what` given, defaulting to Response");
|
|
|
|
|
What::Response
|
|
|
|
|
}
|
|
|
|
|
}
|
2021-01-31 16:38:21 +01:00
|
|
|
|
o => die_user_error("read-http arglib", format!("expected a record, got {:#?}", o))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
fn read_stdin_to_complete<F>(mut parse: F) -> ()
|
|
|
|
|
where F: FnMut(&[u8]) -> httparse::Result<usize>
|
|
|
|
|
{
|
|
|
|
|
let mut res = httparse::Status::Partial;
|
|
|
|
|
loop {
|
|
|
|
|
if let httparse::Status::Complete(_) = res {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
let mut buf = [0; 2048];
|
|
|
|
|
match std::io::stdin().read(&mut buf[..]) {
|
|
|
|
|
Ok(size) => if size == 0 {
|
|
|
|
|
break;
|
|
|
|
|
},
|
2021-01-31 16:38:21 +01:00
|
|
|
|
Err(err) => die_temporary("read-http", format!("could not read from stdin, {:?}", err))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
}
|
|
|
|
|
match parse(&buf) {
|
|
|
|
|
Ok(status) => {
|
|
|
|
|
res = status;
|
|
|
|
|
}
|
2021-01-31 16:38:21 +01:00
|
|
|
|
Err(err) => die_temporary("read-http", format!("httparse parsing failed: {:#?}", err))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-02-06 22:33:39 +01:00
|
|
|
|
fn normalize_headers<'a>(headers: &'a [httparse::Header]) -> HashMap<String, U<'a>> {
|
|
|
|
|
let mut res = HashMap::new();
|
2021-01-27 12:52:38 +01:00
|
|
|
|
for httparse::Header { name, value } in headers {
|
2021-01-29 16:35:02 +01:00
|
|
|
|
let val = ascii::AsciiStr::from_ascii(*value)
|
2021-02-06 22:33:39 +01:00
|
|
|
|
.expect(&format!("read-http: we require header values to be ASCII, but the header {} was {:?}", name, value))
|
|
|
|
|
.as_str();
|
|
|
|
|
// lowercase the header names, since the standard doesn’t care
|
|
|
|
|
// and we want unique strings to match against
|
|
|
|
|
let name_lower = name.to_lowercase();
|
|
|
|
|
match res.insert(name_lower, U::Text(val)) {
|
|
|
|
|
None => (),
|
|
|
|
|
Some(U::Text(t)) => {
|
|
|
|
|
let name_lower = name.to_lowercase();
|
|
|
|
|
let _ = res.insert(name_lower, U::List(vec![U::Text(t), U::Text(val)]));
|
|
|
|
|
()
|
|
|
|
|
},
|
|
|
|
|
Some(U::List(mut l)) => {
|
|
|
|
|
let name_lower = name.to_lowercase();
|
|
|
|
|
l.push(U::Text(val));
|
|
|
|
|
let _ = res.insert(name_lower, U::List(l));
|
|
|
|
|
()
|
|
|
|
|
},
|
|
|
|
|
Some(o) => panic!("read-http: header not text nor list: {:?}", o),
|
|
|
|
|
}
|
2021-01-27 12:52:38 +01:00
|
|
|
|
}
|
|
|
|
|
res
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// tries to read until the end of the http header (deliniated by two newlines "\r\n\r\n")
|
|
|
|
|
fn read_till_end_of_header<R: Read>(buf: &mut Vec<u8>, reader: R) -> Option<()> {
|
2021-02-06 22:33:39 +01:00
|
|
|
|
let mut chonker = Chunkyboi::new(reader, 4096);
|
2021-01-27 12:52:38 +01:00
|
|
|
|
loop {
|
2021-02-06 22:33:39 +01:00
|
|
|
|
// TODO: attacker can send looooong input, set upper maximum
|
|
|
|
|
match chonker.next() {
|
2021-01-27 12:52:38 +01:00
|
|
|
|
Some(Ok(chunk)) => {
|
|
|
|
|
buf.extend_from_slice(&chunk);
|
|
|
|
|
if chunk.windows(4).any(|c| c == b"\r\n\r\n" ) {
|
|
|
|
|
return Some(());
|
|
|
|
|
}
|
|
|
|
|
},
|
2021-01-31 16:38:21 +01:00
|
|
|
|
Some(Err(err)) => die_temporary("read-http", format!("error reading from stdin: {:?}", err)),
|
2021-01-27 12:52:38 +01:00
|
|
|
|
None => return None
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// max header size chosen arbitrarily
|
|
|
|
|
let mut headers = [httparse::EMPTY_HEADER; 128];
|
|
|
|
|
let stdin = std::io::stdin();
|
|
|
|
|
|
|
|
|
|
match what {
|
|
|
|
|
Request => {
|
|
|
|
|
let mut req = httparse::Request::new(&mut headers);
|
|
|
|
|
let mut buf: Vec<u8> = vec![];
|
|
|
|
|
match read_till_end_of_header(&mut buf, stdin.lock()) {
|
|
|
|
|
Some(()) => match req.parse(&buf) {
|
|
|
|
|
Ok(httparse::Status::Complete(_body_start)) => {},
|
2021-01-31 16:38:21 +01:00
|
|
|
|
Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"),
|
|
|
|
|
Err(err) => die_expected_error("read-http", format!("httparse response parsing failed: {:#?}", err))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
},
|
2021-01-31 16:38:21 +01:00
|
|
|
|
None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse request headers"))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
}
|
|
|
|
|
let method = req.method.expect("method must be filled on complete parse");
|
|
|
|
|
let path = req.path.expect("path must be filled on complete parse");
|
2021-01-29 15:45:30 +01:00
|
|
|
|
write_dict_req(method, path, &normalize_headers(req.headers))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
},
|
|
|
|
|
Response => {
|
|
|
|
|
let mut resp = httparse::Response::new(&mut headers);
|
|
|
|
|
let mut buf: Vec<u8> = vec![];
|
|
|
|
|
match read_till_end_of_header(&mut buf, stdin.lock()) {
|
|
|
|
|
Some(()) => match resp.parse(&buf) {
|
|
|
|
|
Ok(httparse::Status::Complete(_body_start)) => {},
|
2021-01-31 16:38:21 +01:00
|
|
|
|
Ok(httparse::Status::Partial) => die_expected_error("read-http", "httparse should have gotten a full header"),
|
|
|
|
|
Err(err) => die_expected_error("read-http", format!("httparse response parsing failed: {:#?}", err))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
},
|
2021-01-31 16:38:21 +01:00
|
|
|
|
None => die_expected_error("read-http", format!("httparse end of stdin reached before able to parse response headers"))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
}
|
|
|
|
|
let code = resp.code.expect("code must be filled on complete parse");
|
|
|
|
|
let reason = resp.reason.expect("reason must be filled on complete parse");
|
2021-01-29 15:45:30 +01:00
|
|
|
|
write_dict_resp(code, reason, &normalize_headers(resp.headers))
|
2021-01-27 12:52:38 +01:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2021-02-06 22:33:39 +01:00
|
|
|
|
fn write_dict_req<'a, 'buf>(method: &'buf str, path: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> {
|
2021-01-27 12:52:38 +01:00
|
|
|
|
let mut http = vec![
|
2021-02-01 09:16:14 +01:00
|
|
|
|
("method", U::Text(method)),
|
|
|
|
|
("path", U::Text(path)),
|
2021-02-06 22:33:39 +01:00
|
|
|
|
].into_iter().collect();
|
2021-01-27 12:52:38 +01:00
|
|
|
|
write_dict(http, headers)
|
|
|
|
|
}
|
|
|
|
|
|
2021-02-06 22:33:39 +01:00
|
|
|
|
fn write_dict_resp<'a, 'buf>(code: u16, reason: &'buf str, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> {
|
2021-01-27 12:52:38 +01:00
|
|
|
|
let mut http = vec![
|
|
|
|
|
("status", U::N6(code as u64)),
|
2021-02-01 09:16:14 +01:00
|
|
|
|
("status-text", U::Text(reason)),
|
2021-02-06 22:33:39 +01:00
|
|
|
|
].into_iter().collect();
|
2021-01-27 12:52:38 +01:00
|
|
|
|
write_dict(http, headers)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2021-02-06 22:33:39 +01:00
|
|
|
|
fn write_dict<'buf, 'a>(mut http: HashMap<&str, U<'a>>, headers: &'a HashMap<String, U<'a>>) -> std::io::Result<()> {
|
|
|
|
|
match http.insert("headers", U::Record(
|
|
|
|
|
headers.iter().map(|(k,v)| (k.as_str(), v.clone())).collect()
|
|
|
|
|
)) {
|
|
|
|
|
None => (),
|
|
|
|
|
Some(_) => panic!("read-http: headers already in dict"),
|
|
|
|
|
};
|
2021-01-27 12:52:38 +01:00
|
|
|
|
netencode::encode(
|
2021-01-29 15:45:30 +01:00
|
|
|
|
&mut std::io::stdout(),
|
2021-02-06 22:43:53 +01:00
|
|
|
|
&U::Record(http)
|
2021-01-27 12:52:38 +01:00
|
|
|
|
)?;
|
|
|
|
|
Ok(())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// iter helper

/// Wraps a `Read`er and yields its byte stream as an iterator of owned
/// chunks of at most `chunksize` bytes each.
struct Chunkyboi<T> {
    reader: T,
    // reusable scratch space; its length bounds the chunk size
    scratch: Vec<u8>,
}

impl<R: Read> Chunkyboi<R> {
    /// Create a chunker reading from `reader` in chunks of at most
    /// `chunksize` bytes.
    fn new(reader: R, chunksize: usize) -> Self {
        Chunkyboi {
            reader,
            scratch: vec![0; chunksize],
        }
    }
}

impl<R: Read> Iterator for Chunkyboi<R> {
    type Item = std::io::Result<Vec<u8>>;

    fn next(&mut self) -> Option<std::io::Result<Vec<u8>>> {
        match self.reader.read(&mut self.scratch) {
            // a successful zero-byte read means EOF: end the iteration
            Ok(0) => None,
            // copy the bytes out so the scratch buffer can be reused
            Ok(n) => Some(Ok(self.scratch[..n].to_vec())),
            Err(err) => Some(Err(err)),
        }
    }
}
|