Compare commits
10 Commits
4a298c8b1e ... ac173e1ca4
| Author | SHA1 | Date |
|---|---|---|
| | ac173e1ca4 | |
| | be3740f6ed | |
| | 16b90fe61c | |
| | 38ded8a8c3 | |
| | 35001306fb | |
| | ef899b461e | |
| | d916e58f20 | |
| | edfd90b787 | |
| | d5e6878703 | |
| | f4b8eb502d | |
Cargo.lock (generated, 20 lines changed)
@@ -68,6 +68,15 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "crc32fast"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa"
dependencies = [
 "cfg-if",
]

[[package]]
name = "diff"
version = "0.1.13"

@@ -80,6 +89,16 @@ version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"

[[package]]
name = "flate2"
version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae"
dependencies = [
 "crc32fast",
 "miniz_oxide",
]

[[package]]
name = "gimli"
version = "0.27.3"

@@ -98,6 +117,7 @@ version = "0.1.0"
dependencies = [
 "anyhow",
 "bytes",
 "flate2",
 "itertools",
 "nom",
 "pretty_assertions",

Cargo.toml

@@ -25,7 +25,7 @@ thiserror = "1.0.38" # error handling
tokio = { version = "1.23.0", features = ["full"] } # async networking
nom = "7.1.3" # parser combinators
itertools = "0.11.0" # General iterator helpers
flate2 = "1.0.30"

[dev-dependencies]
pretty_assertions = "1.3.0" # nicer looking assertions

src/main.rs (217 lines changed)
@@ -1,32 +1,35 @@
// #![feature(if_let_guard)]

use std::{ collections::HashMap, path::PathBuf, sync::Arc };
use anyhow::Result;
use std::{ collections::HashMap, io::Write, path::PathBuf, sync::Arc };
use anyhow::{bail, Result};

use itertools::Itertools;
use tokio::{
    fs::File,
    io::{ AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader },
    net::{ TcpListener, TcpStream },
};

use flate2::{ write::GzEncoder, Compression };

mod utils;
use utils::*;

#[derive(Debug, Clone)]
struct Args {
    pub directory: PathBuf,
    pub directory: Option<PathBuf>,
}

type A = Arc<Args>;


fn parse_args () -> Args {
    let directory = std::env::args().position(|e| e == "--directory").unwrap() + 1;
    let directory = PathBuf::from(std::env::args().nth(directory).unwrap());
    let directory = std::env::args()
        .position(|e| e == "--directory")
        .and_then(|d| std::env::args().nth(d + 1))
        .map(|d| PathBuf::from(d));

    Args {
        directory
    }
    Args { directory }
}

#[tokio::main]
@@ -47,35 +50,55 @@ async fn main() -> Result<()> {
async fn process (mut stream: TcpStream, args: A) -> Result<()> {
    let buf_reader = BufReader::new(&mut stream);

    let mut data = buf_reader
        .lines();
    let mut data = buf_reader.lines();

    let (_method, path, _ver) = {
        let start_line = data.next_line().await?.ok_or(E::InvalidRequest)?; // should be 500;
        let mut parts = start_line.split_whitespace().map(ToOwned::to_owned);
        let method = parts.next().ok_or(E::InvalidRequest)?;
        let path = parts.next().ok_or(E::InvalidRequest)?;
        let ver = parts.next().ok_or(E::InvalidRequest)?;
    let (method, target, _version) = 'outer : {
        'inner : {
            let Ok(Some(start_line)) = data.next_line().await else { break 'inner };
            let mut parts = start_line.split_ascii_whitespace();
            let Some(Ok(method)) = parts.next().map(Method::try_from) else { break 'inner };
            let Some(path) = parts.next() else { break 'inner };
            let Some(version) = parts.next() else { break 'inner };
            break 'outer (method, path.to_owned(), version.to_owned());
        }
        // this is either the best or the worst piece of code i've written

        (method, path, ver)
        let _ = stream.write_all(&Response::_400.build()).await;
        let _ = stream.flush();
        bail!(E::InvalidRequest)
    };

    let headers = Headers::parse(data.into_inner()).await;

    let response = match path.as_str() {
        "/" => Response::Empty,
        "/user-agent" => Response::TextPlain(headers.get("User-Agent").to_owned()),
    let mut data = data.into_inner();

    let headers = Headers::parse(&mut data).await;
    let encoding = Encoding::parse(headers.get("Accept-Encoding"));

    use Method as M;
    let response = match (method, target.as_str()) {
        (M::GET, "/") => Response::Empty,
        (M::GET, "/user-agent") => Response::TextPlain(headers.get("User-Agent").to_owned(), encoding),
        // p if let Some(echo) = p.strip_prefix("/echo/") => Response::TextPlain(echo), // a nicer way to do that, not available in stable yet
        p if p.starts_with("/echo/") => Response::TextPlain(p.trim_start_matches("/echo/").to_owned()),
        p if p.starts_with("/files/") => {
            let path = args.directory.join(p.trim_start_matches("/files/"));
        (M::GET, r) if r.starts_with("/echo/") => Response::TextPlain(r.trim_start_matches("/echo/").to_owned(), encoding),
        (M::GET, r) if r.starts_with("/files/") => 'file : {
            let Some(path) = &args.directory else { break 'file Response::_500; };
            let path = path.join(r.trim_start_matches("/files/"));
            let Ok(mut f) = File::open(path).await else { break 'file Response::_404; };

            let mut buf = vec![];
            if let Ok(mut f) = File::open(path).await {
                let _ = f.read_to_end(&mut buf).await;
                Response::OctetStream(buf)
            } else {
                Response::_404
            }
            let _ = f.read_to_end(&mut buf).await;
            Response::OctetStream(buf)
        },
        (M::POST, r) if r.starts_with("/files") => 'file : {
            let length = headers.get("Content-Length").parse().unwrap();
            let body = parse_req_body(&mut data, length).await;

            let Some(path) = &args.directory else { break 'file Response::_500; };
            let path = path.join(r.trim_start_matches("/files/"));
            let Ok(mut f) = File::create(path).await else { break 'file Response::_500; };
            let Ok(_) = f.write_all(&body).await else { break 'file Response::_500 };

            Response::_201
        },
        _ => Response::_404,
    };
@@ -86,11 +109,17 @@ async fn process (mut stream: TcpStream, args: A) -> Result<()> {
    Ok(())
}

pub async fn parse_req_body (reader: &mut BufReader<&mut TcpStream>, length: usize) -> Vec<u8> {
    let mut v = vec![0; length];
    let _ = reader.read_exact(&mut v).await;
    v
}

#[derive(Debug, Clone)]
pub struct Headers (HashMap<String, String>);

impl Headers {
    pub async fn parse (mut reader: BufReader<&'_ mut TcpStream>) -> Self {
    pub async fn parse (reader: &mut BufReader<&'_ mut TcpStream>) -> Self {
        let mut map = HashMap::new();
        let mut buf = String::new();
        while let Ok(_) = reader.read_line(&mut buf).await {
@@ -110,44 +139,136 @@ impl Headers {
    }
}

#[derive(Debug, Clone)]
enum Response {
    _404,
    Empty,
    TextPlain (String),
    OctetStream (Vec<u8>)

#[derive(Debug, Clone, PartialEq, Eq)]
enum Encoding {
    Gzip,
    Invalid,
    None,
}


impl Encoding {
    pub fn header (&self) -> &'static str {
        match self {
            Self::Gzip => "gzip",
            _ => d!()
        }
    }

    pub fn parse (s: &str) -> Vec<Self> {
        s.split(',').map(str::trim).map(From::from).collect()
    }
}

impl From<&str> for Encoding {
    fn from (s: &str) -> Self {
        match s.to_ascii_lowercase().as_str() {
            "" => Self::None,
            "gzip" => Self::Gzip,
            _ => Self::Invalid
        }
    }
}


#[derive(Debug, Clone, PartialEq, Eq)]
enum Method {
    GET,
    POST,
    PUT,
    DELETE,
    UPDATE,
}

impl TryFrom<&str> for Method {
    type Error = anyhow::Error;
    fn try_from (s: &str) -> Result<Self> {
        let mut s = s.to_string();
        s.make_ascii_lowercase();
        Ok(match s.as_str() {
            "get" => Self::GET,
            "post" => Self::POST,
            "put" => Self::PUT,
            "delete" => Self::DELETE,
            "update" => Self::UPDATE,
            _ => bail!(E::UnknownMethod)
        })
    }
}

#[derive(Debug, Clone)]
enum Response {
    _201,
    _400,
    _404,
    _500,
    Empty,
    TextPlain (String, Vec<Encoding>),
    OctetStream (Vec<u8>)
}

#[allow(non_upper_case_globals)]
impl Response {
    fn build (self) -> Vec<u8> {

        let headers = self.headers().join("\r\n");
        let mut headers = self.headers();
        let code = self.code();

        let code = match self {
            Self::_404 => "404 Not Found",
            _ => "200 OK",
        };
        let mut v: Vec<u8> = vec![];
        let mut write_response_header = |headers: Vec<String>| v.extend_from_slice(format!("HTTP/1.1 {code}\r\n{}\r\n\r\n", headers.join("\r\n")).as_bytes());

        let mut v: Vec<u8> = f!("HTTP/1.1 {code}\r\n{headers}\r\n\r\n").into();
        match self {
            Self::OctetStream(bytes) => {
                write_response_header(headers);
                v.extend_from_slice(&bytes);
            },
            Self::TextPlain(text) => {
                v.extend_from_slice(text.as_bytes());
            Self::TextPlain(text, encodings) => {
                if encodings.contains(&Encoding::Gzip) {
                    let mut enc = GzEncoder::new(vec![], Compression::default());
                    enc.write_all(text.as_bytes()).unwrap();
                    let b = enc.finish().unwrap();
                    headers.push(format!("Content-Length: {}", b.len()));
                    write_response_header(headers);
                    v.extend_from_slice(&b);
                } else {
                    write_response_header(headers);
                    v.extend_from_slice(text.as_bytes());
                }

            },
            _ => ()
            _ => write_response_header(headers)
        }

        v
    }

    fn code (&self) -> &'static str {
        match self {
            Self::_201 => "201 Created",
            Self::_400 => "400 Bad Request",
            Self::_404 => "404 Not Found",
            Self::_500 => "500 Internal Server Error",
            _ => "200 OK",
        }
    }

    fn headers (&self) -> Vec<String> {
        match self {
            Self::TextPlain(text) => vec![f!("Content-Type: text/plain"), format!("Content-Length: {}", text.len())],
            Self::OctetStream(bytes) => vec![f!("Content-Type: application/octet-stream"), format!("Content-Length: {}", bytes.len())],
            Self::TextPlain(text, enc) => {
                let mut v = vec![
                    f!("Content-Type: text/plain"),
                ];
                if !enc.contains(&Encoding::Gzip) { v.push(format!("Content-Length: {}", text.len())) }
                let enc = enc.into_iter().map(Encoding::header).filter(|e| !e.is_empty()).join(", ");
                if !enc.is_empty() { v.push(f!("Content-Encoding: {enc}")) }

                v
            },
            Self::OctetStream(bytes) => vec![
                f!("Content-Type: application/octet-stream"),
                format!("Content-Length: {}", bytes.len())
            ],
            _ => d!()
        }
    }

src/utils.rs

@@ -9,4 +9,6 @@ macro_rules! f { ($s: expr) => { format!($s) }; }
pub enum E {
    #[error("Invalid request data found during parsing")]
    InvalidRequest,
    #[error("Cannot parse method")]
    UnknownMethod,
}