Update backend dependencies (#1120)

This finally updates all backend dependencies, including some with big
breaking changes. Most of the time went into the `hyper 1.0` update and into
updating all of the crypto-related libraries (see the sketch after the
checklist below).

I don't think a normal code review is all that useful here; a quick look is
enough. More important, in my opinion, is testing several things. Ideally this
PR shouldn't be merged before we have checked all of the following:

- [x] Opencast sync with HTTP
- [x] Opencast sync with HTTPS
- [x] `from_login_credentials = "opencast"` HTTP
- [x] `from_login_credentials = "opencast"` HTTPS
- [ ] `db.tls_mode = "on"`
- [x] `db.tls_mode = "without-verify-cert"`
- [x] `db.tls_mode = "off"`
- [x] The Tokio update bumps the effectively required libc version; check
  that it still works on the test deployment.
- [x] Auth callback with HTTP
- [x] Auth callback with HTTPS
- [x] Login callback with HTTP
- [x] Login callback with HTTPS
- [x] JWT still works (generation of key, generation of JWTs)
- [x] Tobira sessions still work
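
For context on why the `hyper 1.0` update ate most of the time: hyper 1.0 drops the
all-purpose `hyper::Body` type. Received bodies are now `hyper::body::Incoming`, and
sent bodies are whatever concrete type the caller picks (usually from `http-body-util`).
The following is only a minimal sketch of that shape, not Tobira's actual code; the
`"ok"` payload and the use of `Full<Bytes>` are assumptions for illustration.

```rust
use http_body_util::Full;
use hyper::{body::{Bytes, Incoming}, Request, Response};

// Received bodies are `Incoming`; sent bodies need an explicit type such as
// `Full<Bytes>`. This is why the handler signatures in the diff below change
// from `Request<Body>` to `Request<Incoming>` and responses use a concrete type.
async fn handler(_req: Request<Incoming>) -> Response<Full<Bytes>> {
    Response::new(Full::new(Bytes::from_static(b"ok")))
}
```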
owi92 authored Feb 23, 2024
2 parents 1d25386 + a6322b1 commit 97d28b2
Showing 19 changed files with 757 additions and 620 deletions.
913 changes: 500 additions & 413 deletions backend/Cargo.lock

Large diffs are not rendered by default.

29 changes: 15 additions & 14 deletions backend/Cargo.toml
@@ -26,19 +26,20 @@ bytes = "1"
chrono = { version = "0.4", default-features = false, features = ["serde", "std"] }
clap = { version = "4.2.2", features = ["derive", "string"] }
confique = { version = "0.2.0", default-features = false, features = ["toml"] }
cookie = "0.17.0"
cookie = "0.18.0"
dashmap = "5.5.3"
deadpool = { version = "0.9.0", default-features = false, features = ["managed", "rt_tokio_1"] }
deadpool-postgres = { version = "0.10", default-features = false, features = ["rt_tokio_1"] }
deadpool = { version = "0.10.0", default-features = false, features = ["managed", "rt_tokio_1"] }
deadpool-postgres = { version = "0.12.1", default-features = false, features = ["rt_tokio_1"] }
elliptic-curve = { version = "0.13.4", features = ["jwk", "sec1"] }
fallible-iterator = "0.2.0"
form_urlencoded = "1.1.0"
futures = { version = "0.3.1", default-features = false, features = ["std"] }
hex = "0.4.3"
hostname = "0.3"
hyper = { version = "0.14", features = ["client", "http1", "http2"] }
hyper-rustls = { version = "0.24.0", features = ["http2"] }
hyperlocal = { version = "0.8", default-features = false, features = ["server"] }
hyper = { version = "1", features = ["client", "http1", "http2"] }
http-body-util = "0.1"
hyper-rustls = { version = "0.26.0", features = ["http2"] }
hyper-util = { version = "0.1.3", features = ["client", "server", "http1", "http2"] }
isahc = { version = "1", features = ["static-ssl"] }
juniper = { version = "0.15.10", default-features = false, features = ["chrono", "schema-language"] }
libz-sys = { version = "1", features = ["static"] }
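
The hunk above swaps the hyper 0.14 stack (`hyper 0.14`, `hyper-rustls 0.24`, `hyperlocal`)
for the hyper 1.0 stack (`hyper 1`, `http-body-util`, `hyper-rustls 0.26`, `hyper-util`).
Purely as an assumption about how these crates fit together, and not Tobira's actual client
code (the sketch may even need hyper-util features beyond those listed above, e.g.
`client-legacy` and `tokio`), an HTTPS-capable client could be assembled roughly like this:

```rust
use http_body_util::{BodyExt, Empty};
use hyper::body::Bytes;
use hyper_util::{client::legacy::Client, rt::TokioExecutor};

// hyper 1.0 no longer ships a high-level client; `hyper_util::client::legacy::Client`
// takes that role, and hyper-rustls provides the TLS-capable connector.
async fn fetch(uri: hyper::Uri) -> anyhow::Result<Bytes> {
    let https = hyper_rustls::HttpsConnectorBuilder::new()
        .with_native_roots()?  // system trust roots (fallible in recent hyper-rustls)
        .https_or_http()       // plain HTTP stays possible (both are on the checklist)
        .enable_http1()
        .enable_http2()        // needs the "http2" feature enabled above
        .build();
    let client: Client<_, Empty<Bytes>> = Client::builder(TokioExecutor::new()).build(https);
    let req = hyper::Request::builder().uri(uri).body(Empty::new())?;
    let resp = client.request(req).await?;
    Ok(resp.into_body().collect().await?.to_bytes())
}
```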
@@ -55,14 +56,14 @@ pem-rfc7468 = { version = "0.7.0", features = ["std"] }
percent-encoding = "2.1.0"
postgres-protocol = "0.6.6"
postgres-types = { version = "0.2.2", features = ["derive", "array-impls"] }
prometheus-client = "0.20.0"
prometheus-client = "0.22.1"
rand = "0.8.4"
regex = "1.7.1"
reinda = "0.2"
ring = "0.16"
rustls = { version = "0.21.0", features = ["dangerous_configuration"] }
rustls-native-certs = "0.6.2"
rustls-pemfile = "1.0.0"
ring = "0.17.8"
rustls = "0.22.2"
rustls-native-certs = "0.7.0"
rustls-pemfile = "2.1.0"
secrecy = { version = "0.8", features = ["serde"] }
serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1"
@@ -71,13 +72,13 @@ static_assertions = "1"
tap = "1"
termcolor = "1.1.1"
time = "0.3"
tokio = { version = "=1.28", features = ["fs", "rt-multi-thread", "macros", "time"] }
tokio = { version = "1.36", features = ["fs", "rt-multi-thread", "macros", "time"] }
tokio-postgres = { version = "0.7", features = ["with-chrono-0_4", "with-serde_json-1"] }
tokio-postgres-rustls = "0.10.0"
tokio-postgres-rustls = "0.11.1"
url = "2.4.1"

[target.'cfg(target_os = "linux")'.dependencies]
procfs = "0.15.1"
procfs = "0.16.0"

[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = "0.5"
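
The rest of the manifest moves the crypto/TLS stack forward: `ring 0.17`, `rustls 0.22`
(which dropped the `dangerous_configuration` feature), `rustls-native-certs 0.7`,
`rustls-pemfile 2`, and `tokio-postgres-rustls 0.11`, plus the now un-pinned `tokio`.
As a hedged sketch of what the `db.tls_mode = "on"` case from the checklist has to do
against these versions (an assumption, not Tobira's code):

```rust
use rustls::{ClientConfig, RootCertStore};

// Build a verifying TLS client config for the database connection. The result
// would presumably be handed to `tokio_postgres_rustls::MakeRustlsConnect::new`.
fn db_tls_config() -> anyhow::Result<ClientConfig> {
    let mut roots = RootCertStore::empty();
    // rustls-native-certs 0.7 yields `CertificateDer` values directly, which
    // rustls 0.22's `RootCertStore::add` now takes by value.
    for cert in rustls_native_certs::load_native_certs()? {
        roots.add(cert)?;
    }
    // rustls 0.22: the builder starts from safe defaults; `with_safe_defaults()` is gone.
    Ok(ClientConfig::builder()
        .with_root_certificates(roots)
        .with_no_client_auth())
}
```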
3 changes: 1 addition & 2 deletions backend/src/auth/config.rs
@@ -1,7 +1,6 @@
use std::time::Duration;

use hyper::Uri;
use isahc::http::HeaderName;
use hyper::{http::HeaderName, Uri};
use secrecy::Secret;
use serde::{Deserialize, Deserializer, de::Error};
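
The only change here is that `HeaderName` now comes from hyper's re-export of the
`http` crate rather than through isahc. A trivial illustration (the header name below
is just an assumption for the example):

```rust
use hyper::http::HeaderName;

// hyper 1.0 re-exports the `http` crate, so no detour through isahc is needed.
fn example_header() -> HeaderName {
    HeaderName::from_static("x-example-header")
}
```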

49 changes: 23 additions & 26 deletions backend/src/auth/handlers.rs
@@ -1,22 +1,22 @@
use std::unreachable;

use base64::Engine;
use hyper::{Body, StatusCode};
use hyper::{body::Incoming, StatusCode, Request};
use serde::Deserialize;

use crate::{
auth::config::LoginCredentialsHandler,
db,
http::{self, Context, Request, Response, response::bad_request},
http::{self, response::bad_request, Context, Response},
prelude::*,
config::OpencastConfig,
auth::config::LoginCredentialsHandler,
util::{download_body, ByteBody},
};
use super::{config::SessionEndpointHandler, AuthSource, SessionId, User};


/// Handles POST requests to `/~session`.
pub(crate) async fn handle_post_session(
req: Request<Body>,
req: Request<Incoming>,
ctx: &Context,
) -> Result<Response, Response> {
let user = match &ctx.config.auth.session.from_session_endpoint {
@@ -29,12 +29,7 @@ pub(crate) async fn handle_post_session(
User::from_auth_headers(&req.headers(), &ctx.config.auth)
}
SessionEndpointHandler::Callback(callback_url) => {
User::from_auth_callback(
&req.headers(),
&callback_url,
&ctx.config.auth,
&ctx.auth_caches,
).await?
User::from_auth_callback(&req.headers(), &callback_url, ctx).await?
}
};

@@ -70,7 +65,7 @@ pub(crate) async fn handle_post_session(
/// settings. That's the proper tool to remove sessions. Still:
///
/// TODO: maybe notify the user about these failures?
pub(crate) async fn handle_delete_session(req: Request<Body>, ctx: &Context) -> Response {
pub(crate) async fn handle_delete_session(req: Request<Incoming>, ctx: &Context) -> Response {
if ctx.config.auth.source != AuthSource::TobiraSession {
warn!("Got DELETE /~session request, but due to 'auth.source', this endpoint is disabled");
return http::response::not_found();
@@ -79,7 +74,7 @@ pub(crate) async fn handle_delete_session(req: Request<Body>, ctx: &Context) ->
let response = Response::builder()
.status(StatusCode::NO_CONTENT)
.header("set-cookie", SessionId::unset_cookie().to_string())
.body(Body::empty())
.body(ByteBody::empty())
.unwrap();


@@ -109,7 +104,7 @@ const USERID_FIELD: &str = "userid";
const PASSWORD_FIELD: &str = "password";

/// Handles `POST /~login` request.
pub(crate) async fn handle_post_login(req: Request<Body>, ctx: &Context) -> Response {
pub(crate) async fn handle_post_login(req: Request<Incoming>, ctx: &Context) -> Response {
if ctx.config.auth.session.from_login_credentials == LoginCredentialsHandler::None {
warn!("Got POST /~login request, but due to 'auth.mode', this endpoint is disabled.");
return http::response::not_found();
@@ -126,11 +121,11 @@ pub(crate) async fn handle_post_login(req: Request<Body>, ctx: &Context) -> Resp
}

// Download whole body.
let body = match hyper::body::to_bytes(req.into_body()).await {
let body = match download_body(req.into_body()).await {
Ok(v) => v,
Err(e) => {
error!("Failed to download login request body: {e}");
return bad_request(None);
return bad_request("");
},
};
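
`hyper::body::to_bytes` does not exist anymore in hyper 1.0, hence the switch to a
`download_body` helper from `crate::util`. That helper is not part of this diff; a
plausible shape for it, assuming it simply wraps `http-body-util`'s `BodyExt::collect`,
would be:

```rust
use bytes::Bytes;
use http_body_util::BodyExt;

// Hypothetical stand-in for `crate::util::download_body` (the real helper is
// defined elsewhere in the crate): collect all frames of a body into one buffer.
async fn download_body<B>(body: B) -> anyhow::Result<Bytes>
where
    B: hyper::body::Body,
    B::Error: std::error::Error + Send + Sync + 'static,
{
    Ok(body.collect().await?.to_bytes())
}
```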

@@ -158,7 +153,7 @@ pub(crate) async fn handle_post_login(req: Request<Body>, ctx: &Context) -> Resp
// Check the login data.
let user = match &ctx.config.auth.session.from_login_credentials {
LoginCredentialsHandler::Opencast => {
match check_opencast_login(&userid, &password, &ctx.config.opencast).await {
match check_opencast_login(&userid, &password, ctx).await {
Err(e) => {
error!("Error occured while checking Opencast login data: {e:#}");
return http::response::internal_server_error();
@@ -175,7 +170,7 @@ pub(crate) async fn handle_post_login(req: Request<Body>, ctx: &Context) -> Resp
*req.method_mut() = hyper::Method::POST;
*req.uri_mut() = callback_url.clone();

match User::from_callback_impl(req, &ctx.config.auth).await {
match User::from_callback_impl(req, ctx).await {
Err(e) => return e,
Ok(user) => user,
}
@@ -184,18 +179,20 @@ pub(crate) async fn handle_post_login(req: Request<Body>, ctx: &Context) -> Resp
};

match user {
None => Response::builder().status(StatusCode::FORBIDDEN).body(Body::empty()).unwrap(),
None => Response::builder()
.status(StatusCode::FORBIDDEN)
.body(ByteBody::empty())
.unwrap(),
Some(user) => create_session(user, ctx).await.unwrap_or_else(|e| e),
}
}

async fn check_opencast_login(
userid: &str,
password: &str,
config: &OpencastConfig,
ctx: &Context,
) -> Result<Option<User>> {
trace!("Checking Opencast login...");
let client = crate::util::http_client();

// Send request. We use basic auth here: our configuration checks already
// assert that we use HTTPS or Opencast is running on the same machine
@@ -204,11 +201,11 @@ async fn check_opencast_login(
.encode(&format!("{userid}:{password}"));
let auth_header = format!("Basic {}", credentials);
let req = Request::builder()
.uri(config.sync_node().clone().with_path_and_query("/info/me.json"))
.uri(ctx.config.opencast.sync_node().clone().with_path_and_query("/info/me.json"))
.header(hyper::header::AUTHORIZATION, auth_header)
.body(Body::empty())
.body(ByteBody::empty())
.unwrap();
let response = client.request(req).await?;
let response = ctx.http_client.request(req).await?;


// We treat all non-OK response as invalid login data.
@@ -233,7 +230,7 @@ async fn check_opencast_login(
email: Option<String>,
}

let body = hyper::body::to_bytes(response.into_body()).await?;
let body = download_body(response.into_body()).await?;
let mut info: InfoMeResponse = serde_json::from_slice(&body)
.context("Could not deserialize `/info/me.json` response")?;

@@ -274,7 +271,7 @@ async fn create_session(mut user: User, ctx: &Context) -> Result<Response, Respo
Response::builder()
.status(StatusCode::NO_CONTENT)
.header("set-cookie", session_id.set_cookie(ctx.config.auth.session.duration).to_string())
.body(Body::empty())
.body(ByteBody::empty())
.unwrap()
.pipe(Ok)
}
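
Throughout this file, responses are now built with `crate::util::ByteBody` instead of
`hyper::Body`, because hyper 1.0 ships no ready-made body type for responses. `ByteBody`
is defined elsewhere in the crate and not shown here; a hypothetical minimal shape,
assuming it wraps `http_body_util::Full<Bytes>`, could look like this (the real type
would presumably also implement hyper's `Body` trait by delegating to the inner `Full`,
which is omitted):

```rust
use bytes::Bytes;
use http_body_util::Full;

// Hypothetical sketch of `crate::util::ByteBody`: a thin newtype so the rest of
// the code has one nameable response body type.
pub(crate) struct ByteBody(Full<Bytes>);

impl ByteBody {
    pub(crate) fn new(data: impl Into<Bytes>) -> Self {
        Self(Full::new(data.into()))
    }

    pub(crate) fn empty() -> Self {
        Self::new(Bytes::new())
    }
}
```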
21 changes: 11 additions & 10 deletions backend/src/auth/jwt.rs
@@ -1,3 +1,4 @@
use bytes::Bytes;
use ring::{rand::{SecureRandom, SystemRandom}, signature::EcdsaKeyPair};
use serde::Serialize;
use serde_json::json;
@@ -64,7 +65,7 @@ impl JwtContext {
}

/// Returns the JWKS as string. This is served as public JSON document.
pub(crate) fn jwks(&self) -> &str {
pub(crate) fn jwks(&self) -> &Bytes {
&self.auth.jwks
}

@@ -120,12 +121,13 @@

impl JwtConfig {
fn load_auth(&self) -> Result<JwtAuth> {
let rng = ring::rand::SystemRandom::new();
if let Some(secret_key_path) = &self.secret_key {
let pem_encoded = std::fs::read(secret_key_path)
.context("could not load secret key file")?;
let (_label, pkcs8_bytes) = pem_rfc7468::decode_vec(&pem_encoded)
.context("secret key file is not a valid PEM encoded key")?;
JwtAuth::load_es(self.signing_algorithm, &pkcs8_bytes)
JwtAuth::load_es(self.signing_algorithm, &pkcs8_bytes, &rng)
} else {
let ring_algo = match self.signing_algorithm {
Algorithm::ES256 => &ring::signature::ECDSA_P256_SHA256_FIXED_SIGNING,
@@ -136,11 +138,10 @@
"No JWT key specified, generating key for algorithm {}",
self.signing_algorithm.to_str(),
);
let rng = ring::rand::SystemRandom::new();
let pkcs8_bytes = ring::signature::EcdsaKeyPair::generate_pkcs8(ring_algo, &rng)
.context("failed to generate JWT ECDSA key")?;
.map_err(|_| anyhow!("failed to generate JWT ECDSA key"))?;

JwtAuth::load_es(self.signing_algorithm, pkcs8_bytes.as_ref())
JwtAuth::load_es(self.signing_algorithm, pkcs8_bytes.as_ref(), &rng)
}
}
}
@@ -149,20 +150,20 @@

struct JwtAuth {
signer: Box<dyn Signer>,
jwks: String,
jwks: Bytes,
}

impl JwtAuth {
/// Loads an elliptic curve key. `algo` has to be `ES256` or `ES384`!
fn load_es(algo: Algorithm, key: &[u8]) -> Result<JwtAuth> {
fn load_es(algo: Algorithm, key: &[u8], rng: &dyn SecureRandom) -> Result<JwtAuth> {
use elliptic_curve::pkcs8::DecodePrivateKey;

// Create a `ring` key pair that is used for signing.
let ring_algo = match algo {
Algorithm::ES256 => &ring::signature::ECDSA_P256_SHA256_FIXED_SIGNING,
Algorithm::ES384 => &ring::signature::ECDSA_P384_SHA384_FIXED_SIGNING,
};
let ring_key = EcdsaKeyPair::from_pkcs8(ring_algo, key).map_err(|e| {
let ring_key = EcdsaKeyPair::from_pkcs8(ring_algo, key, rng).map_err(|e| {
anyhow!("`jwt.secret_key` is not a valid ECDSA keypair for the expected \
algorithm in PKCS8 format: {e}")
})?;
@@ -191,7 +192,7 @@ impl JwtAuth {
}

/// Serializes the given `jwk` from `elliptic_curve` into the expected JWKS structure.
fn jwk_to_jwks(algo: Algorithm, jwk: impl Serialize) -> String {
fn jwk_to_jwks(algo: Algorithm, jwk: impl Serialize) -> Bytes {
#[derive(Serialize)]
struct Jwk<T: Serialize> {
#[serde(flatten)]
@@ -213,7 +214,7 @@ fn jwk_to_jwks(algo: Algorithm, jwk: impl Serialize) -> String {
alg: algo.to_str(),
}]
};
serde_json::to_string(&jwks).expect("failed to serialize JWKS")
serde_json::to_string(&jwks).expect("failed to serialize JWKS").into()
}

/// A signature algorithm with corresponding key. Can sign a message.
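
A closing note on the `ring 0.17` changes above: `EcdsaKeyPair::from_pkcs8` now takes an
explicit random number generator, which is why `load_es` gains the `rng` parameter, and
ring's error types no longer convert as conveniently, which is presumably why
`.context(...)` became `map_err(|_| anyhow!(...))`. A self-contained sketch of that flow
(not Tobira's code; P-256 mirrors the `ES256` branch):

```rust
use anyhow::anyhow;
use ring::{
    rand::SystemRandom,
    signature::{EcdsaKeyPair, ECDSA_P256_SHA256_FIXED_SIGNING},
};

// Generate a fresh P-256 key, reload it, and sign a payload with ring 0.17.
fn sign_demo(payload: &[u8]) -> anyhow::Result<Vec<u8>> {
    let rng = SystemRandom::new();
    let pkcs8 = EcdsaKeyPair::generate_pkcs8(&ECDSA_P256_SHA256_FIXED_SIGNING, &rng)
        .map_err(|_| anyhow!("failed to generate key"))?;
    // New in ring 0.17: `from_pkcs8` requires the RNG as a third argument.
    let key = EcdsaKeyPair::from_pkcs8(&ECDSA_P256_SHA256_FIXED_SIGNING, pkcs8.as_ref(), &rng)
        .map_err(|_| anyhow!("rejected key"))?;
    let signature = key.sign(&rng, payload).map_err(|_| anyhow!("signing failed"))?;
    Ok(signature.as_ref().to_vec())
}
```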