
Commit

pretty urls after start
mellowagain committed Oct 31, 2023
1 parent 277891a commit dd9d970
Showing 7 changed files with 97 additions and 8 deletions.
10 changes: 10 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions am/Cargo.toml
@@ -44,6 +44,7 @@ serde_yaml = "0.9.21"
sha2 = "0.10.6"
tar = "0.4.38"
tempfile = "3.5.0"
termcolor = "1.3.0"
thiserror = "1.0.48"
tokio = { version = "1.28.1", features = ["full"] }
toml = "0.7.4"
6 changes: 6 additions & 0 deletions am/src/commands/proxy.rs
@@ -1,7 +1,9 @@
use crate::server::start_web_server;
use crate::terminal;
use anyhow::{bail, Context, Result};
use clap::Parser;
use directories::ProjectDirs;
use std::collections::HashMap;
use std::net::SocketAddr;
use tokio::select;
use tokio::sync::watch;
@@ -65,6 +67,7 @@ pub async fn handle_command(args: CliArguments) -> Result<()> {
.with_context(|| format!("Unable to create data directory: {:?}", local_data))?;

let (tx, _) = watch::channel(None);
let (urls_tx, urls_rx) = watch::channel(HashMap::new());

// Start web server for hosting the explorer, am api and proxies to the enabled services.
let web_server_task = async move {
@@ -75,10 +78,13 @@ pub async fn handle_command(args: CliArguments) -> Result<()> {
args.prometheus_url,
args.static_assets_url,
tx,
urls_tx,
)
.await
};

terminal::wait_and_print_urls(urls_rx);

select! {
biased;

7 changes: 6 additions & 1 deletion am/src/commands/start.rs
@@ -1,7 +1,7 @@
use crate::dir::AutoCleanupDir;
use crate::downloader::{download_github_release, unpack, verify_checksum};
use crate::interactive;
use crate::server::start_web_server;
use crate::{interactive, terminal};
use anyhow::{anyhow, bail, Context, Result};
use autometrics_am::config::{endpoints_from_first_input, AmConfig};
use autometrics_am::parser::endpoint_parser;
@@ -13,6 +13,7 @@ use futures_util::FutureExt;
use indicatif::MultiProgress;
use once_cell::sync::Lazy;
use rand::distributions::{Alphanumeric, DistString};
use std::collections::HashMap;
use std::fs::File;
use std::io::{Seek, SeekFrom};
use std::net::SocketAddr;
@@ -281,6 +282,7 @@ pub async fn handle_command(args: CliArguments, config: AmConfig, mp: MultiProgr
}

let (tx, rx) = watch::channel(None);
let (tx_url, rx_url) = watch::channel(HashMap::new());

let static_assets_url = args.static_assets_url.clone();
// Start web server for hosting the explorer, am api and proxies to the enabled services.
@@ -292,6 +294,7 @@ pub async fn handle_command(args: CliArguments, config: AmConfig, mp: MultiProgr
None,
static_assets_url,
tx,
tx_url,
)
.await
};
@@ -384,6 +387,8 @@ pub async fn handle_command(args: CliArguments, config: AmConfig, mp: MultiProgr
info!("Now sampling the following endpoints for metrics: {endpoints}");
}

terminal::wait_and_print_urls(rx_url);

select! {
biased;

1 change: 1 addition & 0 deletions am/src/main.rs
@@ -20,6 +20,7 @@ mod dir;
mod downloader;
mod interactive;
mod server;
mod terminal;

#[tokio::main]
async fn main() {
21 changes: 14 additions & 7 deletions am/src/server.rs
@@ -5,10 +5,11 @@ use axum::response::Redirect;
use axum::routing::{any, get};
use axum::{Router, Server};
use http::header::CONNECTION;
use std::collections::HashMap;
use std::net::SocketAddr;
use std::sync::Arc;
use tokio::sync::watch::Sender;
use tracing::{debug, info};
use tracing::debug;
use url::Url;

use crate::server::util::proxy_handler;
@@ -26,6 +27,7 @@ pub(crate) async fn start_web_server(
prometheus_proxy_url: Option<Url>,
static_assets_url: Url,
tx: Sender<Option<SocketAddr>>,
tx_url: Sender<HashMap<&'static str, String>>,
) -> Result<()> {
let is_proxying_prometheus = prometheus_proxy_url.is_some();
let should_enable_prometheus = enable_prometheus && !is_proxying_prometheus;
@@ -125,22 +127,27 @@ pub(crate) async fn start_web_server(

debug!("Web server listening on {}", server.local_addr());

info!("Explorer endpoint: http://{}", server.local_addr());
let mut urls = HashMap::from([("Explorer", format!("http://{}", server.local_addr()))]);

if should_enable_prometheus {
info!("Prometheus endpoint: http://127.0.0.1:9090/prometheus");
urls.insert("Prometheus", "http://127.0.0.1:9090/prometheus".to_string());
}

if is_proxying_prometheus {
info!("Proxying to prometheus: {}", prometheus_proxy_url.unwrap());
urls.insert(
"Prometheus Proxy Destination",
prometheus_proxy_url.unwrap().to_string(),
);
}

if enable_pushgateway {
info!("Pushgateway endpoint: http://127.0.0.1:9091/pushgateway");
urls.insert(
"Pushgateway",
"http://127.0.0.1:9091/pushgateway".to_string(),
);
}

// TODO: Add support for graceful shutdown
// server.with_graceful_shutdown(shutdown_signal()).await?;
tx_url.send_replace(urls);
server.await?;

Ok(())
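A minimal, self-contained sketch of the watch-channel handoff the diff above wires up between the web server and the terminal printer: the server side publishes its URL map with send_replace once it is listening, and the terminal side blocks on wait_for until the map is non-empty. The "Explorer" entry and the port below are placeholders for illustration, not values taken from the commit.

use std::collections::HashMap;
use tokio::sync::watch;

#[tokio::main]
async fn main() {
    // Channel carries the name -> URL map; it starts out empty.
    let (tx_url, mut rx_url) = watch::channel(HashMap::<&'static str, String>::new());

    // Terminal side: wait until the server has published at least one URL.
    let printer = tokio::spawn(async move {
        if let Ok(map) = rx_url.wait_for(|map| !map.is_empty()).await {
            for (name, url) in map.iter() {
                println!("{name}: {url}");
            }
        }
    });

    // Server side: publish the URLs once the listener is up.
    let mut urls = HashMap::new();
    urls.insert("Explorer", "http://127.0.0.1:6789".to_string());
    tx_url.send_replace(urls);

    printer.await.unwrap();
}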
59 changes: 59 additions & 0 deletions am/src/terminal.rs
@@ -0,0 +1,59 @@
use anyhow::Result;
use itertools::Itertools;
use std::collections::HashMap;
use std::io::Write;
use std::time::Duration;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
use tokio::sync::watch::Receiver;
use tracing::info;

pub(crate) fn wait_and_print_urls(mut rx: Receiver<HashMap<&'static str, String>>) {
tokio::spawn(async move {
// wait a second until all other log messages (invoked in the `select!` below) are printed
// Prometheus and Pushgateway usually don't take longer than a second to start, so this should be good
tokio::time::sleep(Duration::from_secs(1)).await;

match rx.wait_for(|map| !map.is_empty()).await {
Ok(map) => {
let _ = print_urls(&map);
}
Err(err) => {
info!(?err, "failed to wait for urls");
}
}
});
}

pub(crate) fn print_urls(map: &HashMap<&str, String>) -> Result<()> {
let length = map
.iter()
.map(|(name, _)| name.len() + 5)
.max()
.unwrap_or(0);

let mut stdout = StandardStream::stdout(ColorChoice::Always);

stdout.set_color(ColorSpec::new().set_fg(Some(Color::Magenta)).set_bold(true))?;
write!(stdout, "\n am ")?;

stdout.set_color(
ColorSpec::new()
.set_fg(Some(Color::Magenta))
.set_bold(false),
)?;
write!(stdout, "v{}", env!("CARGO_PKG_VERSION"))?;

stdout.set_color(ColorSpec::new().set_fg(Some(Color::White)))?;
writeln!(stdout, " press ctrl + c to shutdown\n")?;

for (name, url) in map.iter().sorted_by(|(a, _), (b, _)| a.cmp(b)) {
stdout.set_color(ColorSpec::new().set_fg(Some(Color::White)).set_bold(true))?;
write!(stdout, " {:width$}", name, width = length)?;

stdout.set_color(ColorSpec::new().set_fg(Some(Color::White)).set_bold(false))?;
writeln!(stdout, " {}", url)?;
}

writeln!(stdout, "")?;

Check failure on line 57 in am/src/terminal.rs (GitHub Actions / test): empty string literal in `writeln!`
Ok(())
}

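The check failure flagged above is a lint about passing an empty string literal to writeln!. A likely fix, assuming the lint only wants the redundant literal removed (not part of this commit), is to let the macro emit the bare newline:

// instead of:
writeln!(stdout, "")?;
// write:
writeln!(stdout)?;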