diff --git a/README.md b/README.md index 219bf7f265d61..445d8b90d96c9 100644 --- a/README.md +++ b/README.md @@ -261,7 +261,6 @@ it being synced automatically everyday. | WORKER_GROUP | default | The worker group the worker belongs to and get its configuration pulled from | Worker | | SERVER_BIND_ADDR | 0.0.0.0 | IP Address on which to bind listening socket | Server | | PORT | 8000 | Exposed port | Server | -| NUM_WORKERS | 1 | The number of worker per Worker instance (Set to 0 for API/Server instances, Set to 1 for normal workers, and > 1 for workers dedicated to native jobs) | Worker | | DISABLE_SERVER | false | Disable the external API, operate as a worker only instance | Worker | | METRICS_ADDR | None | (ee only) The socket addr at which to expose Prometheus metrics at the /metrics path. Set to "true" to expose it on port 8001 | All | | JSON_FMT | false | Output the logs in json format instead of logfmt | All | @@ -301,6 +300,7 @@ it being synced automatically everyday. | PIP_LOCAL_DEPENDENCIES | None | Specify dependencies that are installed locally and do not need to be solved nor installed again | | | ADDITIONAL_PYTHON_PATHS | None | Specify python paths (separated by a :) to be appended to the PYTHONPATH of the python jobs. To be used with PIP_LOCAL_DEPENDENCIES to use python codebases within Windmill | Worker | | INCLUDE_HEADERS | None | Whitelist of headers that are passed to jobs as args (separated by a comma) | Server | +| NUM_WORKERS | 1 | The number of workers per Worker instance (Set to 0 for API/Server instances, Set to 1 for normal workers, and > 1 for workers dedicated to native jobs) | Worker | | INSTANCE_EVENTS_WEBHOOK | None | Webhook to notify of events such as new user added, signup/invite. Can hook back to windmill to send emails | | GLOBAL_CACHE_INTERVAL | 10\*60 | (Enterprise Edition only) Interval in seconds in between bucket sync of the cache. This interval \* 2 is the time at which you're guaranteed all the worker's caches are synced together.
| Worker | | WORKER_TAGS | 'deno,go,python3,bash,flow,hub,dependency' | The worker groups assigned to that workers | Worker | diff --git a/backend/src/main.rs b/backend/src/main.rs index 7a7024cb87e11..263bcfd13a058 100644 --- a/backend/src/main.rs +++ b/backend/src/main.rs @@ -198,11 +198,7 @@ Windmill Community Edition {GIT_VERSION} let mut rx = rx.resubscribe(); let base_internal_url = base_internal_url.to_string(); - let rd_delay = rand::thread_rng().gen_range(0..30); - tokio::spawn(async move { - //monitor_db is applied at start, no need to apply it twice - tokio::time::sleep(Duration::from_secs(rd_delay)).await; - + let h = tokio::spawn(async move { let mut listener = retry_listen_pg(&db).await; loop { @@ -222,16 +218,18 @@ Windmill Community Edition {GIT_VERSION} tracing::info!("Received new pg notification: {n:?}"); match n.channel() { "notify_config_change" => { - tracing::info!("Config change detected: {}", n.payload()); match n.payload() { "server" if server_mode => { + tracing::info!("Server config change detected: {}", n.payload()); + reload_server_config(&db).await; }, a@ _ if worker_mode && a == format!("worker__{}", *WORKER_GROUP) => { + tracing::info!("Worker config change detected: {}", n.payload()); reload_worker_config(&db, tx.clone(), true).await; }, _ => { - tracing::error!("config target neither a server or a worker"); + tracing::debug!("config changed but did not target this server/worker"); } } }, @@ -302,6 +300,9 @@ Windmill Community Edition {GIT_VERSION} } }); + if let Err(e) = h.await { + tracing::error!("Error waiting for monitor handle:{e}") + } Ok(()) as anyhow::Result<()> }; diff --git a/backend/src/monitor.rs b/backend/src/monitor.rs index 1206ca29fb381..668cf3ebbf792 100644 --- a/backend/src/monitor.rs +++ b/backend/src/monitor.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, fmt::Display, ops::Mul, str::FromStr, sync::Arc}; +use std::{collections::HashMap, fmt::Display, ops::Mul, str::FromStr, sync::Arc, time::Duration}; use once_cell::sync::OnceCell; use serde::de::DeserializeOwned; @@ -467,9 +467,26 @@ pub async fn reload_worker_config( let wc = WORKER_CONFIG.read().await; let config = config.unwrap(); if *wc != config { - if kill_if_change && (*wc).dedicated_worker != config.dedicated_worker { - tracing::info!("Dedicated worker config changed, sending killpill. Expecting to be restarted by supervisor."); - let _ = tx.send(()); + if kill_if_change { + if (*wc).dedicated_worker != config.dedicated_worker { + tracing::info!("Dedicated worker config changed, sending killpill. Expecting to be restarted by supervisor."); + let _ = tx.send(()); + } + + if (*wc).init_bash != config.init_bash { + tracing::info!("Init bash config changed, sending killpill. Expecting to be restarted by supervisor."); + let _ = tx.send(()); + } + + if (*wc).cache_clear != config.cache_clear { + tracing::info!("Cache clear changed, sending killpill. 
Expecting to be restarted by supervisor."); + let _ = tx.send(()); + tracing::info!("Waiting 5 seconds to allow others workers to start potential jobs that depend on a potential shared cache volume"); + tokio::time::sleep(Duration::from_secs(5)).await; + if let Err(e) = windmill_worker::common::clean_cache().await { + tracing::error!("Error cleaning the cache: {e}"); + } + } } drop(wc); diff --git a/backend/windmill-api/src/oauth2.rs b/backend/windmill-api/src/oauth2.rs index bba919fb5d51e..b1d2300d430d6 100644 --- a/backend/windmill-api/src/oauth2.rs +++ b/backend/windmill-api/src/oauth2.rs @@ -35,9 +35,9 @@ use tower_cookies::{Cookie, Cookies}; use windmill_audit::{audit_log, ActionKind}; use windmill_common::db::UserDB; use windmill_common::jobs::JobPayload; +use windmill_common::more_serde::maybe_number_opt; use windmill_common::users::username_to_permissioned_as; use windmill_common::utils::{not_found_if_none, now_from_db}; -use windmill_common::more_serde::maybe_number_opt; use crate::db::ApiAuthed; use crate::saml::SamlSsoLogin; @@ -659,7 +659,7 @@ pub async fn _refresh_token<'c>( async fn _exchange_token(client: OClient, refresh_token: &str) -> Result { let token_json = client - .exchange_refresh_token(&RefreshToken::from(refresh_token.clone())) + .exchange_refresh_token(&RefreshToken::from(refresh_token)) .with_client(&HTTP_CLIENT) .execute::() .await diff --git a/backend/windmill-common/src/db.rs b/backend/windmill-common/src/db.rs index 3e756e4061f61..30ae8b72cd710 100644 --- a/backend/windmill-common/src/db.rs +++ b/backend/windmill-common/src/db.rs @@ -106,7 +106,7 @@ impl UserDB { .await?; let (folders_write, folders_read): &(Vec<_>, Vec<_>) = - &authed.folders().clone().into_iter().partition(|x| x.1); + &authed.folders().into_iter().partition(|x| x.1); let mut folders_read = folders_read.clone(); folders_read.extend(folders_write.clone()); diff --git a/backend/windmill-common/src/worker.rs b/backend/windmill-common/src/worker.rs index 2c732ec2a8b4a..80ce2e3cacfa2 100644 --- a/backend/windmill-common/src/worker.rs +++ b/backend/windmill-common/src/worker.rs @@ -33,6 +33,10 @@ lazy_static::lazy_static! 
{ pub static ref WORKER_CONFIG: Arc> = Arc::new(RwLock::new(WorkerConfig { worker_tags: Default::default(), dedicated_worker: Default::default(), + cache_clear: Default::default(), + init_bash: Default::default(), + additional_python_paths: Default::default(), + pip_local_dependencies: Default::default() })); pub static ref SERVER_CONFIG: Arc> = Arc::new(RwLock::new(ServerConfig { smtp: Default::default(), timeout_wait_result: 20 })); @@ -195,6 +199,26 @@ pub async fn load_worker_config(db: &DB) -> error::Result { }) .unwrap_or_else(|| DEFAULT_TAGS.clone()), dedicated_worker, + init_bash: config + .init_bash + .or_else(|| std::env::var("INIT_SCRIPT").ok()) + .and_then(|x| if x.is_empty() { None } else { Some(x) }), + cache_clear: config.cache_clear, + pip_local_dependencies: config.pip_local_dependencies.or_else(|| { + let pip_local_dependencies = std::env::var("PIP_LOCAL_DEPENDENCIES") + .ok() + .map(|x| x.split(',').map(|x| x.to_string()).collect()); + if pip_local_dependencies == Some(vec!["".to_string()]) { + None + } else { + pip_local_dependencies + } + }), + additional_python_paths: config.additional_python_paths.or_else(|| { + std::env::var("ADDITIONAL_PYTHON_PATHS") + .ok() + .map(|x| x.split(':').map(|x| x.to_string()).collect()) + }), }) } @@ -203,15 +227,27 @@ pub struct WorkspacedPath { pub workspace_id: String, pub path: String, } + #[derive(Serialize, Deserialize)] pub struct WorkerConfigOpt { pub worker_tags: Option>, pub dedicated_worker: Option, + pub init_bash: Option, + pub cache_clear: Option, + pub additional_python_paths: Option>, + pub pip_local_dependencies: Option>, } impl Default for WorkerConfigOpt { fn default() -> Self { - Self { worker_tags: Default::default(), dedicated_worker: Default::default() } + Self { + worker_tags: Default::default(), + dedicated_worker: Default::default(), + init_bash: Default::default(), + cache_clear: Default::default(), + additional_python_paths: Default::default(), + pip_local_dependencies: Default::default(), + } } } @@ -219,4 +255,8 @@ impl Default for WorkerConfigOpt { pub struct WorkerConfig { pub worker_tags: Vec, pub dedicated_worker: Option, + pub init_bash: Option, + pub cache_clear: Option, + pub additional_python_paths: Option>, + pub pip_local_dependencies: Option>, } diff --git a/backend/windmill-worker/src/common.rs b/backend/windmill-worker/src/common.rs index 1840f6079c7fc..3e13f101c81f9 100644 --- a/backend/windmill-worker/src/common.rs +++ b/backend/windmill-worker/src/common.rs @@ -40,7 +40,8 @@ use futures::{ }; use crate::{ - AuthedClient, MAX_RESULT_SIZE, MAX_WAIT_FOR_SIGTERM, TIMEOUT_DURATION, WHITELIST_ENVS, + AuthedClient, MAX_RESULT_SIZE, MAX_WAIT_FOR_SIGTERM, ROOT_CACHE_DIR, TIMEOUT_DURATION, + WHITELIST_ENVS, }; #[tracing::instrument(level = "trace", skip_all)] @@ -679,3 +680,10 @@ async fn append_logs(job_id: uuid::Uuid, logs: impl AsRef, db: impl Borrow< tracing::error!(%job_id, %err, "error updating logs for job {job_id}: {err}"); } } + +pub async fn clean_cache() -> error::Result<()> { + tracing::info!("Started cleaning cache"); + tokio::fs::remove_dir_all(ROOT_CACHE_DIR).await?; + tracing::info!("Finished cleaning cache"); + Ok(()) +} diff --git a/backend/windmill-worker/src/lib.rs b/backend/windmill-worker/src/lib.rs index 5325201ee4d48..cd0c1d07bd888 100644 --- a/backend/windmill-worker/src/lib.rs +++ b/backend/windmill-worker/src/lib.rs @@ -5,7 +5,7 @@ mod snowflake_executor; mod bash_executor; mod bun_executor; -mod common; +pub mod common; mod config; mod dedicated_worker; mod deno_executor; 
diff --git a/backend/windmill-worker/src/python_executor.rs b/backend/windmill-worker/src/python_executor.rs index dac0d68fd1f90..97ba432c12b48 100644 --- a/backend/windmill-worker/src/python_executor.rs +++ b/backend/windmill-worker/src/python_executor.rs @@ -13,6 +13,7 @@ use windmill_common::{ error::{self, Error}, jobs::QueuedJob, utils::calculate_hash, + worker::WORKER_CONFIG, }; lazy_static::lazy_static! { @@ -26,20 +27,7 @@ lazy_static::lazy_static! { static ref PIP_INDEX_URL: Option = std::env::var("PIP_INDEX_URL").ok(); static ref PIP_TRUSTED_HOST: Option = std::env::var("PIP_TRUSTED_HOST").ok(); - static ref PIP_LOCAL_DEPENDENCIES: Option> = { - let pip_local_dependencies = std::env::var("PIP_LOCAL_DEPENDENCIES") - .ok() - .map(|x| x.split(',').map(|x| x.to_string()).collect()); - if pip_local_dependencies == Some(vec!["".to_string()]) { - None - } else { - pip_local_dependencies - } - }; - static ref ADDITIONAL_PYTHON_PATHS: Option> = std::env::var("ADDITIONAL_PYTHON_PATHS") - .ok() - .map(|x| x.split(':').map(|x| x.to_string()).collect()); static ref RELATIVE_IMPORT_REGEX: Regex = Regex::new(r#"(import|from)\s(((u|f)\.)|\.)"#).unwrap(); @@ -84,7 +72,9 @@ pub async fn pip_compile( logs.push_str(&format!("\nresolving dependencies...")); set_logs(logs, job_id, db).await; logs.push_str(&format!("\ncontent of requirements:\n{}\n", requirements)); - let requirements = if let Some(pip_local_dependencies) = PIP_LOCAL_DEPENDENCIES.as_ref() { + let requirements = if let Some(pip_local_dependencies) = + WORKER_CONFIG.read().await.pip_local_dependencies.as_ref() + { let deps = pip_local_dependencies.clone(); requirements .lines() @@ -182,8 +172,13 @@ pub async fn handle_python_job( ) -> windmill_common::error::Result { create_dependencies_dir(job_dir).await; - let mut additional_python_paths: Vec = - ADDITIONAL_PYTHON_PATHS.to_owned().unwrap_or_else(|| vec![]); + let mut additional_python_paths: Vec = WORKER_CONFIG + .read() + .await + .additional_python_paths + .clone() + .unwrap_or_else(|| vec![]) + .clone(); let requirements = match requirements_o { Some(r) => r, diff --git a/backend/windmill-worker/src/worker.rs b/backend/windmill-worker/src/worker.rs index 8c3334e5ba98b..1157bf8cddfd7 100644 --- a/backend/windmill-worker/src/worker.rs +++ b/backend/windmill-worker/src/worker.rs @@ -35,7 +35,9 @@ use windmill_common::{ worker::{update_ping, CLOUD_HOSTED, WORKER_CONFIG}, DB, IS_READY, METRICS_ENABLED, }; -use windmill_queue::{canceled_job_to_result, get_queued_job, pull, HTTP_CLIENT}; +use windmill_queue::{ + canceled_job_to_result, get_queued_job, pull, push, PushIsolationLevel, HTTP_CLIENT, +}; use serde_json::{json, Value}; @@ -283,7 +285,6 @@ lazy_static::lazy_static! 
{ pub static ref CAN_PULL: Arc> = Arc::new(RwLock::new(())); - } //only matter if CLOUD_HOSTED pub const MAX_RESULT_SIZE: usize = 1024 * 1024 * 2; // 2MB @@ -896,6 +897,13 @@ pub async fn run_worker( + db: &Pool, + same_worker_tx: Sender, + worker_name: &str, + rsmq: Option, +) -> error::Result<()> { + if let Some(content) = WORKER_CONFIG.read().await.init_bash.clone() { + let tx = PushIsolationLevel::IsolatedRoot(db.clone(), rsmq); + let (uuid, inner_tx) = push( + &db, + tx, + "admins", + windmill_common::jobs::JobPayload::Code(windmill_common::jobs::RawCode { + content: content.clone(), + path: Some(format!("init_script_{worker_name}")), + language: ScriptLang::Bash, + lock: None, + concurrent_limit: None, + concurrency_time_window_s: None, + cache_ttl: None, + }), + serde_json::Map::new(), + worker_name, + "worker@windmill.dev", + SUPERADMIN_SECRET_EMAIL.to_string(), + None, + None, + None, + None, + None, + false, + true, + None, + true, + None, + None, + None, + ) + .await?; + inner_tx.commit().await?; + same_worker_tx.send(uuid).await.map_err(to_anyhow)?; + tracing::info!("Creating initial job {uuid} from initial script script: {content}"); + } + Ok(()) +} + // async fn process_result( // client: AuthedClient, // job: QueuedJob, @@ -2283,7 +2337,7 @@ async fn lock_modules( db, worker_name, worker_dir, - job_path.clone(), + job_path, base_internal_url, token, ) @@ -2304,7 +2358,7 @@ async fn lock_modules( db, worker_name, worker_dir, - job_path.clone(), + job_path, base_internal_url, token, ) @@ -2324,7 +2378,7 @@ async fn lock_modules( db, worker_name, worker_dir, - job_path.clone(), + job_path, base_internal_url, token, ) @@ -2339,7 +2393,7 @@ async fn lock_modules( db, worker_name, worker_dir, - job_path.clone(), + job_path, base_internal_url, token, ) diff --git a/backend/windmill-worker/src/worker_flow.rs b/backend/windmill-worker/src/worker_flow.rs index 7f3528006043a..ab04fc90e84f0 100644 --- a/backend/windmill-worker/src/worker_flow.rs +++ b/backend/windmill-worker/src/worker_flow.rs @@ -757,7 +757,7 @@ async fn compute_bool_from_expr( "resume".to_string(), resumes.0.last().map(|v| json!(v)).unwrap_or_default(), )); - env.push(("resumes".to_string(), resumes.0.clone().into())); + env.push(("resumes".to_string(), resumes.0.into())); env.push(("approvers".to_string(), json!(resumes.1.clone()))); } @@ -868,7 +868,7 @@ async fn transform_input( "resume".to_string(), resumes.last().map(|v| json!(v)).unwrap_or_default(), ), - ("resumes".to_string(), resumes.clone().into()), + ("resumes".to_string(), resumes.into()), ("approvers".to_string(), json!(approvers.clone())), ]; @@ -1854,7 +1854,7 @@ async fn compute_next_flow_transform( "resume".to_string(), resumes.last().map(|v| json!(v)).unwrap_or_default(), ), - ("resumes".to_string(), resumes.clone().into()), + ("resumes".to_string(), resumes.into()), ("approvers".to_string(), json!(approvers.clone())), ] }, diff --git a/frontend/src/lib/components/Editor.svelte b/frontend/src/lib/components/Editor.svelte index 0e775b72a2097..fcb76b41fe8bf 100644 --- a/frontend/src/lib/components/Editor.svelte +++ b/frontend/src/lib/components/Editor.svelte @@ -83,6 +83,7 @@ export let awareness: any | undefined = undefined export let folding = false export let args: Record | undefined = undefined + export let useWebsockets: boolean = true languages.typescript.typescriptDefaults.setModeConfiguration({ completionItems: false, @@ -629,220 +630,222 @@ const hostname = BROWSER ? 
window.location.protocol + '//' + window.location.host : 'SSR' let encodedImportMap = '' - if (lang == 'typescript' && deno) { - if (filePath && filePath.split('/').length > 2) { - let expiration = new Date() - expiration.setHours(expiration.getHours() + 2) - const token = await UserService.createToken({ - requestBody: { label: 'Ephemeral lsp token', expiration: expiration.toISOString() } - }) - let root = hostname + '/api/scripts_u/tokened_raw/' + $workspaceStore + '/' + token - const importMap = { - imports: { - 'file:///': root + '/' - } - } - let path_splitted = filePath.split('/') - for (let c = 0; c < path_splitted.length; c++) { - let key = 'file://./' - for (let i = 0; i < c; i++) { - key += '../' - } - let url = path_splitted.slice(0, -c - 1).join('/') - let ending = c == path_splitted.length - 1 ? '' : '/' - importMap['imports'][key] = `${root}/${url}${ending}` - } - encodedImportMap = 'data:text/plain;base64,' + btoa(JSON.stringify(importMap)) - } - await connectToLanguageServer( - `${wsProtocol}://${window.location.host}/ws/deno`, - 'deno', - { - certificateStores: null, - enablePaths: [], - config: null, - importMap: encodedImportMap, - internalDebug: false, - lint: false, - path: null, - tlsCertificate: null, - unsafelyIgnoreCertificateErrors: null, - unstable: true, - enable: true, - codeLens: { - implementations: true, - references: true, - referencesAllFunction: false - }, - suggest: { - autoImports: true, - completeFunctionCalls: false, - names: true, - paths: true, + if (useWebsockets) { + if (lang == 'typescript' && deno) { + if (filePath && filePath.split('/').length > 2) { + let expiration = new Date() + expiration.setHours(expiration.getHours() + 2) + const token = await UserService.createToken({ + requestBody: { label: 'Ephemeral lsp token', expiration: expiration.toISOString() } + }) + let root = hostname + '/api/scripts_u/tokened_raw/' + $workspaceStore + '/' + token + const importMap = { imports: { - autoDiscover: true, - hosts: { - 'https://deno.land': true - } + 'file:///': root + '/' } } - }, - () => { - return [ - { - enable: true + let path_splitted = filePath.split('/') + for (let c = 0; c < path_splitted.length; c++) { + let key = 'file://./' + for (let i = 0; i < c; i++) { + key += '../' } - ] + let url = path_splitted.slice(0, -c - 1).join('/') + let ending = c == path_splitted.length - 1 ? 
'' : '/' + importMap['imports'][key] = `${root}/${url}${ending}` + } + encodedImportMap = 'data:text/plain;base64,' + btoa(JSON.stringify(importMap)) } - ) - } else if (lang === 'typescript' && !deno) { - await connectToLanguageServer( - `${wsProtocol}://${window.location.host}/ws/bun`, - 'bun', - {}, - (params, token, next) => { - return [ - { - diagnostics: { - ignoredCodes: [2307] - }, - enable: true + await connectToLanguageServer( + `${wsProtocol}://${window.location.host}/ws/deno`, + 'deno', + { + certificateStores: null, + enablePaths: [], + config: null, + importMap: encodedImportMap, + internalDebug: false, + lint: false, + path: null, + tlsCertificate: null, + unsafelyIgnoreCertificateErrors: null, + unstable: true, + enable: true, + codeLens: { + implementations: true, + references: true, + referencesAllFunction: false + }, + suggest: { + autoImports: true, + completeFunctionCalls: false, + names: true, + paths: true, + imports: { + autoDiscover: true, + hosts: { + 'https://deno.land': true + } + } } - ] - } - ) - } else if (lang === 'python') { - await connectToLanguageServer( - `${wsProtocol}://${window.location.host}/ws/pyright`, - 'pyright', - {}, - (params, token, next) => { - if (params.items.find((x) => x.section === 'python')) { + }, + () => { return [ { - analysis: { - useLibraryCodeForTypes: true, - autoImportCompletions: true, - diagnosticSeverityOverrides: { reportMissingImports: 'none' }, - typeCheckingMode: 'basic' - } + enable: true } ] } - if (params.items.find((x) => x.section === 'python.analysis')) { + ) + } else if (lang === 'typescript' && !deno) { + await connectToLanguageServer( + `${wsProtocol}://${window.location.host}/ws/bun`, + 'bun', + {}, + (params, token, next) => { return [ { - useLibraryCodeForTypes: true, - autoImportCompletions: true, - diagnosticSeverityOverrides: { reportMissingImports: 'none' }, - typeCheckingMode: 'basic' + diagnostics: { + ignoredCodes: [2307] + }, + enable: true } ] } - return next(params, token) - } - ) - - connectToLanguageServer( - `${wsProtocol}://${window.location.host}/ws/ruff`, - 'ruff', - {}, - undefined - ) - connectToLanguageServer( - `${wsProtocol}://${window.location.host}/ws/diagnostic`, - 'black', - { - formatters: { - black: { - command: 'black', - args: ['--quiet', '-'] + ) + } else if (lang === 'python') { + await connectToLanguageServer( + `${wsProtocol}://${window.location.host}/ws/pyright`, + 'pyright', + {}, + (params, token, next) => { + if (params.items.find((x) => x.section === 'python')) { + return [ + { + analysis: { + useLibraryCodeForTypes: true, + autoImportCompletions: true, + diagnosticSeverityOverrides: { reportMissingImports: 'none' }, + typeCheckingMode: 'basic' + } + } + ] } - }, - formatFiletypes: { - python: 'black' - } - }, - undefined - ) - } else if (lang === 'go') { - connectToLanguageServer( - `${wsProtocol}://${window.location.host}/ws/go`, - 'go', - { - 'build.allowImplicitNetworkAccess': true - }, - undefined - ) - } else if (lang === 'shell') { - connectToLanguageServer( - `${wsProtocol}://${window.location.host}/ws/diagnostic`, - 'shellcheck', - { - linters: { - shellcheck: { - command: 'shellcheck', - debounce: 100, - args: ['--format=gcc', '-'], - offsetLine: 0, - offsetColumn: 0, - sourceName: 'shellcheck', - formatLines: 1, - formatPattern: [ - '^[^:]+:(\\d+):(\\d+):\\s+([^:]+):\\s+(.*)$', + if (params.items.find((x) => x.section === 'python.analysis')) { + return [ { - line: 1, - column: 2, - message: 4, - security: 3 + useLibraryCodeForTypes: true, + 
autoImportCompletions: true, + diagnosticSeverityOverrides: { reportMissingImports: 'none' }, + typeCheckingMode: 'basic' } - ], - securities: { - error: 'error', - warning: 'warning', - note: 'info' + ] + } + return next(params, token) + } + ) + + connectToLanguageServer( + `${wsProtocol}://${window.location.host}/ws/ruff`, + 'ruff', + {}, + undefined + ) + connectToLanguageServer( + `${wsProtocol}://${window.location.host}/ws/diagnostic`, + 'black', + { + formatters: { + black: { + command: 'black', + args: ['--quiet', '-'] } + }, + formatFiletypes: { + python: 'black' } }, - filetypes: { - shell: 'shellcheck' - } - }, - undefined - ) - } else { - closeWebsockets() - } + undefined + ) + } else if (lang === 'go') { + connectToLanguageServer( + `${wsProtocol}://${window.location.host}/ws/go`, + 'go', + { + 'build.allowImplicitNetworkAccess': true + }, + undefined + ) + } else if (lang === 'shell') { + connectToLanguageServer( + `${wsProtocol}://${window.location.host}/ws/diagnostic`, + 'shellcheck', + { + linters: { + shellcheck: { + command: 'shellcheck', + debounce: 100, + args: ['--format=gcc', '-'], + offsetLine: 0, + offsetColumn: 0, + sourceName: 'shellcheck', + formatLines: 1, + formatPattern: [ + '^[^:]+:(\\d+):(\\d+):\\s+([^:]+):\\s+(.*)$', + { + line: 1, + column: 2, + message: 4, + security: 3 + } + ], + securities: { + error: 'error', + warning: 'warning', + note: 'info' + } + } + }, + filetypes: { + shell: 'shellcheck' + } + }, + undefined + ) + } else { + closeWebsockets() + } - websocketInterval && clearInterval(websocketInterval) - websocketInterval = setInterval(() => { - if (document.visibilityState == 'visible') { - if ( - !lastWsAttempt || - (new Date().getTime() - lastWsAttempt.getTime() > 60000 && nbWsAttempt < 2) - ) { + websocketInterval && clearInterval(websocketInterval) + websocketInterval = setInterval(() => { + if (document.visibilityState == 'visible') { if ( - !websocketAlive.black && - !websocketAlive.deno && - !websocketAlive.pyright && - !websocketAlive.go && - !websocketAlive.bun && - !websocketAlive.shellcheck && - !websocketAlive.ruff + !lastWsAttempt || + (new Date().getTime() - lastWsAttempt.getTime() > 60000 && nbWsAttempt < 2) ) { - console.log('reconnecting to language servers') - lastWsAttempt = new Date() - nbWsAttempt++ - reloadWebsocket() - } else { - if (nbWsAttempt >= 2) { - sendUserToast('Giving up on establishing smart assistant connection', true) - clearInterval(websocketInterval) + if ( + !websocketAlive.black && + !websocketAlive.deno && + !websocketAlive.pyright && + !websocketAlive.go && + !websocketAlive.bun && + !websocketAlive.shellcheck && + !websocketAlive.ruff + ) { + console.log('reconnecting to language servers') + lastWsAttempt = new Date() + nbWsAttempt++ + reloadWebsocket() + } else { + if (nbWsAttempt >= 2) { + sendUserToast('Giving up on establishing smart assistant connection', true) + clearInterval(websocketInterval) + } } } } - } - }, 5000) + }, 5000) + } } let pathTimeout: NodeJS.Timeout | undefined = undefined diff --git a/frontend/src/lib/components/ScheduleEditor.svelte b/frontend/src/lib/components/ScheduleEditor.svelte index 8ab59334c3567..c58b582f086a4 100644 --- a/frontend/src/lib/components/ScheduleEditor.svelte +++ b/frontend/src/lib/components/ScheduleEditor.svelte @@ -374,7 +374,7 @@

Error Handler
-
The following args will be passed to the error handler:
  • path: The path of the script or flow that failed.
  • @@ -468,7 +468,7 @@ >(ee only){/if}
    The following args will be passed to the recovery handler:
    • path: The path of the script or flow that recovered.
    • diff --git a/frontend/src/lib/components/Section.svelte b/frontend/src/lib/components/Section.svelte index 3ee5615fa03c3..19002f67d0ac3 100644 --- a/frontend/src/lib/components/Section.svelte +++ b/frontend/src/lib/components/Section.svelte @@ -1,5 +1,8 @@
      @@ -7,6 +10,9 @@

      {label} + {#if tooltip} + {tooltip} + {/if}

      diff --git a/frontend/src/lib/components/WorkspaceGroup.svelte b/frontend/src/lib/components/WorkspaceGroup.svelte index 4d5f9bfb78e59..fc8efd0752c44 100644 --- a/frontend/src/lib/components/WorkspaceGroup.svelte +++ b/frontend/src/lib/components/WorkspaceGroup.svelte @@ -1,6 +1,6 @@ { - open = false + openDelete = false }} on:confirmed={async () => { deleteWorkerGroup() - open = false + openDelete = false }} >
      @@ -73,52 +81,73 @@
      -

      {name}

      - {#if $superadmin} - { + openClean = false + }} + on:confirmed={async () => { + const ndate = Math.floor(Date.now() / 1000) + const withCacheConfig = { ...nconfig, cache_clear: ndate } + await ConfigService.updateConfig({ + name: 'worker__' + name, + requestBody: withCacheConfig + }) + if (config) { + config.cache_clear = ndate + } + sendUserToast('Worker caches clearing in 5s. Workers will restart.') + dispatch('reload') + openClean = false + }} +> +
      + Are you sure you want to clean the cache of all workers of this worker group? This will also + kill the workers; the supervisor is expected to restart them. - - - - { - dirty = true - if (nconfig == undefined) { - nconfig = {} - } - console.log(e.detail) - if (e.detail == 'dedicated') { - nconfig.dedicated_worker = '' - nconfig.worker_tags = undefined - } else { - nconfig.dedicated_worker = undefined - nconfig.worker_tags = [] - } - }} - class="mb-4" - > - - - - {#if selected == 'normal'} +
      + + + + drawer.closeDrawer()} title="Edit worker config '{name}'"> + {#if !$enterpriseLicense} + + Workers can still have their WORKER_TAGS passed as env. Dedicated workers are an enterprise + only feature. + +
      + {/if} + + { + dirty = true + if (nconfig == undefined) { + nconfig = {} + } + if (e.detail == 'dedicated') { + nconfig.dedicated_worker = '' + nconfig.worker_tags = undefined + } else { + nconfig.dedicated_worker = undefined + nconfig.worker_tags = [] + } + }} + class="mb-4" + > + + + + {#if selected == 'normal'} +
      {#if nconfig?.worker_tags != undefined}
      {#each nconfig.worker_tags as tag} @@ -129,6 +158,7 @@ aria-label="Remove item" on:click|preventDefault|stopPropagation={() => { if (nconfig != undefined) { + dirty = true nconfig.worker_tags = nconfig?.worker_tags?.filter((t) => t != tag) ?? [] } }} @@ -138,24 +168,29 @@ > {/each}
      - -
      - -
      +
      + +
      + +
      +
      {/if} - {:else if selected == 'dedicated'} - {#if nconfig?.dedicated_worker != undefined} - { - dirty = true +
      + {:else if selected == 'dedicated'} + {#if nconfig?.dedicated_worker != undefined} + { + dirtyCode = true + dirty = true + }} + bind:value={nconfig.dedicated_worker} + /> +

      Workers will get killed upon detecting this setting change. It is assumed they are in an + environment where the supervisor will restart them. Upon restart, they will pick up the new + dedicated worker config.

      + {/if} + {/if} +
      + +
      +
      +
      + {#if dirtyCode} +
      The init script has changed; once applied, the workers will restart to run it.
      + {/if} + { + if (config) { + dirty = true + dirtyCode = true + const code = e.detail + if (code != '') { + nconfig.init_bash = code + } else { + nconfig.init_bash = undefined + } + } }} - bind:value={nconfig.dedicated_worker} /> -

      Workers will get killed upon detecting this setting change. It is assumed they are in - an environment where the supervisor will restart them. Upon restart, they will pick the - new dedicated worker config.

      +
      +
      + +
      +
      + {#if dirty} +
      Unapplied changes
      + {/if} + + - {#if !$enterpriseLicense}{selected == 'dedicated' - ? 'Dedicated workers are an enterprise only feature' - : 'The Worker Group Manager UI is an enterprise only feature. However, workers can still have their WORKER_TAGS passed as env'}{/if} + Apply changes + +
      - +
      + + + +

      {name}

      + {#if $superadmin} + + {#if config} - {/if} + + {:else if config} config {JSON.stringify(config, null, 4)} { + historyBrowserDrawerOpen = false + }} /> diff --git a/frontend/src/lib/components/common/drawer/DrawerContent.svelte b/frontend/src/lib/components/common/drawer/DrawerContent.svelte index b260fa33baec1..8e52e5957a589 100644 --- a/frontend/src/lib/components/common/drawer/DrawerContent.svelte +++ b/frontend/src/lib/components/common/drawer/DrawerContent.svelte @@ -14,15 +14,16 @@
      -
      -
      +
      +
      + + {title ?? ''} {#if tooltip != '' || documentationLink} {tooltip} {/if} -
      {#if $$slots.actions}
      diff --git a/frontend/src/routes/(root)/(logged)/folders/+page.svelte b/frontend/src/routes/(root)/(logged)/folders/+page.svelte index 79205f795e478..961bf812b886f 100644 --- a/frontend/src/routes/(root)/(logged)/folders/+page.svelte +++ b/frontend/src/routes/(root)/(logged)/folders/+page.svelte @@ -8,7 +8,7 @@ import PageHeader from '$lib/components/PageHeader.svelte' import { userStore, workspaceStore } from '$lib/stores' import { faEdit, faPlus, faTrash } from '@fortawesome/free-solid-svg-icons' - import { Button, Drawer, DrawerContent, Skeleton } from '$lib/components/common' + import { Button, Drawer, DrawerContent, Popup, Skeleton } from '$lib/components/common' import FolderInfo from '$lib/components/FolderInfo.svelte' import FolderUsageInfo from '$lib/components/FolderUsageInfo.svelte' import { canWrite } from '$lib/utils' @@ -29,11 +29,12 @@ }) } - function handleKeyUp(event: KeyboardEvent) { + function handleKeyUp(event: KeyboardEvent, close: () => void) { const key = event.key if (key === 'Enter') { event.preventDefault() addFolder() + close() } } async function addFolder() { @@ -77,22 +78,37 @@ documentationLink="https://www.windmill.dev/docs/core_concepts/groups_and_folders" >
      - -
      - -
      + + + + +
      + handleKeyUp(e, () => close(null))} + placeholder="New folder name" + bind:value={newFolderName} + /> + +
      + +
      +
      +
      diff --git a/frontend/src/routes/(root)/(logged)/groups/+page.svelte b/frontend/src/routes/(root)/(logged)/groups/+page.svelte index c5b21981e7a47..3dfef764845e3 100644 --- a/frontend/src/routes/(root)/(logged)/groups/+page.svelte +++ b/frontend/src/routes/(root)/(logged)/groups/+page.svelte @@ -4,7 +4,7 @@ import { GroupService } from '$lib/gen' import CenteredPage from '$lib/components/CenteredPage.svelte' - import { Button, Drawer, DrawerContent, Skeleton } from '$lib/components/common' + import { Button, Drawer, DrawerContent, Popup, Skeleton } from '$lib/components/common' import Dropdown from '$lib/components/Dropdown.svelte' import GroupEditor from '$lib/components/GroupEditor.svelte' import GroupInfo from '$lib/components/GroupInfo.svelte' @@ -76,16 +76,35 @@ documentationLink="https://www.windmill.dev/docs/core_concepts/groups_and_folders" >
      -
      - + + + + +
      + + +
      +
      diff --git a/frontend/src/routes/(root)/(logged)/workers/+page.svelte b/frontend/src/routes/(root)/(logged)/workers/+page.svelte index d33bcc22d5554..f1e2b4270a3a5 100644 --- a/frontend/src/routes/(root)/(logged)/workers/+page.svelte +++ b/frontend/src/routes/(root)/(logged)/workers/+page.svelte @@ -14,7 +14,7 @@ import { sendUserToast } from '$lib/toast' import { displayDate, groupBy, truncate } from '$lib/utils' import { faPlus } from '@fortawesome/free-solid-svg-icons' - import { Loader2, Pen, X } from 'lucide-svelte' + import { AlertTriangle, Loader2, Pen, X } from 'lucide-svelte' import { onDestroy, onMount } from 'svelte' let workers: WorkerPing[] | undefined = undefined @@ -246,23 +246,44 @@
      {#if $superadmin}
      - - - Worker Group configs are propagated to every workers in the worker group + +
      + + Worker Group configs are propagated to every worker in the worker group +
      +
      +
      + + + {#if !$enterpriseLicense} +
      + + EE only +
      + {/if} + +
      +
      {/if}
      - {#each groupedWorkers as worker_group, i} + {#each groupedWorkers as worker_group} { @@ -337,9 +358,8 @@
      - {#each Object.entries(workerGroups ?? {}).filter((x) => !groupedWorkers.some((y) => y[0] == x[0])) as worker_group, i} + {#each Object.entries(workerGroups ?? {}).filter((x) => !groupedWorkers.some((y) => y[0] == x[0])) as worker_group} 0} on:reload={() => { loadWorkerGroups() }}
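For context, the new worker-group config fields added to `WorkerConfigOpt` in `backend/windmill-common/src/worker.rs` (`init_bash`, `cache_clear`, `pip_local_dependencies`, `additional_python_paths`) are written from the frontend through the same `ConfigService.updateConfig` call used in `WorkspaceGroup.svelte`. A minimal TypeScript sketch of such an update, assuming `ConfigService` is exported from `$lib/gen` like the other generated services used in this diff and that `requestBody` accepts a plain object as it does in `WorkspaceGroup.svelte`:

```ts
import { ConfigService } from '$lib/gen'

// Sketch only: field names mirror WorkerConfigOpt in backend/windmill-common/src/worker.rs;
// the values shown are illustrative, not defaults taken from the diff.
async function applyWorkerGroupConfig(groupName: string): Promise<void> {
	await ConfigService.updateConfig({
		// worker group configs are stored under the worker__ prefix, as in WorkspaceGroup.svelte
		name: 'worker__' + groupName,
		requestBody: {
			worker_tags: ['deno', 'python3', 'bash'], // tags of the jobs this group pulls
			dedicated_worker: undefined, // a non-undefined string switches the group to dedicated-worker mode
			init_bash: 'echo "worker starting"', // run on worker start as a Bash job named init_script_<worker_name>
			cache_clear: Math.floor(Date.now() / 1000), // bumping this timestamp triggers a cache wipe and worker restart
			pip_local_dependencies: undefined,
			additional_python_paths: undefined
		}
	})
}
```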