From c11d7397f6e28d9704f63b5d2ff09aea719366cc Mon Sep 17 00:00:00 2001 From: Evrard-Nil Daillet Date: Wed, 14 Jan 2026 16:11:51 +0100 Subject: [PATCH 01/18] Make DNS TXT record TTL configurable. --- certbot/src/acme_client.rs | 13 +++++++++++-- certbot/src/bot.rs | 23 +++++++++++++++++++---- certbot/src/dns01_client.rs | 3 ++- certbot/src/dns01_client/cloudflare.rs | 7 ++++--- gateway/gateway.toml | 3 +++ gateway/src/config.rs | 9 +++++++++ 6 files changed, 48 insertions(+), 10 deletions(-) diff --git a/certbot/src/acme_client.rs b/certbot/src/acme_client.rs index d4ebcf51..50ec4589 100644 --- a/certbot/src/acme_client.rs +++ b/certbot/src/acme_client.rs @@ -28,6 +28,8 @@ pub struct AcmeClient { credentials: Credentials, dns01_client: Dns01Client, max_dns_wait: Duration, + /// TTL for DNS TXT records used in ACME challenges (in seconds). + dns_txt_ttl: u32, } #[derive(Debug, Clone)] @@ -58,6 +60,7 @@ impl AcmeClient { dns01_client: Dns01Client, encoded_credentials: &str, max_dns_wait: Duration, + dns_txt_ttl: u32, ) -> Result { let credentials: Credentials = serde_json::from_str(encoded_credentials)?; let account = Account::from_credentials(credentials.credentials).await?; @@ -67,6 +70,7 @@ impl AcmeClient { dns01_client, credentials, max_dns_wait, + dns_txt_ttl, }) } @@ -75,6 +79,7 @@ impl AcmeClient { acme_url: &str, dns01_client: Dns01Client, max_dns_wait: Duration, + dns_txt_ttl: u32, ) -> Result { let (account, credentials) = Account::create( &NewAccount { @@ -97,6 +102,7 @@ impl AcmeClient { dns01_client, credentials, max_dns_wait, + dns_txt_ttl, }) } @@ -328,10 +334,13 @@ impl AcmeClient { .remove_txt_records(&acme_domain) .await .context("failed to remove existing dns record")?; - debug!("creating TXT record for {acme_domain}"); + debug!( + "creating TXT record for {acme_domain} with TTL {}s", + self.dns_txt_ttl + ); let id = self .dns01_client - .add_txt_record(&acme_domain, &dns_value) + .add_txt_record(&acme_domain, &dns_value, self.dns_txt_ttl) .await .context("failed to create dns record")?; challenges.push(Challenge { diff --git a/certbot/src/bot.rs b/certbot/src/bot.rs index 5a9c775f..1b59b767 100644 --- a/certbot/src/bot.rs +++ b/certbot/src/bot.rs @@ -37,6 +37,10 @@ pub struct CertBotConfig { renew_expires_in: Duration, renewed_hook: Option, max_dns_wait: Duration, + /// TTL for DNS TXT records used in ACME challenges (in seconds). + /// Minimum is 60 for Cloudflare. + #[builder(default = 60)] + dns_txt_ttl: u32, } impl CertBotConfig { @@ -55,9 +59,14 @@ async fn create_new_account( dns01_client: Dns01Client, ) -> Result { info!("creating new ACME account"); - let client = AcmeClient::new_account(&config.acme_url, dns01_client, config.max_dns_wait) - .await - .context("failed to create new account")?; + let client = AcmeClient::new_account( + &config.acme_url, + dns01_client, + config.max_dns_wait, + config.dns_txt_ttl, + ) + .await + .context("failed to create new account")?; let credentials = client .dump_credentials() .context("failed to dump credentials")?; @@ -90,7 +99,13 @@ impl CertBot { let acme_client = match fs::read_to_string(&config.credentials_file) { Ok(credentials) => { if acme_matches(&credentials, &config.acme_url) { - AcmeClient::load(dns01_client, &credentials, config.max_dns_wait).await? + AcmeClient::load( + dns01_client, + &credentials, + config.max_dns_wait, + config.dns_txt_ttl, + ) + .await? } else { create_new_account(&config, dns01_client).await? 
} diff --git a/certbot/src/dns01_client.rs b/certbot/src/dns01_client.rs index 701d5ba9..b4d4aeaa 100644 --- a/certbot/src/dns01_client.rs +++ b/certbot/src/dns01_client.rs @@ -28,7 +28,8 @@ pub(crate) trait Dns01Api { /// Creates a TXT DNS record with the given domain and content. /// /// Returns the ID of the created record. - async fn add_txt_record(&self, domain: &str, content: &str) -> Result; + /// The `ttl` parameter specifies the time-to-live in seconds (1 = auto, min 60 for Cloudflare). + async fn add_txt_record(&self, domain: &str, content: &str, ttl: u32) -> Result; /// Add a CAA record for the given domain. async fn add_caa_record( diff --git a/certbot/src/dns01_client/cloudflare.rs b/certbot/src/dns01_client/cloudflare.rs index 222028da..d7a6b1f5 100644 --- a/certbot/src/dns01_client/cloudflare.rs +++ b/certbot/src/dns01_client/cloudflare.rs @@ -270,12 +270,13 @@ impl Dns01Api for CloudflareClient { Ok(()) } - async fn add_txt_record(&self, domain: &str, content: &str) -> Result { + async fn add_txt_record(&self, domain: &str, content: &str, ttl: u32) -> Result { let response = self .add_record(&json!({ "type": "TXT", "name": domain, "content": content, + "ttl": ttl, })) .await?; Ok(response.result.id) @@ -358,7 +359,7 @@ mod tests { let subdomain = random_subdomain(); println!("subdomain: {}", subdomain); let record_id = client - .add_txt_record(&subdomain, "1234567890") + .add_txt_record(&subdomain, "1234567890", 60) .await .unwrap(); let record = client.get_txt_records(&subdomain).await.unwrap(); @@ -375,7 +376,7 @@ mod tests { let subdomain = random_subdomain(); println!("subdomain: {}", subdomain); let record_id = client - .add_txt_record(&subdomain, "1234567890") + .add_txt_record(&subdomain, "1234567890", 60) .await .unwrap(); let record = client.get_txt_records(&subdomain).await.unwrap(); diff --git a/gateway/gateway.toml b/gateway/gateway.toml index 78446b0e..cf704b5c 100644 --- a/gateway/gateway.toml +++ b/gateway/gateway.toml @@ -38,6 +38,9 @@ renew_interval = "1h" renew_before_expiration = "10d" renew_timeout = "120s" max_dns_wait = "5m" +# TTL for DNS TXT records used in ACME challenges (in seconds). +# Minimum is 60 for Cloudflare. +dns_txt_ttl = 60 [core.wg] public_key = "" diff --git a/gateway/src/config.rs b/gateway/src/config.rs index 4809aef8..3b990795 100644 --- a/gateway/src/config.rs +++ b/gateway/src/config.rs @@ -209,6 +209,14 @@ pub struct CertbotConfig { /// Maximum time to wait for DNS propagation #[serde(with = "serde_duration")] pub max_dns_wait: Duration, + /// TTL for DNS TXT records used in ACME challenges (in seconds). + /// Minimum is 60 for Cloudflare. Lower TTL means faster DNS propagation. + #[serde(default = "default_dns_txt_ttl")] + pub dns_txt_ttl: u32, +} + +fn default_dns_txt_ttl() -> u32 { + 60 } impl CertbotConfig { @@ -228,6 +236,7 @@ impl CertbotConfig { .renew_expires_in(self.renew_before_expiration) .auto_set_caa(self.auto_set_caa) .max_dns_wait(self.max_dns_wait) + .dns_txt_ttl(self.dns_txt_ttl) .build() } From 696441b4ee2b5d81afba3addf1aea72b106f31b5 Mon Sep 17 00:00:00 2001 From: Evrard-Nil Daillet Date: Thu, 15 Jan 2026 11:21:12 +0100 Subject: [PATCH 02/18] Make DNS TXT record TTL configurable in entrypoint and deployment scripts. 
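For context, the TTL value flows through three places touched by this patch and the previous one. A minimal sketch of the chain, for illustration only, with the defaults exactly as written in the diffs below (nothing here is an additional change):

    # deploy-to-vmm.sh defines the knob and forwards it alongside the other deployment variables
    CERTBOT_DNS_TXT_TTL=60

    # entrypoint.sh substitutes it into the generated gateway configuration
    dns_txt_ttl = "${CERTBOT_DNS_TXT_TTL:-60}"

    # gateway/src/config.rs then deserializes dns_txt_ttl (defaulting to 60), the certbot
    # config builder picks it up via .dns_txt_ttl(...), and the ACME client uses it when
    # creating the challenge TXT records.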
--- gateway/dstack-app/builder/entrypoint.sh | 1 + gateway/dstack-app/deploy-to-vmm.sh | 2 ++ 2 files changed, 3 insertions(+) diff --git a/gateway/dstack-app/builder/entrypoint.sh b/gateway/dstack-app/builder/entrypoint.sh index cd25da1f..9cd46755 100755 --- a/gateway/dstack-app/builder/entrypoint.sh +++ b/gateway/dstack-app/builder/entrypoint.sh @@ -118,6 +118,7 @@ renew_interval = "1h" renew_before_expiration = "10d" renew_timeout = "5m" max_dns_wait = "${CERTBOT_MAX_DNS_WAIT:-5m}" +dns_txt_ttl = "${CERTBOT_DNS_TXT_TTL:-60}" [core.wg] public_key = "$PUBLIC_KEY" diff --git a/gateway/dstack-app/deploy-to-vmm.sh b/gateway/dstack-app/deploy-to-vmm.sh index 2584d450..47da3fcf 100755 --- a/gateway/dstack-app/deploy-to-vmm.sh +++ b/gateway/dstack-app/deploy-to-vmm.sh @@ -82,6 +82,7 @@ GUEST_AGENT_ADDR=127.0.0.1:9206 WG_ADDR=0.0.0.0:9202 CERTBOT_MAX_DNS_WAIT=5m +CERTBOT_DNS_TXT_TTL=60 # The token used to launch the App APP_LAUNCH_TOKEN=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) @@ -141,6 +142,7 @@ SUBNET_INDEX=$SUBNET_INDEX APP_LAUNCH_TOKEN=$APP_LAUNCH_TOKEN RPC_DOMAIN=$RPC_DOMAIN CERTBOT_MAX_DNS_WAIT=$CERTBOT_MAX_DNS_WAIT +CERTBOT_DNS_TXT_TTL=$CERTBOT_DNS_TXT_TTL EOF if [ -n "$APP_COMPOSE_FILE" ]; then From c453cec6719fcee8b06d1f3cd1fd6cd9cab156af Mon Sep 17 00:00:00 2001 From: Hang Yin Date: Fri, 16 Jan 2026 22:48:15 +0000 Subject: [PATCH 03/18] fix: add white background to banner image for dark mode This ensures the logo renders properly in dark mode environments like GitHub's dark theme. Closes #439 Co-Authored-By: Claude Opus 4.5 --- dstack-logo.svg | 1 + 1 file changed, 1 insertion(+) diff --git a/dstack-logo.svg b/dstack-logo.svg index 4365f9ae..2f17b1cf 100644 --- a/dstack-logo.svg +++ b/dstack-logo.svg @@ -1,4 +1,5 @@ + From 9e3d200fc693078f031cdf322b4c819daa50427a Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Mon, 19 Jan 2026 03:08:18 +0000 Subject: [PATCH 04/18] Remove foundry submodules --- kms/auth-eth/lib/forge-std | 1 - kms/auth-eth/lib/openzeppelin-contracts-upgradeable | 1 - kms/auth-eth/lib/openzeppelin-foundry-upgrades | 1 - 3 files changed, 3 deletions(-) delete mode 160000 kms/auth-eth/lib/forge-std delete mode 160000 kms/auth-eth/lib/openzeppelin-contracts-upgradeable delete mode 160000 kms/auth-eth/lib/openzeppelin-foundry-upgrades diff --git a/kms/auth-eth/lib/forge-std b/kms/auth-eth/lib/forge-std deleted file mode 160000 index 77041d2c..00000000 --- a/kms/auth-eth/lib/forge-std +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 77041d2ce690e692d6e03cc812b57d1ddaa4d505 diff --git a/kms/auth-eth/lib/openzeppelin-contracts-upgradeable b/kms/auth-eth/lib/openzeppelin-contracts-upgradeable deleted file mode 160000 index e725abdd..00000000 --- a/kms/auth-eth/lib/openzeppelin-contracts-upgradeable +++ /dev/null @@ -1 +0,0 @@ -Subproject commit e725abddf1e01cf05ace496e950fc8e243cc7cab diff --git a/kms/auth-eth/lib/openzeppelin-foundry-upgrades b/kms/auth-eth/lib/openzeppelin-foundry-upgrades deleted file mode 160000 index cbce1e00..00000000 --- a/kms/auth-eth/lib/openzeppelin-foundry-upgrades +++ /dev/null @@ -1 +0,0 @@ -Subproject commit cbce1e00305e943aa1661d43f41e5ac72c662b07 From 08cc383b25259646ad246b66418a5e153995d8f7 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 20 Jan 2026 02:42:10 +0000 Subject: [PATCH 05/18] Fix vm_config loading from sys-config.json Previously the code was reading the entire sys-config.json file as the config, but it should only read the vm_config field within it. 
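For reference, sys-config.json is a JSON object in which vm_config is only one of several fields. An abridged sketch of its shape, modeled on the simulator fixture added later in this series (hostnames and the hash are placeholders):

    {
      "kms_urls": ["https://kms.example.org:12001"],
      "gateway_urls": ["https://gateway.example.org:12002"],
      "pccs_url": "",
      "docker_registry": "",
      "host_api_url": "vsock://2:12000/api",
      "vm_config": "{\"os_image_hash\":\"...\",\"cpu_count\":1,\"memory_size\":2147483648}"
    }

So the old behavior handed downstream consumers the entire document where only the embedded vm_config string was expected.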
Extracted read_vm_config() function that properly parses SysConfig and returns the vm_config field. --- dstack-attest/src/attestation.rs | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/dstack-attest/src/attestation.rs b/dstack-attest/src/attestation.rs index 40271d9b..e578992f 100644 --- a/dstack-attest/src/attestation.rs +++ b/dstack-attest/src/attestation.rs @@ -12,6 +12,8 @@ use dcap_qvl::{ quote::{EnclaveReport, Quote, Report, TDReport10, TDReport15}, verify::VerifiedReport as TdxVerifiedReport, }; +#[cfg(feature = "quote")] +use dstack_types::SysConfig; use dstack_types::{Platform, VmConfig}; use ez_hash::{sha256, Hasher, Sha384}; use or_panic::ResultOrPanic; @@ -23,6 +25,21 @@ use sha2::Digest as _; const DSTACK_TDX: &str = "dstack-tdx"; const DSTACK_GCP_TDX: &str = "dstack-gcp-tdx"; const DSTACK_NITRO_ENCLAVE: &str = "dstack-nitro-enclave"; +#[cfg(feature = "quote")] +const SYS_CONFIG_PATH: &str = "/dstack/.host-shared/.sys-config.json"; + +/// Read vm_config from sys-config.json +#[cfg(feature = "quote")] +fn read_vm_config() -> Result { + let content = match fs_err::read_to_string(SYS_CONFIG_PATH) { + Ok(content) => content, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(String::new()), + Err(err) => return Err(err).context("Failed to read sys-config"), + }; + let sys_config: SysConfig = + serde_json::from_str(&content).context("Failed to parse sys-config")?; + Ok(sys_config.vm_config) +} /// Attestation mode #[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Encode, Decode, Serialize, Deserialize)] @@ -579,8 +596,7 @@ impl Attestation { }; let config = match "e { AttestationQuote::DstackTdx(_) => { - // TODO: Find a better way handling this hardcode path - fs_err::read_to_string("/dstack/.host-shared/.sys-config.json").unwrap_or_default() + read_vm_config().context("Failed to read VM config")? } AttestationQuote::DstackGcpTdx | AttestationQuote::DstackNitroEnclave => { bail!("Unsupported attestation mode: {mode:?}"); From 292683b3e8260aaa7bfebff665e1da98c54673e5 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 00:32:12 +0000 Subject: [PATCH 06/18] Optional setup tsm configfs --- basefiles/dstack-prepare.sh | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/basefiles/dstack-prepare.sh b/basefiles/dstack-prepare.sh index fc863a86..68077865 100755 --- a/basefiles/dstack-prepare.sh +++ b/basefiles/dstack-prepare.sh @@ -103,20 +103,22 @@ if ! [[ -e /dev/tdx_guest ]]; then modprobe tdx-guest fi -# Mount configfs for TSM (required for TDX quote generation) -if [[ ! -d /sys/kernel/config ]]; then - mkdir -p /sys/kernel/config -fi -if ! mountpoint -q /sys/kernel/config; then - log "Mounting configfs for TSM..." - mount -t configfs none /sys/kernel/config -fi - -# Create TSM report directory for TDX attestation -if [[ -e /dev/tdx_guest ]] && [[ ! -d /sys/kernel/config/tsm/report/com.intel.dcap ]]; then - log "Creating TSM report directory..." - mkdir -p /sys/kernel/config/tsm/report/com.intel.dcap -fi +# Setup configfs and TSM for TDX attestation +setup_tsm() { + if ! grep -q configfs /proc/filesystems; then + log "Warning: configfs not available in kernel, TSM may not work" + return 1 + fi + if ! mountpoint -q /sys/kernel/config 2>/dev/null; then + log "Mounting configfs for TSM..." + mount -t configfs none /sys/kernel/config + fi + if [[ -e /dev/tdx_guest ]] && [[ ! 
-d /sys/kernel/config/tsm/report/com.intel.dcap ]]; then + log "Creating TSM report directory..." + mkdir -p /sys/kernel/config/tsm/report/com.intel.dcap + fi +} +setup_tsm || true # Setup dstack system log "Preparing dstack system..." From 242894bda985b8ce974f3b44aeaadcba5a1939db Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 00:32:34 +0000 Subject: [PATCH 07/18] vmm: Default to 9p shared mode --- vmm/vmm.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vmm/vmm.toml b/vmm/vmm.toml index 1f14e13f..b64bf113 100644 --- a/vmm/vmm.toml +++ b/vmm/vmm.toml @@ -38,7 +38,7 @@ use_mrconfigid = true qemu_pci_hole64_size = 0 qemu_hotplug_off = false -host_share_mode = "vvfat" +host_share_mode = "9p" # QGS (Quote Generation Service) vsock port for kernel-level TSM support. # When set, QEMU will pass this port to tdx-guest for configfs-tsm quote generation. From b7521727516899a31d82e761bc447acd180e5052 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 00:42:36 +0000 Subject: [PATCH 08/18] Bump version to 0.5.6 --- Cargo.lock | 68 +++++++++++++++++++++++++++--------------------------- Cargo.toml | 2 +- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e9083251..95b28d54 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1287,7 +1287,7 @@ dependencies = [ [[package]] name = "cc-eventlog" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "digest 0.10.7", @@ -1304,7 +1304,7 @@ dependencies = [ [[package]] name = "cert-client" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "dstack-kms-rpc", @@ -1316,7 +1316,7 @@ dependencies = [ [[package]] name = "certbot" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "bon", @@ -1339,7 +1339,7 @@ dependencies = [ [[package]] name = "certbot-cli" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "certbot", @@ -1756,7 +1756,7 @@ dependencies = [ [[package]] name = "ct_monitor" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "clap", @@ -2168,7 +2168,7 @@ dependencies = [ [[package]] name = "dstack-attest" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "cc-eventlog", @@ -2193,7 +2193,7 @@ dependencies = [ [[package]] name = "dstack-gateway" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "bytes", @@ -2242,7 +2242,7 @@ dependencies = [ [[package]] name = "dstack-gateway-rpc" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "parity-scale-codec", @@ -2255,7 +2255,7 @@ dependencies = [ [[package]] name = "dstack-guest-agent" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "base64 0.22.1", @@ -2304,7 +2304,7 @@ dependencies = [ [[package]] name = "dstack-guest-agent-rpc" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "parity-scale-codec", @@ -2317,7 +2317,7 @@ dependencies = [ [[package]] name = "dstack-kms" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "chrono", @@ -2359,7 +2359,7 @@ dependencies = [ [[package]] name = "dstack-kms-rpc" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "fs-err", @@ -2373,7 +2373,7 @@ dependencies = [ [[package]] name = "dstack-mr" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "bon", @@ -2447,7 +2447,7 @@ dependencies = [ [[package]] name = "dstack-types" -version = "0.5.5" +version = "0.5.6" dependencies = [ "parity-scale-codec", "serde", @@ -2458,7 +2458,7 @@ dependencies = [ [[package]] name = "dstack-util" -version = "0.5.5" +version = 
"0.5.6" dependencies = [ "aes-gcm", "anyhow", @@ -2508,7 +2508,7 @@ dependencies = [ [[package]] name = "dstack-verifier" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "cc-eventlog", @@ -2536,7 +2536,7 @@ dependencies = [ [[package]] name = "dstack-vmm" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "base64 0.22.1", @@ -2585,7 +2585,7 @@ dependencies = [ [[package]] name = "dstack-vmm-rpc" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "parity-scale-codec", @@ -3221,7 +3221,7 @@ dependencies = [ [[package]] name = "guest-api" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "http-client", @@ -3498,7 +3498,7 @@ dependencies = [ [[package]] name = "host-api" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "http-client", @@ -3544,7 +3544,7 @@ dependencies = [ [[package]] name = "http-client" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "http-body-util", @@ -3962,7 +3962,7 @@ dependencies = [ [[package]] name = "iohash" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "blake2", @@ -4130,7 +4130,7 @@ dependencies = [ [[package]] name = "key-provider-client" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "serde", @@ -4223,7 +4223,7 @@ checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "load_config" -version = "0.5.5" +version = "0.5.6" dependencies = [ "figment", "rocket", @@ -4278,7 +4278,7 @@ checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" [[package]] name = "lspci" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "insta", @@ -5544,7 +5544,7 @@ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "ra-rpc" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "bon", @@ -5563,7 +5563,7 @@ dependencies = [ [[package]] name = "ra-tls" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "bon", @@ -6002,7 +6002,7 @@ dependencies = [ [[package]] name = "rocket-vsock-listener" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "derive_more 2.1.1", @@ -6632,7 +6632,7 @@ dependencies = [ [[package]] name = "serde-duration" -version = "0.5.5" +version = "0.5.6" dependencies = [ "serde", ] @@ -6939,7 +6939,7 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "size-parser" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "serde", @@ -7084,7 +7084,7 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "supervisor" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "bon", @@ -7107,7 +7107,7 @@ dependencies = [ [[package]] name = "supervisor-client" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "clap", @@ -7251,7 +7251,7 @@ dependencies = [ [[package]] name = "tdx-attest" -version = "0.5.5" +version = "0.5.6" dependencies = [ "anyhow", "cc-eventlog", @@ -7270,7 +7270,7 @@ dependencies = [ [[package]] name = "tdx-attest-sys" -version = "0.5.5" +version = "0.5.6" dependencies = [ "bindgen", "cc", diff --git a/Cargo.toml b/Cargo.toml index d0c07582..c8f76ce2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ # SPDX-License-Identifier: Apache-2.0 [workspace.package] -version = "0.5.5" +version = "0.5.6" authors = ["Kevin Wang ", "Leechael "] edition = "2021" license = "MIT" From 6f14cb368804f95c920a983ba26594f386bf2f10 Mon Sep 17 00:00:00 2001 From: Kevin 
Wang Date: Wed, 21 Jan 2026 00:45:03 +0000 Subject: [PATCH 09/18] Update dcap-qvl to 0.3.9 --- Cargo.lock | 107 ++++++++++++++++++++++++++++++++++++++++++++--------- Cargo.toml | 3 +- 2 files changed, 90 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 95b28d54..d174aabd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1879,9 +1879,9 @@ checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" [[package]] name = "dcap-qvl" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82f4049f76ea6a67262a7501b82f9cda2425c022a86b45902d195d70fa67a4f7" +checksum = "6b2e21006fbf3e2cb1b2b17aaa72ecaa4039ccca69ff8d79c53076c0da1ac374" dependencies = [ "anyhow", "asn1_der", @@ -1896,15 +1896,16 @@ dependencies = [ "futures", "hex", "log", + "p256", "parity-scale-codec", "pem", "reqwest", - "ring", - "rustls-webpki 0.102.8", + "rustls-pki-types", "scale-info", "serde", "serde-human-bytes", "serde_json", + "sha2 0.10.9", "tracing", "urlencoding", "wasm-bindgen-futures", @@ -1913,12 +1914,19 @@ dependencies = [ [[package]] name = "dcap-qvl-webpki" -version = "0.103.3" +version = "0.103.4+dcap.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ebdcd097c369fe3422cf3978540e0406148435ec0f4d8ecbbf201c746f19c9" +checksum = "d0af040afe66c4f26ca05f308482d98bd75a35a80a227d877c2e28c9947a9fa6" dependencies = [ + "ecdsa", + "ed25519-dalek", + "p256", + "p384", "ring", + "rsa", "rustls-pki-types", + "sha2 0.10.9", + "signature", "untrusted 0.9.0", ] @@ -4163,6 +4171,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "libc" @@ -4645,6 +4656,22 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -4660,6 +4687,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-rational" version = "0.4.2" @@ -4838,6 +4876,18 @@ dependencies = [ "sha2 0.10.9", ] +[[package]] +name = "p384" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2 0.10.9", +] + [[package]] name = "parcelona" version = "0.4.3" @@ -5121,6 +5171,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + [[package]] name = "pkcs8" version = "0.10.2" @@ -6050,6 +6111,27 @@ dependencies 
= [ "uncased", ] +[[package]] +name = "rsa" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d" +dependencies = [ + "const-oid", + "digest 0.10.7", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "sha2 0.10.9", + "signature", + "spki", + "subtle", + "zeroize", +] + [[package]] name = "ruint" version = "1.17.0" @@ -6172,7 +6254,7 @@ dependencies = [ "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.103.8", + "rustls-webpki", "subtle", "zeroize", ] @@ -6208,17 +6290,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "rustls-webpki" -version = "0.102.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted 0.9.0", -] - [[package]] name = "rustls-webpki" version = "0.103.8" diff --git a/Cargo.toml b/Cargo.toml index c8f76ce2..b7581960 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -165,8 +165,7 @@ default-net = "0.22.0" # Cryptography/Security aes-gcm = "0.10.3" curve25519-dalek = "4.1.3" -dcap-qvl = "0.3.8" -dcap-qvl-webpki = "0.103" +dcap-qvl = "0.3.9" elliptic-curve = { version = "0.13.8", features = ["pkcs8"] } getrandom = "0.3.1" hkdf = "0.12.4" From f070aa4bdc16f366baae6b0b6294ddc2955f29c9 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 01:17:00 +0000 Subject: [PATCH 10/18] Add mutex lock for TDX quote generation The TDX driver does not support concurrent access. Add global mutex locks at both tdx-attest and dstack-attest layers to prevent race conditions: - tdx-attest: Lock in get_quote() to protect low-level TDX driver calls - dstack-attest: Lock in quote_with_app_id() for future TEE environments --- dstack-attest/src/attestation.rs | 9 +++++++++ tdx-attest/src/linux.rs | 7 +++++++ 2 files changed, 16 insertions(+) diff --git a/dstack-attest/src/attestation.rs b/dstack-attest/src/attestation.rs index e578992f..30ead88a 100644 --- a/dstack-attest/src/attestation.rs +++ b/dstack-attest/src/attestation.rs @@ -28,6 +28,10 @@ const DSTACK_NITRO_ENCLAVE: &str = "dstack-nitro-enclave"; #[cfg(feature = "quote")] const SYS_CONFIG_PATH: &str = "/dstack/.host-shared/.sys-config.json"; +/// Global lock for quote generation. The underlying TDX driver does not support concurrent access. +#[cfg(feature = "quote")] +static QUOTE_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); + /// Read vm_config from sys-config.json #[cfg(feature = "quote")] fn read_vm_config() -> Result { @@ -574,6 +578,11 @@ impl Attestation { } pub fn quote_with_app_id(report_data: &[u8; 64], app_id: Option<[u8; 20]>) -> Result { + // Lock to prevent concurrent quote generation (TDX driver doesn't support it) + let _guard = QUOTE_LOCK + .lock() + .map_err(|_| anyhow!("Quote lock poisoned"))?; + let mode = AttestationMode::detect()?; let runtime_events = if mode.is_composable() { RuntimeEvent::read_all().context("Failed to read runtime events")? diff --git a/tdx-attest/src/linux.rs b/tdx-attest/src/linux.rs index 9959777d..f81e45eb 100644 --- a/tdx-attest/src/linux.rs +++ b/tdx-attest/src/linux.rs @@ -6,9 +6,13 @@ use tdx_attest_sys as sys; use std::ptr; use std::slice; +use std::sync::Mutex; use sys::*; +/// Global lock for TDX attestation operations. The TDX driver does not support concurrent access. 
+static TDX_LOCK: Mutex<()> = Mutex::new(()); + use num_enum::FromPrimitive; use thiserror::Error; @@ -49,6 +53,9 @@ pub enum TdxAttestError { } pub fn get_quote(report_data: &TdxReportData) -> Result> { + // Lock to prevent concurrent access - TDX driver doesn't support it + let _guard = TDX_LOCK.lock().map_err(|_| TdxAttestError::Busy)?; + let mut att_key_id = TdxUuid([0; TDX_UUID_SIZE as usize]); let mut quote_ptr = ptr::null_mut(); let mut quote_size = 0; From 3f8f6d3a0bded0ab91ab0f8cfb177639d5e11144 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 01:54:50 +0000 Subject: [PATCH 11/18] Fix ra-tls compilation error --- ra-tls/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ra-tls/Cargo.toml b/ra-tls/Cargo.toml index 2a83ca64..99622298 100644 --- a/ra-tls/Cargo.toml +++ b/ra-tls/Cargo.toml @@ -19,7 +19,7 @@ hex.workspace = true hkdf.workspace = true p256.workspace = true rcgen = { workspace = true, features = ["x509-parser", "pem"] } -ring.workspace = true +ring = { workspace = true, features = ["std"] } rustls-pki-types.workspace = true serde.workspace = true serde_json.workspace = true From 743dbc91f22ebb0cf73342cd1b2f44a17f6158ed Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 02:19:02 +0000 Subject: [PATCH 12/18] fix(verifier): remove pccs_url from public API to prevent SSRF Remove the pccs_url parameter from the VerificationRequest struct to prevent potential SSRF attacks where a malicious client could specify an arbitrary URL. The PCCS URL is now only configurable via the server configuration file. Changes: - Remove pccs_url field from VerificationRequest in types.rs - Add pccs_url to CvmVerifier struct instead of passing per-request - Update all CvmVerifier::new() calls to pass pccs_url from config - Update README to reflect the configuration-only approach --- kms/src/main_service.rs | 1 + verifier/README.md | 4 ++-- verifier/src/main.rs | 7 +++---- verifier/src/types.rs | 1 - verifier/src/verification.rs | 11 +++++++++-- 5 files changed, 15 insertions(+), 9 deletions(-) diff --git a/kms/src/main_service.rs b/kms/src/main_service.rs index 57fead25..3c4fcc62 100644 --- a/kms/src/main_service.rs +++ b/kms/src/main_service.rs @@ -69,6 +69,7 @@ impl KmsState { config.image.cache_dir.display().to_string(), config.image.download_url.clone(), config.image.download_timeout, + config.pccs_url.clone(), ); Ok(Self { inner: Arc::new(KmsStateInner { diff --git a/verifier/README.md b/verifier/README.md index d3ae2767..4b7a4faa 100644 --- a/verifier/README.md +++ b/verifier/README.md @@ -78,7 +78,7 @@ You usually don't need to edit the config file. 
Just using the default is fine, - `image_cache_dir`: Directory for cached OS images (default: "/tmp/dstack-verifier/cache") - `image_download_url`: URL template for downloading OS images (default: dstack official releases URL) - `image_download_timeout_secs`: Download timeout in seconds (default: 300) -- `pccs_url`: Optional PCCS URL for quote verification +- `pccs_url`: PCCS URL for quote verification (default: uses Intel's public PCCS) ### Example Configuration File @@ -88,7 +88,7 @@ port = 8080 image_cache_dir = "/tmp/dstack-verifier/cache" image_download_url = "https://download.dstack.org/os-images/mr_{OS_IMAGE_HASH}.tar.gz" image_download_timeout_secs = 300 -pccs_url = "https://pccs.phala.network" +# pccs_url = "https://pccs.phala.network" ``` ## Usage diff --git a/verifier/src/main.rs b/verifier/src/main.rs index 1bd72a91..d832a6ba 100644 --- a/verifier/src/main.rs +++ b/verifier/src/main.rs @@ -85,17 +85,15 @@ async fn run_oneshot(file_path: &str, config: &Config) -> anyhow::Result<()> { .map_err(|e| anyhow::anyhow!("Failed to read file {}: {}", file_path, e))?; // Parse as VerificationRequest - let mut request: VerificationRequest = serde_json::from_str(&content) + let request: VerificationRequest = serde_json::from_str(&content) .map_err(|e| anyhow::anyhow!("Failed to parse JSON: {}", e))?; - // Ensure PCCS URL is populated from config when the report omits it - request.pccs_url = request.pccs_url.or_else(|| config.pccs_url.clone()); - // Create verifier let verifier = CvmVerifier::new( config.image_cache_dir.clone(), config.image_download_url.clone(), std::time::Duration::from_secs(config.image_download_timeout_secs), + config.pccs_url.clone(), ); // Run verification @@ -187,6 +185,7 @@ async fn main() -> Result<()> { config.image_cache_dir.clone(), config.image_download_url.clone(), std::time::Duration::from_secs(config.image_download_timeout_secs), + config.pccs_url.clone(), )); rocket::custom(figment) diff --git a/verifier/src/types.rs b/verifier/src/types.rs index c921ed8b..a82c16ae 100644 --- a/verifier/src/types.rs +++ b/verifier/src/types.rs @@ -15,7 +15,6 @@ pub struct VerificationRequest { pub vm_config: Option, #[serde(with = "serde_bytes")] pub attestation: Option>, - pub pccs_url: Option, pub debug: Option, } diff --git a/verifier/src/verification.rs b/verifier/src/verification.rs index 42eef846..af9474be 100644 --- a/verifier/src/verification.rs +++ b/verifier/src/verification.rs @@ -137,14 +137,21 @@ pub struct CvmVerifier { pub image_cache_dir: String, pub download_url: String, pub download_timeout: Duration, + pub pccs_url: Option, } impl CvmVerifier { - pub fn new(image_cache_dir: String, download_url: String, download_timeout: Duration) -> Self { + pub fn new( + image_cache_dir: String, + download_url: String, + download_timeout: Duration, + pccs_url: Option, + ) -> Self { Self { image_cache_dir, download_url, download_timeout, + pccs_url, } } @@ -408,7 +415,7 @@ impl CvmVerifier { let attestation = attestation.into_inner(); let debug = request.debug.unwrap_or(false); - let verified = attestation.verify(request.pccs_url.as_deref()).await; + let verified = attestation.verify(self.pccs_url.as_deref()).await; let verified_attestation = match verified { Ok(att) => { details.quote_verified = true; From 70720d572de837e52292dd8acb9c3e87961492f9 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 02:40:29 +0000 Subject: [PATCH 13/18] fix(event-log): strip RTMR[0-2] payloads and document semantics Strip boot-time event log payloads (RTMR 0-2) from GetQuote, 
Attest, and TdxQuote responses to reduce response size. Only digests are retained for verification purposes; runtime events (RTMR3) keep full payloads. Add documentation explaining event_log_verified semantics: RTMR3 events have both digest and payload verified, while RTMR 0-2 events only have digests verified through replay comparison. --- dstack-attest/src/attestation.rs | 9 ++++++--- guest-agent/src/rpc_service.rs | 20 ++++++++++++-------- sdk/curl/api-tappd.md | 10 ++++++++-- sdk/curl/api.md | 5 ++++- verifier/README.md | 4 ++-- verifier/src/types.rs | 7 +++++++ 6 files changed, 39 insertions(+), 16 deletions(-) diff --git a/dstack-attest/src/attestation.rs b/dstack-attest/src/attestation.rs index 30ead88a..4e841583 100644 --- a/dstack-attest/src/attestation.rs +++ b/dstack-attest/src/attestation.rs @@ -339,10 +339,13 @@ impl Attestation { .map(|q| serde_json::to_vec(&q.event_log).unwrap_or_default()) } - /// Get TDX event log string + /// Get TDX event log string with RTMR[0-2] payloads stripped to reduce size. + /// Only digests are kept for boot-time events; runtime events (RTMR3) retain full payload. pub fn get_tdx_event_log_string(&self) -> Option { - self.tdx_quote() - .map(|q| serde_json::to_string(&q.event_log).unwrap_or_default()) + self.tdx_quote().map(|q| { + let stripped: Vec<_> = q.event_log.iter().map(|e| e.stripped()).collect(); + serde_json::to_string(&stripped).unwrap_or_default() + }) } pub fn get_td10_report(&self) -> Option { diff --git a/guest-agent/src/rpc_service.rs b/guest-agent/src/rpc_service.rs index 03967547..c7a6b367 100644 --- a/guest-agent/src/rpc_service.rs +++ b/guest-agent/src/rpc_service.rs @@ -561,8 +561,10 @@ impl TappdRpc for InternalRpcHandlerV0 { }); } let event_log = read_event_log().context("Failed to decode event log")?; + // Strip RTMR[0-2] payloads, keep only digests + let stripped: Vec<_> = event_log.iter().map(|e| e.stripped()).collect(); let event_log = - serde_json::to_string(&event_log).context("Failed to serialize event log")?; + serde_json::to_string(&stripped).context("Failed to serialize event log")?; let quote = tdx_attest::get_quote(&report_data).context("Failed to get quote")?; Ok(TdxQuoteResponse { quote, @@ -657,12 +659,13 @@ impl WorkerRpc for ExternalRpcHandler { } else { let ed25519_quote = tdx_attest::get_quote(&ed25519_report_data) .context("Failed to get ed25519 quote")?; - let event_log = serde_json::to_string( - &read_event_log().context("Failed to read event log")?, - )?; + let raw_event_log = read_event_log().context("Failed to read event log")?; + // Strip RTMR[0-2] payloads, keep only digests + let stripped: Vec<_> = raw_event_log.iter().map(|e| e.stripped()).collect(); + let event_log = serde_json::to_string(&stripped)?; Ok(GetQuoteResponse { quote: ed25519_quote, - event_log: event_log.clone(), + event_log, report_data: ed25519_report_data.to_vec(), vm_config: self.state.inner.vm_config.clone(), }) @@ -688,9 +691,10 @@ impl WorkerRpc for ExternalRpcHandler { } else { let secp256k1_quote = tdx_attest::get_quote(&secp256k1_report_data) .context("Failed to get secp256k1 quote")?; - let event_log = serde_json::to_string( - &read_event_log().context("Failed to read event log")?, - )?; + let raw_event_log = read_event_log().context("Failed to read event log")?; + // Strip RTMR[0-2] payloads, keep only digests + let stripped: Vec<_> = raw_event_log.iter().map(|e| e.stripped()).collect(); + let event_log = serde_json::to_string(&stripped)?; Ok(GetQuoteResponse { quote: secp256k1_quote, diff --git a/sdk/curl/api-tappd.md 
b/sdk/curl/api-tappd.md index 4cf888d1..89cc13f8 100644 --- a/sdk/curl/api-tappd.md +++ b/sdk/curl/api-tappd.md @@ -128,12 +128,15 @@ curl --unix-socket /var/run/tappd.sock -X POST \ ```json { "quote": "", - "event_log": "quote generation log", + "event_log": "", "hash_algorithm": "sha512", "prefix": "app-data:" } ``` +**Note on Event Log:** +The `event_log` field contains a JSON array of TDX event log entries. For RTMR 0-2 (boot-time measurements), only the digest is included; the payload is stripped to reduce response size. For RTMR3 (runtime measurements), both digest and payload are included. + ### 4. Raw Quote Generates a TDX quote with raw report data. This is a low-level API that should be used with caution. @@ -166,10 +169,13 @@ curl --unix-socket /var/run/tappd.sock http://localhost/prpc/Tappd.RawQuote?repo ```json { "quote": "", - "event_log": "quote generation log" + "event_log": "" } ``` +**Note on Event Log:** +The `event_log` field contains a JSON array of TDX event log entries. For RTMR 0-2 (boot-time measurements), only the digest is included; the payload is stripped to reduce response size. For RTMR3 (runtime measurements), both digest and payload are included. + ### 5. Info Retrieves worker information. diff --git a/sdk/curl/api.md b/sdk/curl/api.md index 780867e4..32f80bca 100644 --- a/sdk/curl/api.md +++ b/sdk/curl/api.md @@ -132,12 +132,15 @@ curl --unix-socket /var/run/dstack.sock http://dstack/GetQuote?report_data=00000 ```json { "quote": "", - "event_log": "quote generation log", + "event_log": "", "report_data": "", "vm_config": "" } ``` +**Note on Event Log:** +The `event_log` field contains a JSON array of TDX event log entries. For RTMR 0-2 (boot-time measurements), only the digest is included; the payload is stripped to reduce response size. For RTMR3 (runtime measurements), both digest and payload are included. To verify the event log, submit it along with the quote to the [verifier service](../../verifier/README.md). + ### 4. Get Info Retrieves worker information. diff --git a/verifier/README.md b/verifier/README.md index 4b7a4faa..70f271ae 100644 --- a/verifier/README.md +++ b/verifier/README.md @@ -31,7 +31,7 @@ or "is_valid": true, "details": { "quote_verified": true, - "event_log_verified": true, + "event_log_verified": true, // See "Verification Process" for semantics "os_image_hash_verified": true, "report_data": "hex-encoded-64-byte-report-data", "tcb_status": "UpToDate", @@ -178,7 +178,7 @@ $ curl -s -d @quote.json localhost:8080/verify | jq The verifier performs three main verification steps: 1. **Quote Verification**: Validates the TDX quote using dcap-qvl, checking the quote signature and TCB status -2. **Event Log Verification**: Replays event logs to ensure RTMR values match and extracts app information +2. **Event Log Verification**: Replays event logs to ensure RTMR values match and extracts app information. For RTMR3 (runtime measurements), both the digest and payload integrity are verified. For RTMR 0-2 (boot-time measurements), only the digests are verified; the payload content is not validated as dstack does not define semantics for these payloads 3. 
**OS Image Hash Verification**: - Automatically downloads OS images if not cached locally - Uses dstack-mr to compute expected measurements diff --git a/verifier/src/types.rs b/verifier/src/types.rs index a82c16ae..736b4cfb 100644 --- a/verifier/src/types.rs +++ b/verifier/src/types.rs @@ -28,6 +28,13 @@ pub struct VerificationResponse { #[derive(Debug, Clone, Default, Serialize)] pub struct VerificationDetails { pub quote_verified: bool, + /// Indicates that the event log was verified against the quote. + /// + /// For RTMR3 (runtime measurements), both the digest and payload integrity are verified + /// by replaying the event log and comparing against the quote. For RTMR 0-2 (boot-time + /// measurements), only the digests are verified through replay comparison with the quote; + /// the payload content is not validated. dstack does not define semantics for RTMR 0-2 + /// event log payloads. pub event_log_verified: bool, pub os_image_hash_verified: bool, pub report_data: Option, From 029f167f924d5e2112fdf41dc5daea21d5aea85b Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 04:23:16 +0000 Subject: [PATCH 14/18] Add CertConfigV2 --- cert-client/src/lib.rs | 4 +- dstack-util/src/system_setup.rs | 11 +++-- gateway/src/main.rs | 2 + gateway/src/main_service/sync_client.rs | 2 + guest-agent/rpc/proto/agent_rpc.proto | 4 ++ guest-agent/src/rpc_service.rs | 14 ++++-- ra-tls/src/cert.rs | 60 ++++++++++++++++++++++--- 7 files changed, 78 insertions(+), 19 deletions(-) diff --git a/cert-client/src/lib.rs b/cert-client/src/lib.rs index 8328cd68..4e77762d 100644 --- a/cert-client/src/lib.rs +++ b/cert-client/src/lib.rs @@ -8,7 +8,7 @@ use dstack_types::{AppKeys, KeyProvider}; use ra_rpc::client::{RaClient, RaClientConfig}; use ra_tls::{ attestation::{QuoteContentType, VersionedAttestation}, - cert::{generate_ra_cert, CaCert, CertConfig, CertSigningRequestV2, Csr}, + cert::{generate_ra_cert, CaCert, CertConfigV2, CertSigningRequestV2, Csr}, rcgen::KeyPair, }; @@ -96,7 +96,7 @@ impl CertRequestClient { pub async fn request_cert( &self, key: &KeyPair, - config: CertConfig, + config: CertConfigV2, attestation_override: Option, ) -> Result> { let pubkey = key.public_key_der(); diff --git a/dstack-util/src/system_setup.rs b/dstack-util/src/system_setup.rs index 79133e14..1bcf849f 100644 --- a/dstack-util/src/system_setup.rs +++ b/dstack-util/src/system_setup.rs @@ -28,7 +28,7 @@ use luks2::{ LuksSegmentSize, }; use ra_rpc::client::{CertInfo, RaClient, RaClientConfig}; -use ra_tls::cert::generate_ra_cert; +use ra_tls::cert::{generate_ra_cert, CertConfigV2}; use rand::Rng as _; use scopeguard::defer; use serde::{Deserialize, Serialize}; @@ -48,10 +48,7 @@ use cmd_lib::run_fun as cmd; use dstack_gateway_rpc::{ gateway_client::GatewayClient, RegisterCvmRequest, RegisterCvmResponse, WireGuardPeer, }; -use ra_tls::{ - cert::CertConfig, - rcgen::{KeyPair, PKCS_ECDSA_P256_SHA256}, -}; +use ra_tls::rcgen::{KeyPair, PKCS_ECDSA_P256_SHA256}; use serde_human_bytes as hex_bytes; use serde_json::Value; @@ -388,13 +385,15 @@ impl<'a> GatewayContext<'a> { let sk = cmd!(wg genkey)?; let pk = cmd!(echo $sk | wg pubkey).or(Err(anyhow!("Failed to generate public key")))?; - let config = CertConfig { + let config = CertConfigV2 { org_name: None, subject: "dstack-guest-agent".to_string(), subject_alt_names: vec![], usage_server_auth: false, usage_client_auth: true, ext_quote: true, + not_before: None, + not_after: None, }; let cert_client = CertRequestClient::create( self.keys, diff --git a/gateway/src/main.rs 
b/gateway/src/main.rs index 5d86e84f..17ef2cf3 100644 --- a/gateway/src/main.rs +++ b/gateway/src/main.rs @@ -77,6 +77,8 @@ async fn maybe_gen_certs(config: &Config, tls_config: &TlsConfig) -> Result<()> usage_ra_tls: true, usage_server_auth: true, usage_client_auth: false, + not_before: None, + not_after: None, }) .await?; diff --git a/gateway/src/main_service/sync_client.rs b/gateway/src/main_service/sync_client.rs index 7feba2a0..be6985f7 100644 --- a/gateway/src/main_service/sync_client.rs +++ b/gateway/src/main_service/sync_client.rs @@ -90,6 +90,8 @@ pub(crate) async fn sync_task(proxy: Proxy) -> Result<()> { usage_ra_tls: false, usage_server_auth: false, usage_client_auth: true, + not_after: None, + not_before: None, }) .await .context("Failed to get sync-client keys")?; diff --git a/guest-agent/rpc/proto/agent_rpc.proto b/guest-agent/rpc/proto/agent_rpc.proto index f12b161d..86bec357 100644 --- a/guest-agent/rpc/proto/agent_rpc.proto +++ b/guest-agent/rpc/proto/agent_rpc.proto @@ -72,6 +72,10 @@ message GetTlsKeyArgs { bool usage_server_auth = 4; // Key usage client auth bool usage_client_auth = 5; + // Certificate validity start time as seconds since UNIX epoch + optional uint64 not_before = 6; + // Certificate validity end time as seconds since UNIX epoch + optional uint64 not_after = 7; } // The request to derive a key diff --git a/guest-agent/src/rpc_service.rs b/guest-agent/src/rpc_service.rs index c7a6b367..8e03efaa 100644 --- a/guest-agent/src/rpc_service.rs +++ b/guest-agent/src/rpc_service.rs @@ -29,7 +29,7 @@ use or_panic::ResultOrPanic; use ra_rpc::{Attestation, CallContext, RpcCall}; use ra_tls::{ attestation::{QuoteContentType, VersionedAttestation, DEFAULT_HASH_ALGORITHM}, - cert::CertConfig, + cert::CertConfigV2, kdf::{derive_ecdsa_key, derive_ecdsa_key_pair_from_bytes}, }; use rcgen::KeyPair; @@ -78,13 +78,15 @@ impl AppStateInner { .cert_client .request_cert( &key, - CertConfig { + CertConfigV2 { org_name: None, subject: "demo-cert".to_string(), subject_alt_names: vec![], usage_server_auth: false, usage_client_auth: true, ext_quote: true, + not_after: None, + not_before: None, }, attestation_override, ) @@ -233,13 +235,15 @@ impl DstackGuestRpc for InternalRpcHandler { .context("Failed to generate secure seed")?; let derived_key = derive_ecdsa_key_pair_from_bytes(&seed, &[]).context("Failed to derive key")?; - let config = CertConfig { + let config = CertConfigV2 { org_name: None, subject: request.subject, subject_alt_names: request.alt_names, usage_server_auth: request.usage_server_auth, usage_client_auth: request.usage_client_auth, ext_quote: request.usage_ra_tls, + not_after: request.not_after, + not_before: request.not_before, }; let attestation_override = self .state @@ -493,13 +497,15 @@ impl TappdRpc for InternalRpcHandlerV0 { }; let derived_key = derive_ecdsa_key_pair_from_bytes(seed, &[request.path.as_bytes()]) .context("Failed to derive key")?; - let config = CertConfig { + let config = CertConfigV2 { org_name: None, subject: request.subject, subject_alt_names: request.alt_names, usage_server_auth: request.usage_server_auth, usage_client_auth: request.usage_client_auth, ext_quote: request.usage_ra_tls, + not_before: None, + not_after: None, }; let attestation_override = self .state diff --git a/ra-tls/src/cert.rs b/ra-tls/src/cert.rs index 680a0291..27fb436f 100644 --- a/ra-tls/src/cert.rs +++ b/ra-tls/src/cert.rs @@ -4,7 +4,7 @@ //! Certificate creation functions. 
-use std::time::SystemTime; +use std::time::{SystemTime, UNIX_EPOCH}; use std::{path::Path, time::Duration}; use anyhow::{anyhow, bail, Context, Result}; @@ -99,6 +99,8 @@ impl CaCert { .maybe_attestation(attestation) .maybe_app_id(app_id) .special_usage(usage) + .maybe_not_before(cfg.not_before.map(unix_time_to_system_time)) + .maybe_not_after(cfg.not_after.map(unix_time_to_system_time)) .build(); self.sign(req).context("Failed to sign certificate") } @@ -121,6 +123,42 @@ pub struct CertConfig { pub ext_quote: bool, } +/// The configuration of the certificate with optional validity overrides. +#[derive(Encode, Decode, Clone, PartialEq)] +pub struct CertConfigV2 { + /// The organization name of the certificate. + pub org_name: Option, + /// The subject of the certificate. + pub subject: String, + /// The subject alternative names of the certificate. + pub subject_alt_names: Vec, + /// The purpose of the certificate. + pub usage_server_auth: bool, + /// The purpose of the certificate. + pub usage_client_auth: bool, + /// Whether the certificate is quoted. + pub ext_quote: bool, + /// The certificate validity start time as seconds since UNIX epoch. + pub not_before: Option, + /// The certificate validity end time as seconds since UNIX epoch. + pub not_after: Option, +} + +impl From for CertConfigV2 { + fn from(config: CertConfig) -> Self { + Self { + org_name: config.org_name, + subject: config.subject, + subject_alt_names: config.subject_alt_names, + usage_server_auth: config.usage_server_auth, + usage_client_auth: config.usage_client_auth, + ext_quote: config.ext_quote, + not_before: None, + not_after: None, + } + } +} + /// A certificate signing request. #[derive(Encode, Decode, Clone)] pub struct CertSigningRequestV1 { @@ -240,7 +278,7 @@ pub struct CertSigningRequestV2 { /// The public key of the certificate. pub pubkey: Vec, /// The certificate configuration. - pub config: CertConfig, + pub config: CertConfigV2, /// The attestation. pub attestation: VersionedAttestation, } @@ -251,7 +289,7 @@ impl TryFrom for CertSigningRequestV2 { Ok(Self { confirm: v0.confirm, pubkey: v0.pubkey, - config: v0.config, + config: v0.config.into(), attestation: Attestation::from_tdx_quote(v0.quote, &v0.event_log)?.into_versioned(), }) } @@ -381,6 +419,10 @@ fn add_ext(params: &mut CertificateParams, oid: &[u64], content: impl AsRef<[u8] .push(CustomExtension::from_oid_content(oid, content)); } +fn unix_time_to_system_time(secs: u64) -> SystemTime { + UNIX_EPOCH + Duration::from_secs(secs) +} + impl CertRequest<'_, KeyPair> { /// Create a self-signed certificate. 
pub fn self_signed(self) -> Result { @@ -624,13 +666,15 @@ mod tests { let csr = CertSigningRequestV2 { confirm: "please sign cert:".to_string(), pubkey: vec![1, 2, 3], - config: CertConfig { + config: CertConfigV2 { org_name: None, subject: "test.example.com".to_string(), subject_alt_names: vec![], usage_server_auth: true, usage_client_auth: false, ext_quote: false, + not_before: None, + not_after: None, }, attestation: Attestation { quote: AttestationQuote::DstackTdx(TdxQuote { @@ -646,7 +690,7 @@ mod tests { }; let actual = hex::encode(csr.encode()); - let expected = "44706c65617365207369676e20636572743a0c0102030040746573742e6578616d706c652e636f6d0001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; + let expected = "44706c65617365207369676e20636572743a0c0102030040746573742e6578616d706c652e636f6d00010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; assert_eq!(actual, expected); } @@ -655,13 +699,15 @@ mod tests { let csr = CertSigningRequestV2 { confirm: "please sign cert:".to_string(), pubkey: vec![1, 2, 3], - config: CertConfig { + config: CertConfigV2 { org_name: None, subject: "test.example.com".to_string(), subject_alt_names: vec![], usage_server_auth: true, usage_client_auth: false, ext_quote: true, + not_before: None, + not_after: None, }, attestation: Attestation { quote: AttestationQuote::DstackTdx(TdxQuote { @@ -677,7 +723,7 @@ mod tests { }; let actual = hex::encode(csr.encode()); - let expected = "44706c65617365207369676e20636572743a0c0102030040746573742e6578616d706c652e636f6d000100010000040900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; + let expected = "44706c65617365207369676e20636572743a0c0102030040746573742e6578616d706c652e636f6d0001000100000000040900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; assert_eq!(actual, expected); } } From 6d146c37ab8883c51dc040a069be34ad10ca6a4f Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 06:29:46 +0000 Subject: [PATCH 15/18] Update dcap-qvl to 0.3.10 --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d174aabd..a17f6027 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1879,9 +1879,9 @@ checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" [[package]] name = "dcap-qvl" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b2e21006fbf3e2cb1b2b17aaa72ecaa4039ccca69ff8d79c53076c0da1ac374" +checksum = "3696cfa3d2b8b26df6dadafa67dd1fa69376c1e38971c207984bc3a9f0621d05" dependencies = [ "anyhow", "asn1_der", diff --git a/Cargo.toml b/Cargo.toml index b7581960..0758da53 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -165,7 +165,7 @@ default-net = "0.22.0" # Cryptography/Security aes-gcm = "0.10.3" curve25519-dalek = "4.1.3" -dcap-qvl = "0.3.9" +dcap-qvl = "0.3.10" elliptic-curve = { version = "0.13.8", features = ["pkcs8"] } getrandom = "0.3.1" hkdf = "0.12.4" From 6ed539c4f74823ae89e053030cd641da06b426aa Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 07:29:47 +0000 Subject: [PATCH 16/18] Add back simulator --- sdk/simulator/.gitignore | 3 +++ sdk/simulator/app-compose.json | 1 + 
sdk/simulator/appkeys.json | 13 ++++++++++++ sdk/simulator/attestation.bin | Bin 0 -> 9504 bytes sdk/simulator/build.sh | 11 ++++++++++ sdk/simulator/dstack.toml | 36 +++++++++++++++++++++++++++++++++ sdk/simulator/sys-config.json | 12 +++++++++++ 7 files changed, 76 insertions(+) create mode 100644 sdk/simulator/.gitignore create mode 100644 sdk/simulator/app-compose.json create mode 100644 sdk/simulator/appkeys.json create mode 100644 sdk/simulator/attestation.bin create mode 100755 sdk/simulator/build.sh create mode 100644 sdk/simulator/dstack.toml create mode 100644 sdk/simulator/sys-config.json diff --git a/sdk/simulator/.gitignore b/sdk/simulator/.gitignore new file mode 100644 index 00000000..1acd17bd --- /dev/null +++ b/sdk/simulator/.gitignore @@ -0,0 +1,3 @@ +dstack-simulator +dstack-guest-agent +*.lock diff --git a/sdk/simulator/app-compose.json b/sdk/simulator/app-compose.json new file mode 100644 index 00000000..bcbba37d --- /dev/null +++ b/sdk/simulator/app-compose.json @@ -0,0 +1 @@ +{"manifest_version":2,"name":"guest-agent","runner":"docker-compose","docker_compose_file":"services:\n dstack-agent:\n image: ubuntu\n user: root\n network_mode: host\n volumes:\n - /:/host/\n - /var/run/tappd.sock:/var/run/tappd.sock\n - /var/run/dstack.sock:/var/run/dstack.sock\n entrypoint: |\n bash -c '\n apt-get update && apt-get install -y socat\n socat TCP-LISTEN:2000,fork UNIX-CONNECT:/var/run/tappd.sock &\n socat TCP-LISTEN:3000,fork UNIX-CONNECT:/var/run/dstack.sock &\n tail -f /dev/null\n '\n dstack-verifier:\n image: dstacktee/dstack-verifier:0.5.4\n ports:\n - \"8080:8080\"\n restart: unless-stopped","gateway_enabled":true,"public_logs":true,"public_sysinfo":true,"public_tcbinfo":true,"key_provider_id":"","allowed_envs":[],"no_instance_id":false,"secure_time":false,"key_provider":"kms","kms_enabled":true,"storage_fs":"ext4","pre_launch_script":"docker run --rm --privileged --pid=host --net=host -v /:/host \\\n -e SSH_GITHUB_USER=\"kvinwang\" \\\n kvin/dstack-openssh-installer:latest"} \ No newline at end of file diff --git a/sdk/simulator/appkeys.json b/sdk/simulator/appkeys.json new file mode 100644 index 00000000..1e67f019 --- /dev/null +++ b/sdk/simulator/appkeys.json @@ -0,0 +1,13 @@ +{ + "disk_crypt_key": "2cbc10ccbed084b91af2ceff8400e6082402367f18a2c6248bac17d2fc951607", + "env_crypt_key": "4f3cf0a19a0444674c8e51222afd395b8df9fad2ba3cd7956f640a4b3c046db6", + "k256_key": "d6e88992cdeeee35fe70b5db61ab66cdb191fb9b6ec9313757ef162dd7214d5d", + "k256_signature": "9e618603e72d01fedb82deff6daf2d62a572becf0059eec3f89c1ab40e1f2e594d2a283f843f34e8f39e4cc49a612496ce67223a12ac923f8efe330346dfc6c500", + "gateway_app_id": "any", + "ca_cert": "-----BEGIN CERTIFICATE-----\nMIIBmzCCAUCgAwIBAgIUU7801+krCs2OpIdne3t6OWrJ2fMwCgYIKoZIzj0EAwIw\nKTEPMA0GA1UECgwGRHN0YWNrMRYwFAYDVQQDDA1Ec3RhY2sgS01TIENBMB4XDTc1\nMDEwMTAwMDAwMFoXDTM1MTIxOTAyNTEzOVowKTEPMA0GA1UECgwGRHN0YWNrMRYw\nFAYDVQQDDA1Ec3RhY2sgS01TIENBMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE\nGbJFfdm4qmRG2YDxNv/3gS7NbHd0DusOKLENVsDAACiltuWdzqMH1YO9H3B2npwR\nbfK8+xdYqV2GE+feHISCwKNGMEQwDwYDVR0PAQH/BAUDAweGADAdBgNVHQ4EFgQU\nevjJ+VZPvDxHJ2ejjeIaUYMMcEcwEgYDVR0TAQH/BAgwBgEB/wIBATAKBggqhkjO\nPQQDAgNJADBGAiEAvAYUOGbU5QC23zzQtJqm7/hGzVK5SlI0P7yGDii+/4ACIQCN\nbKkagb0uncr6sUKlhKrpHhID+WWTvqJj0TrkvbVdCg==\n-----END CERTIFICATE-----\n", + "key_provider": { + "None": { + "key": "-----BEGIN PRIVATE 
KEY-----\nMIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg1PYCFKYfDmUfv5fk\nstppasf4mPGqnz0fEoLEnGx8CnKhRANCAAQZskV92biqZEbZgPE2//eBLs1sd3QO\n6w4osQ1WwMAAKKW25Z3OowfVg70fcHaenBFt8rz7F1ipXYYT594chILA\n-----END PRIVATE KEY-----\n" + } + } +} \ No newline at end of file diff --git a/sdk/simulator/attestation.bin b/sdk/simulator/attestation.bin new file mode 100644 index 0000000000000000000000000000000000000000..2c4aef253944d9ad266d0fda37a558fd559dd74e GIT binary patch literal 9504 zcmds6cYG7|`#*+IQid$qPzthI+RIKV>n?X$?Iq1lDo~Qk<+76|Y0?IeBA^Tvl%fSe z3&>QEEkm@jL1Zdha4@8R44H!Z`&`;WE%;URzu)E6s*jP^$UeAf%iXWDgR|dlaW_7axGkqfEp`ax$F3DSdbD|GioItiyKj8ix6#qV zKX{{SOLJ_z2wZ6L#u^6-UG28^QVkpS7LU1+PzZJNnV7FY31${@}rZ{qve2cInRgy*~c+%zt&j0dSg< zg33tuul7v5$}_`-@6H<9XmPeVcf#>YQ~Q;UxR`a>(#a}%Fk(_*^Y-+V)U(G=EEb+V zv5hzP!M%|ynjD{Uf6i-He>kdmaoO<9n;)J(GvxX6qkp^I@nWlq+tP*|ZTiyY>+7za z{$Mok+W3)Yu66&Zt82sfbDs}P!OQQwz3J!7My-3^pLMnoqy$_2r173Te<$+Pd;LZQN~gAy zsd$SXNUk(!F{&+jedFdcrXzS_6d@!m zLXH$E+z5@*DkC5eGK06!E;EJ|QkC2!K{XmUOdDiHPP;5GO8Q`01LH+9H)5&UA0SwG z4Fj%952H0*8Z14zEax zA-XE9NJ7F^IcSpIthu{IuCVH*}OEGCt>G^$QakHmyvn}?&erj zF4UTB^o2>UfFHG|dBTP$=0?MAmP4lS66#2i8mJ(_II3K+T@>IcFbtCb|I#se44i}( zb_4jBG=LbqCb2zkgKa#@i|9zdfhERr)rgVI;d(VOYt?24D$~R)Zj=s41+-a_XSK?; zFexVlImKcpDf1NCDJiMUB$EYrfl#Vqnsl=<>Ze7B7QPQXWs^JKt=I2H)XZ_oH6j#oh;Syx(J&Ijkc9nuL?h2c5k(AP z{Nnb=Bs!1_3L8(a!Bs3oqmv-6K#ZmpJ`jB}iR;lKqQP-off)h@k50qRl?Or*j!|pD zL`7k+U&?r?faUbKWNA2zVj5fmG7~k#5e-U*!FfNTBTNinPoUEZl=Z{~Cav9%GXOIl zqq7)f^e+%lfPerG)YS-O0ID_mlL-=lfIfji0sv|lYMUluOnBl#Eri+(7-K>mUnESx z{sIAGOsK=an2gl|_>&3v6A%~%8~}iV{qUL4=~2)d5%A2Xcgdv`|Y;ITB0IvmaF?4TjJvC4!+c z7UIGYk)zli;YW+Gn3D8M=vjz1f zV-lLfEI8^jY6(xV!tO?LOjeVG!cc+3ArNut95Fp`AS=C$@K;sH+_-^26^S;2#$q`JjudoH5Ja3JC+Vx*(`eA| zfKbpeSWL?RQe}8dX#^F5lE&a7HHNSWH(o?wh8Uqot$-FM;(w;qv}#sWKYIFJU1KT> z*!Qn`bxo^gJ=UwGo){jp$eBQoyI~2Uu_&tVLEeyovZhCs1P zXDJF{rW~I^%abU?s_bG%t}5+6?(ynP78HLLrAQV#XWws zAXe9|7|V)1n%cmW_i$*dn(~xU8&~t#fLasuOw5BJ<>?31nxJQ5zB=`452!c~Ot; zVfs(=wJKflL|C(`t79Txbs&eew8Dv~O__X+N1LV-OI$jPR0RUDoWfjfPS_-KWLjK8 zna(WcfeBt8qZ199r-Z#Bd4LF~nFIVlX4ouHi#4VqxfY4$8Dwg^RwE>%1zNMUK=Hr& zZLROL|K)Z(!<-C3B^6)wXjOjfrC+u!>2v1Qq)D#D$=su#58gh0^J#KqlPe{YUw^ry z@sr7|7hcNsePSAqefSx)xQX6gq_^6<6jZNTg^%CfH2ubg^{P_acN-%{Q~*sYsa}B~ z76Zb|-rl(^d13dZ<;~__i3si-Ij86_`ghliwrA$;Fb|w@9_e?ZbCc0Bw&B(M-;7=8 zY0yu7rO`g->Z?EVS?8^0tuFwo_#=TfB&jwiRMF+goi(Oq6Pt~1hU~WXdErCn8>MI0 zPPbIzPHODM7MJ(U-_dhrsU5rOxML<5xPFT_t}Z__n`yUGiRslnSQ9{(q_aWu;64}1ekA@#b+OTo zPfnk0-n_wcLw0_C^wYy;?CN``m+iO*mDLsBppN)#2(Jy0v`9UPTrK=)-EP6l_jn32 zOwT#wzMeFDP35(%O>c~tn=w{y6y>cSKghV`qsk87_lBl@xvIm_tbuRi^FKXtpgBzi z>gsQYY>)JBzt>;y+K$SZOMBd%SAq^IT{iZ(k9A2?lB=$`^Fh|A4d4*Vle9}JsQ19(KR=jiH0Zy&;3xCqDB}PDZ{xJjFK?>WN5l6)70_e)Ler>V>JCtDf)ddwz9irJ(IH=TDjxI@s-Le2igz~9ldsS9JcXnvG8W;yQ_{w$~U`ajjSbuDM!FbB^3&7 zpKCuDDz;fqO0M4sJwNlzXXoDOE&lo9CHv_uJFb85$|lc3qL=YkVpXeN$G>R@&4>Ed zYyVySIAaI2tt|0T^VYBWreT9NQ0PvdHp+EfvGrTu+kEuy^qmbGwt_q$6+)07ynDgU zF@IG3aM6U(qTK?=rFJh&gqa&TZPC@?$A9!Zo^T1p>dM0MXhF<^3;8H^>Cp zreockk}Y4DW}fNq_rjum%k&?&T%HtOG2!63w2H3kOXo(F51q?RgO1fr2B2Z$s`;U% zCY{y58hgSf@WeB*BLSG}Jip}hp}AWZUs`);L_?(cV!APU>?XE->p|-$UdOLJ7OgyV ze{k6lQ!=d^zjo}@{!sV&brEZ-v$-0%1`&p;UQGa8`opq9Kvv*YD?}%BA;7yI6dnW$ zf){w4-nb34d-M8Hm+tb^ODhMR{g`@gZo@`xpi?hy`S}JtgIM{^-A*augI#LQBVoh0 zs5;{k|CgWUZMoILyKBt7WA1vCQe_+K`3Uz#p$6K#Nx^G{xP)6s*?*Z{;r zjhTawm7iF}PE&O?Ehb*c>wa$4&#%+*?)Cbe7(0hmR#sAU_~?+^JkRf}jY~v(QY5op zFM!G=R<%;QNvc&&tXPSnXAsg_)SI5`Kiww8hqxH~3fG(3{qdc+&*|NPig!Z^j6B`3s9uTz*$1WPjw&?%By#TNf9O zXvveuB7;x$)4jZL$At#ABR%*2a$7%SRHy7^caGibJM8d@%~z>a(b#uT1TS($s@$fd zLQxk25WH;3%F_>OD1 
z{cFRvc1kM!K*{YM$q}gh339bmq%h>C>?tu<#F?TiBmr2MX#xNysp8%{LsG8h2A%D> ztq<%ed81k9C94iDYihk!q$)6NwN+4pTi!AKbc?ivyUG{7X-&N%}f8HC0o6l}(8|>EYh_GlN4jXb8lYhB+KU4FP*fa2Aw}+C;HoxRKJfnBYy4>EsDdfC{ z!y2tw)aLVWywUmyIb~3Ji<|ZD+#2`ek1K}1)p@ALe5bPm5AJmbhM)dzlxAI0nXqKsyAZT*#J4lUqxb*bWWz&s_p;9Ryw1&sE&TTj zGDFvve|Dj~+r|t+J7&<5zVCiiGI`M4yUY8&{A&K{-L5f5ZZ5b1d#GX%41uT%jF{{O zBUAlWAC;Pw>hXnB(^Fk`aLa}HVlK?)!U9;xv5Evd5lM0FFb8G>il`O^Y=VU_m&bu& zn8y}_-(vVeyIpJ(S*-#dPhb~|UD zz|Ij_`8EN?7u!k7%7uk?zKCnHvmHWMC=gR(lFb&_9UL3!u(5@FkyXsMiv(;gCA3p) zQp6K+NH}%$L!WHaJS!DUtM6R#o#oeoz3d$h6d$~(tn79F{V&%awSS&7X<>Xz!TR!p z^UG%9t^2j?wzE$dOx%p?9pR>N$D>^97+tlPm3*P=a?9N&_^~LzV|?)Xi@$ZdzVEb& zw{(1E%ilt4uH0XDO_ev{%E2}}HJjn;TnkZFdolEc)cao<^mK{vxAFYvuzbqs`hOfG zbWoB!;jGL|K+A~Oi@a7_h8>h4(vuMg(OGPe zH|*45>8Z3eLdC4bkFizL|ALjnum(vo0SD3+q^~_ddZe_Ckj+P zQ>llhW(`fH1Z*3{gILHF z+DV6lg2guR(A4yysbsJaz!dr;K%@x#@1uNyP;q|P6{i4&Eo2MDB7vAK;(@gS%2${l zr9xpaxFj?XF-9fC1W8xEGvK8Jyc$**FgOco(DrC540b!L-Y^Ad{z6|q9V}!>%ogTf ze>4o5E$K|`OYj9EL2n_QA8g*s0Vm)S4h=i4Au8V&urrzuO^rq! TR$vvt2nQ*$mfq~qsiXf7s(l22 literal 0 HcmV?d00001 diff --git a/sdk/simulator/build.sh b/sdk/simulator/build.sh new file mode 100755 index 00000000..e60dcfa9 --- /dev/null +++ b/sdk/simulator/build.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +# SPDX-FileCopyrightText: © 2025 Phala Network +# +# SPDX-License-Identifier: BUSL-1.1 + +cd $(dirname $0) +cargo build --release -p dstack-guest-agent +cp ../../target/release/dstack-guest-agent . +ln -sf dstack-guest-agent dstack-simulator + diff --git a/sdk/simulator/dstack.toml b/sdk/simulator/dstack.toml new file mode 100644 index 00000000..abd9e43d --- /dev/null +++ b/sdk/simulator/dstack.toml @@ -0,0 +1,36 @@ +# SPDX-FileCopyrightText: © 2025 Phala Network +# +# SPDX-License-Identifier: BUSL-1.1 + +[default] +workers = 8 +max_blocking = 64 +ident = "dstack Simulator" +temp_dir = "/tmp" +keep_alive = 10 +log_level = "debug" + +[default.core] +keys_file = "appkeys.json" +compose_file = "app-compose.json" +sys_config_file = "sys-config.json" + +[default.core.simulator] +enabled = true +attestation_file = "attestation.bin" + +[internal-v0] +address = "unix:./tappd.sock" +reuse = true + +[internal] +address = "unix:./dstack.sock" +reuse = true + +[external] +address = "unix:./external.sock" +reuse = true + +[guest-api] +address = "unix:./guest.sock" +reuse = true diff --git a/sdk/simulator/sys-config.json b/sdk/simulator/sys-config.json new file mode 100644 index 00000000..1b2d5b48 --- /dev/null +++ b/sdk/simulator/sys-config.json @@ -0,0 +1,12 @@ +{ + "kms_urls": [ + "https://kms.1022.dstack.org:12001" + ], + "gateway_urls": [ + "https://tproxy.1022.dstack.org:12002" + ], + "pccs_url": "", + "docker_registry": "", + "host_api_url": "vsock://2:12000/api", + "vm_config": "{\"os_image_hash\":\"64f0d1545cd510a8dfed7ad609d105b5d41f0cb2afcfdda8867ede00c88add7a\",\"cpu_count\":1,\"memory_size\":2147483648}" +} \ No newline at end of file From 90dcdd0262327e12d3ce356c53ef65079f072077 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 07:32:38 +0000 Subject: [PATCH 17/18] Fix unit test --- ra-tls/src/cert.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ra-tls/src/cert.rs b/ra-tls/src/cert.rs index cc416e2b..dffa4757 100644 --- a/ra-tls/src/cert.rs +++ b/ra-tls/src/cert.rs @@ -690,7 +690,7 @@ mod tests { }; let actual = hex::encode(csr.encode()); - let expected = 
"44706c65617365207369676e20636572743a0c0102030040746573742e6578616d706c652e636f6d00010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; + let expected = "44706c65617365207369676e20636572743a0c0102030040746573742e6578616d706c652e636f6d0001000000000000040900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; assert_eq!(actual, expected); } From 590b8f3186f4671398d5307f091e6b8c3a6bd7a0 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Wed, 21 Jan 2026 07:34:04 +0000 Subject: [PATCH 18/18] Update console_v1.html --- vmm/src/console_v1.html | 1549 ++++++++++++++++++++------------------- 1 file changed, 775 insertions(+), 774 deletions(-) diff --git a/vmm/src/console_v1.html b/vmm/src/console_v1.html index de869b38..54ce7d57 100644 --- a/vmm/src/console_v1.html +++ b/vmm/src/console_v1.html @@ -2834,6 +2834,7 @@

Derive VM

}; if (vmForm.value.key_provider !== undefined) { appCompose.key_provider = vmForm.value.key_provider; + // For backward compatibility if (vmForm.value.key_provider === 'kms') { appCompose.kms_enabled = true; } @@ -3170,7 +3171,7 @@

Derive VM

attachAllGpus: false, encryptedEnvs: [], // Clear environment variables ports: [], // Clear port mappings - storage_fs: ((_g = theVm.appCompose) === null || _g === void 0 ? void 0 : _g.storage_fs) || 'ext4', + storage_fs: ((_g = theVm.appCompose) === null || _g === void 0 ? void 0 : _g.storage_fs) || 'zfs', app_id: config.app_id || '', kms_urls: config.kms_urls || [], key_provider: getKeyProvider(theVm), @@ -14167,796 +14168,796 @@

Derive VM

}, map: {"@protobufjs/aspromise":"node_modules/@protobufjs/aspromise/index.js","@protobufjs/base64":"node_modules/@protobufjs/base64/index.js","@protobufjs/eventemitter":"node_modules/@protobufjs/eventemitter/index.js","@protobufjs/float":"node_modules/@protobufjs/float/index.js","@protobufjs/inquire":"node_modules/@protobufjs/inquire/index.js","@protobufjs/utf8":"node_modules/@protobufjs/utf8/index.js","@protobufjs/pool":"node_modules/@protobufjs/pool/index.js","./longbits":"node_modules/protobufjs/src/util/longbits.js"} }, 'node_modules/@protobufjs/aspromise/index.js': { factory: function(module, exports, require) { -"use strict"; -module.exports = asPromise; - -/** - * Callback as used by {@link util.asPromise}. - * @typedef asPromiseCallback - * @type {function} - * @param {Error|null} error Error, if any - * @param {...*} params Additional arguments - * @returns {undefined} - */ - -/** - * Returns a promise from a node-style callback function. - * @memberof util - * @param {asPromiseCallback} fn Function to call - * @param {*} ctx Function context - * @param {...*} params Function arguments - * @returns {Promise<*>} Promisified function - */ -function asPromise(fn, ctx/*, varargs */) { - var params = new Array(arguments.length - 1), - offset = 0, - index = 2, - pending = true; - while (index < arguments.length) - params[offset++] = arguments[index++]; - return new Promise(function executor(resolve, reject) { - params[offset] = function callback(err/*, varargs */) { - if (pending) { - pending = false; - if (err) - reject(err); - else { - var params = new Array(arguments.length - 1), - offset = 0; - while (offset < params.length) - params[offset++] = arguments[offset]; - resolve.apply(null, params); - } - } - }; - try { - fn.apply(ctx || null, params); - } catch (err) { - if (pending) { - pending = false; - reject(err); - } - } - }); -} +"use strict"; +module.exports = asPromise; + +/** + * Callback as used by {@link util.asPromise}. + * @typedef asPromiseCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {...*} params Additional arguments + * @returns {undefined} + */ + +/** + * Returns a promise from a node-style callback function. + * @memberof util + * @param {asPromiseCallback} fn Function to call + * @param {*} ctx Function context + * @param {...*} params Function arguments + * @returns {Promise<*>} Promisified function + */ +function asPromise(fn, ctx/*, varargs */) { + var params = new Array(arguments.length - 1), + offset = 0, + index = 2, + pending = true; + while (index < arguments.length) + params[offset++] = arguments[index++]; + return new Promise(function executor(resolve, reject) { + params[offset] = function callback(err/*, varargs */) { + if (pending) { + pending = false; + if (err) + reject(err); + else { + var params = new Array(arguments.length - 1), + offset = 0; + while (offset < params.length) + params[offset++] = arguments[offset]; + resolve.apply(null, params); + } + } + }; + try { + fn.apply(ctx || null, params); + } catch (err) { + if (pending) { + pending = false; + reject(err); + } + } + }); +} }, map: {} }, 'node_modules/@protobufjs/base64/index.js': { factory: function(module, exports, require) { -"use strict"; - -/** - * A minimal base64 implementation for number arrays. - * @memberof util - * @namespace - */ -var base64 = exports; - -/** - * Calculates the byte length of a base64 encoded string. 
- * @param {string} string Base64 encoded string - * @returns {number} Byte length - */ -base64.length = function length(string) { - var p = string.length; - if (!p) - return 0; - var n = 0; - while (--p % 4 > 1 && string.charAt(p) === "=") - ++n; - return Math.ceil(string.length * 3) / 4 - n; -}; - -// Base64 encoding table -var b64 = new Array(64); - -// Base64 decoding table -var s64 = new Array(123); - -// 65..90, 97..122, 48..57, 43, 47 -for (var i = 0; i < 64;) - s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i - 59 | 43] = i++; - -/** - * Encodes a buffer to a base64 encoded string. - * @param {Uint8Array} buffer Source buffer - * @param {number} start Source start - * @param {number} end Source end - * @returns {string} Base64 encoded string - */ -base64.encode = function encode(buffer, start, end) { - var parts = null, - chunk = []; - var i = 0, // output index - j = 0, // goto index - t; // temporary - while (start < end) { - var b = buffer[start++]; - switch (j) { - case 0: - chunk[i++] = b64[b >> 2]; - t = (b & 3) << 4; - j = 1; - break; - case 1: - chunk[i++] = b64[t | b >> 4]; - t = (b & 15) << 2; - j = 2; - break; - case 2: - chunk[i++] = b64[t | b >> 6]; - chunk[i++] = b64[b & 63]; - j = 0; - break; - } - if (i > 8191) { - (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk)); - i = 0; - } - } - if (j) { - chunk[i++] = b64[t]; - chunk[i++] = 61; - if (j === 1) - chunk[i++] = 61; - } - if (parts) { - if (i) - parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); - return parts.join(""); - } - return String.fromCharCode.apply(String, chunk.slice(0, i)); -}; - -var invalidEncoding = "invalid encoding"; - -/** - * Decodes a base64 encoded string to a buffer. - * @param {string} string Source string - * @param {Uint8Array} buffer Destination buffer - * @param {number} offset Destination offset - * @returns {number} Number of bytes written - * @throws {Error} If encoding is invalid - */ -base64.decode = function decode(string, buffer, offset) { - var start = offset; - var j = 0, // goto index - t; // temporary - for (var i = 0; i < string.length;) { - var c = string.charCodeAt(i++); - if (c === 61 && j > 1) - break; - if ((c = s64[c]) === undefined) - throw Error(invalidEncoding); - switch (j) { - case 0: - t = c; - j = 1; - break; - case 1: - buffer[offset++] = t << 2 | (c & 48) >> 4; - t = c; - j = 2; - break; - case 2: - buffer[offset++] = (t & 15) << 4 | (c & 60) >> 2; - t = c; - j = 3; - break; - case 3: - buffer[offset++] = (t & 3) << 6 | c; - j = 0; - break; - } - } - if (j === 1) - throw Error(invalidEncoding); - return offset - start; -}; - -/** - * Tests if the specified string appears to be base64 encoded. - * @param {string} string String to test - * @returns {boolean} `true` if probably base64 encoded, otherwise false - */ -base64.test = function test(string) { - return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string); -}; +"use strict"; + +/** + * A minimal base64 implementation for number arrays. + * @memberof util + * @namespace + */ +var base64 = exports; + +/** + * Calculates the byte length of a base64 encoded string. 
+ * @param {string} string Base64 encoded string + * @returns {number} Byte length + */ +base64.length = function length(string) { + var p = string.length; + if (!p) + return 0; + var n = 0; + while (--p % 4 > 1 && string.charAt(p) === "=") + ++n; + return Math.ceil(string.length * 3) / 4 - n; +}; + +// Base64 encoding table +var b64 = new Array(64); + +// Base64 decoding table +var s64 = new Array(123); + +// 65..90, 97..122, 48..57, 43, 47 +for (var i = 0; i < 64;) + s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i - 59 | 43] = i++; + +/** + * Encodes a buffer to a base64 encoded string. + * @param {Uint8Array} buffer Source buffer + * @param {number} start Source start + * @param {number} end Source end + * @returns {string} Base64 encoded string + */ +base64.encode = function encode(buffer, start, end) { + var parts = null, + chunk = []; + var i = 0, // output index + j = 0, // goto index + t; // temporary + while (start < end) { + var b = buffer[start++]; + switch (j) { + case 0: + chunk[i++] = b64[b >> 2]; + t = (b & 3) << 4; + j = 1; + break; + case 1: + chunk[i++] = b64[t | b >> 4]; + t = (b & 15) << 2; + j = 2; + break; + case 2: + chunk[i++] = b64[t | b >> 6]; + chunk[i++] = b64[b & 63]; + j = 0; + break; + } + if (i > 8191) { + (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk)); + i = 0; + } + } + if (j) { + chunk[i++] = b64[t]; + chunk[i++] = 61; + if (j === 1) + chunk[i++] = 61; + } + if (parts) { + if (i) + parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); + return parts.join(""); + } + return String.fromCharCode.apply(String, chunk.slice(0, i)); +}; + +var invalidEncoding = "invalid encoding"; + +/** + * Decodes a base64 encoded string to a buffer. + * @param {string} string Source string + * @param {Uint8Array} buffer Destination buffer + * @param {number} offset Destination offset + * @returns {number} Number of bytes written + * @throws {Error} If encoding is invalid + */ +base64.decode = function decode(string, buffer, offset) { + var start = offset; + var j = 0, // goto index + t; // temporary + for (var i = 0; i < string.length;) { + var c = string.charCodeAt(i++); + if (c === 61 && j > 1) + break; + if ((c = s64[c]) === undefined) + throw Error(invalidEncoding); + switch (j) { + case 0: + t = c; + j = 1; + break; + case 1: + buffer[offset++] = t << 2 | (c & 48) >> 4; + t = c; + j = 2; + break; + case 2: + buffer[offset++] = (t & 15) << 4 | (c & 60) >> 2; + t = c; + j = 3; + break; + case 3: + buffer[offset++] = (t & 3) << 6 | c; + j = 0; + break; + } + } + if (j === 1) + throw Error(invalidEncoding); + return offset - start; +}; + +/** + * Tests if the specified string appears to be base64 encoded. + * @param {string} string String to test + * @returns {boolean} `true` if probably base64 encoded, otherwise false + */ +base64.test = function test(string) { + return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string); +}; }, map: {} }, 'node_modules/@protobufjs/eventemitter/index.js': { factory: function(module, exports, require) { -"use strict"; -module.exports = EventEmitter; - -/** - * Constructs a new event emitter instance. - * @classdesc A minimal event emitter. - * @memberof util - * @constructor - */ -function EventEmitter() { - - /** - * Registered listeners. - * @type {Object.} - * @private - */ - this._listeners = {}; -} - -/** - * Registers an event listener. 
- * @param {string} evt Event name - * @param {function} fn Listener - * @param {*} [ctx] Listener context - * @returns {util.EventEmitter} `this` - */ -EventEmitter.prototype.on = function on(evt, fn, ctx) { - (this._listeners[evt] || (this._listeners[evt] = [])).push({ - fn : fn, - ctx : ctx || this - }); - return this; -}; - -/** - * Removes an event listener or any matching listeners if arguments are omitted. - * @param {string} [evt] Event name. Removes all listeners if omitted. - * @param {function} [fn] Listener to remove. Removes all listeners of `evt` if omitted. - * @returns {util.EventEmitter} `this` - */ -EventEmitter.prototype.off = function off(evt, fn) { - if (evt === undefined) - this._listeners = {}; - else { - if (fn === undefined) - this._listeners[evt] = []; - else { - var listeners = this._listeners[evt]; - for (var i = 0; i < listeners.length;) - if (listeners[i].fn === fn) - listeners.splice(i, 1); - else - ++i; - } - } - return this; -}; - -/** - * Emits an event by calling its listeners with the specified arguments. - * @param {string} evt Event name - * @param {...*} args Arguments - * @returns {util.EventEmitter} `this` - */ -EventEmitter.prototype.emit = function emit(evt) { - var listeners = this._listeners[evt]; - if (listeners) { - var args = [], - i = 1; - for (; i < arguments.length;) - args.push(arguments[i++]); - for (i = 0; i < listeners.length;) - listeners[i].fn.apply(listeners[i++].ctx, args); - } - return this; -}; +"use strict"; +module.exports = EventEmitter; + +/** + * Constructs a new event emitter instance. + * @classdesc A minimal event emitter. + * @memberof util + * @constructor + */ +function EventEmitter() { + + /** + * Registered listeners. + * @type {Object.} + * @private + */ + this._listeners = {}; +} + +/** + * Registers an event listener. + * @param {string} evt Event name + * @param {function} fn Listener + * @param {*} [ctx] Listener context + * @returns {util.EventEmitter} `this` + */ +EventEmitter.prototype.on = function on(evt, fn, ctx) { + (this._listeners[evt] || (this._listeners[evt] = [])).push({ + fn : fn, + ctx : ctx || this + }); + return this; +}; + +/** + * Removes an event listener or any matching listeners if arguments are omitted. + * @param {string} [evt] Event name. Removes all listeners if omitted. + * @param {function} [fn] Listener to remove. Removes all listeners of `evt` if omitted. + * @returns {util.EventEmitter} `this` + */ +EventEmitter.prototype.off = function off(evt, fn) { + if (evt === undefined) + this._listeners = {}; + else { + if (fn === undefined) + this._listeners[evt] = []; + else { + var listeners = this._listeners[evt]; + for (var i = 0; i < listeners.length;) + if (listeners[i].fn === fn) + listeners.splice(i, 1); + else + ++i; + } + } + return this; +}; + +/** + * Emits an event by calling its listeners with the specified arguments. + * @param {string} evt Event name + * @param {...*} args Arguments + * @returns {util.EventEmitter} `this` + */ +EventEmitter.prototype.emit = function emit(evt) { + var listeners = this._listeners[evt]; + if (listeners) { + var args = [], + i = 1; + for (; i < arguments.length;) + args.push(arguments[i++]); + for (i = 0; i < listeners.length;) + listeners[i].fn.apply(listeners[i++].ctx, args); + } + return this; +}; }, map: {} }, 'node_modules/@protobufjs/float/index.js': { factory: function(module, exports, require) { -"use strict"; - -module.exports = factory(factory); - -/** - * Reads / writes floats / doubles from / to buffers. 
- * @name util.float - * @namespace - */ - -/** - * Writes a 32 bit float to a buffer using little endian byte order. - * @name util.float.writeFloatLE - * @function - * @param {number} val Value to write - * @param {Uint8Array} buf Target buffer - * @param {number} pos Target buffer offset - * @returns {undefined} - */ - -/** - * Writes a 32 bit float to a buffer using big endian byte order. - * @name util.float.writeFloatBE - * @function - * @param {number} val Value to write - * @param {Uint8Array} buf Target buffer - * @param {number} pos Target buffer offset - * @returns {undefined} - */ - -/** - * Reads a 32 bit float from a buffer using little endian byte order. - * @name util.float.readFloatLE - * @function - * @param {Uint8Array} buf Source buffer - * @param {number} pos Source buffer offset - * @returns {number} Value read - */ - -/** - * Reads a 32 bit float from a buffer using big endian byte order. - * @name util.float.readFloatBE - * @function - * @param {Uint8Array} buf Source buffer - * @param {number} pos Source buffer offset - * @returns {number} Value read - */ - -/** - * Writes a 64 bit double to a buffer using little endian byte order. - * @name util.float.writeDoubleLE - * @function - * @param {number} val Value to write - * @param {Uint8Array} buf Target buffer - * @param {number} pos Target buffer offset - * @returns {undefined} - */ - -/** - * Writes a 64 bit double to a buffer using big endian byte order. - * @name util.float.writeDoubleBE - * @function - * @param {number} val Value to write - * @param {Uint8Array} buf Target buffer - * @param {number} pos Target buffer offset - * @returns {undefined} - */ - -/** - * Reads a 64 bit double from a buffer using little endian byte order. - * @name util.float.readDoubleLE - * @function - * @param {Uint8Array} buf Source buffer - * @param {number} pos Source buffer offset - * @returns {number} Value read - */ - -/** - * Reads a 64 bit double from a buffer using big endian byte order. - * @name util.float.readDoubleBE - * @function - * @param {Uint8Array} buf Source buffer - * @param {number} pos Source buffer offset - * @returns {number} Value read - */ - -// Factory function for the purpose of node-based testing in modified global environments -function factory(exports) { - - // float: typed array - if (typeof Float32Array !== "undefined") (function() { - - var f32 = new Float32Array([ -0 ]), - f8b = new Uint8Array(f32.buffer), - le = f8b[3] === 128; - - function writeFloat_f32_cpy(val, buf, pos) { - f32[0] = val; - buf[pos ] = f8b[0]; - buf[pos + 1] = f8b[1]; - buf[pos + 2] = f8b[2]; - buf[pos + 3] = f8b[3]; - } - - function writeFloat_f32_rev(val, buf, pos) { - f32[0] = val; - buf[pos ] = f8b[3]; - buf[pos + 1] = f8b[2]; - buf[pos + 2] = f8b[1]; - buf[pos + 3] = f8b[0]; - } - - /* istanbul ignore next */ - exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev; - /* istanbul ignore next */ - exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy; - - function readFloat_f32_cpy(buf, pos) { - f8b[0] = buf[pos ]; - f8b[1] = buf[pos + 1]; - f8b[2] = buf[pos + 2]; - f8b[3] = buf[pos + 3]; - return f32[0]; - } - - function readFloat_f32_rev(buf, pos) { - f8b[3] = buf[pos ]; - f8b[2] = buf[pos + 1]; - f8b[1] = buf[pos + 2]; - f8b[0] = buf[pos + 3]; - return f32[0]; - } - - /* istanbul ignore next */ - exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev; - /* istanbul ignore next */ - exports.readFloatBE = le ? 
readFloat_f32_rev : readFloat_f32_cpy; - - // float: ieee754 - })(); else (function() { - - function writeFloat_ieee754(writeUint, val, buf, pos) { - var sign = val < 0 ? 1 : 0; - if (sign) - val = -val; - if (val === 0) - writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos); - else if (isNaN(val)) - writeUint(2143289344, buf, pos); - else if (val > 3.4028234663852886e+38) // +-Infinity - writeUint((sign << 31 | 2139095040) >>> 0, buf, pos); - else if (val < 1.1754943508222875e-38) // denormal - writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos); - else { - var exponent = Math.floor(Math.log(val) / Math.LN2), - mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607; - writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos); - } - } - - exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE); - exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE); - - function readFloat_ieee754(readUint, buf, pos) { - var uint = readUint(buf, pos), - sign = (uint >> 31) * 2 + 1, - exponent = uint >>> 23 & 255, - mantissa = uint & 8388607; - return exponent === 255 - ? mantissa - ? NaN - : sign * Infinity - : exponent === 0 // denormal - ? sign * 1.401298464324817e-45 * mantissa - : sign * Math.pow(2, exponent - 150) * (mantissa + 8388608); - } - - exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE); - exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE); - - })(); - - // double: typed array - if (typeof Float64Array !== "undefined") (function() { - - var f64 = new Float64Array([-0]), - f8b = new Uint8Array(f64.buffer), - le = f8b[7] === 128; - - function writeDouble_f64_cpy(val, buf, pos) { - f64[0] = val; - buf[pos ] = f8b[0]; - buf[pos + 1] = f8b[1]; - buf[pos + 2] = f8b[2]; - buf[pos + 3] = f8b[3]; - buf[pos + 4] = f8b[4]; - buf[pos + 5] = f8b[5]; - buf[pos + 6] = f8b[6]; - buf[pos + 7] = f8b[7]; - } - - function writeDouble_f64_rev(val, buf, pos) { - f64[0] = val; - buf[pos ] = f8b[7]; - buf[pos + 1] = f8b[6]; - buf[pos + 2] = f8b[5]; - buf[pos + 3] = f8b[4]; - buf[pos + 4] = f8b[3]; - buf[pos + 5] = f8b[2]; - buf[pos + 6] = f8b[1]; - buf[pos + 7] = f8b[0]; - } - - /* istanbul ignore next */ - exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev; - /* istanbul ignore next */ - exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy; - - function readDouble_f64_cpy(buf, pos) { - f8b[0] = buf[pos ]; - f8b[1] = buf[pos + 1]; - f8b[2] = buf[pos + 2]; - f8b[3] = buf[pos + 3]; - f8b[4] = buf[pos + 4]; - f8b[5] = buf[pos + 5]; - f8b[6] = buf[pos + 6]; - f8b[7] = buf[pos + 7]; - return f64[0]; - } - - function readDouble_f64_rev(buf, pos) { - f8b[7] = buf[pos ]; - f8b[6] = buf[pos + 1]; - f8b[5] = buf[pos + 2]; - f8b[4] = buf[pos + 3]; - f8b[3] = buf[pos + 4]; - f8b[2] = buf[pos + 5]; - f8b[1] = buf[pos + 6]; - f8b[0] = buf[pos + 7]; - return f64[0]; - } - - /* istanbul ignore next */ - exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev; - /* istanbul ignore next */ - exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy; - - // double: ieee754 - })(); else (function() { - - function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) { - var sign = val < 0 ? 1 : 0; - if (sign) - val = -val; - if (val === 0) { - writeUint(0, buf, pos + off0); - writeUint(1 / val > 0 ? 
/* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1); - } else if (isNaN(val)) { - writeUint(0, buf, pos + off0); - writeUint(2146959360, buf, pos + off1); - } else if (val > 1.7976931348623157e+308) { // +-Infinity - writeUint(0, buf, pos + off0); - writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1); - } else { - var mantissa; - if (val < 2.2250738585072014e-308) { // denormal - mantissa = val / 5e-324; - writeUint(mantissa >>> 0, buf, pos + off0); - writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1); - } else { - var exponent = Math.floor(Math.log(val) / Math.LN2); - if (exponent === 1024) - exponent = 1023; - mantissa = val * Math.pow(2, -exponent); - writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0); - writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1); - } - } - } - - exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4); - exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0); - - function readDouble_ieee754(readUint, off0, off1, buf, pos) { - var lo = readUint(buf, pos + off0), - hi = readUint(buf, pos + off1); - var sign = (hi >> 31) * 2 + 1, - exponent = hi >>> 20 & 2047, - mantissa = 4294967296 * (hi & 1048575) + lo; - return exponent === 2047 - ? mantissa - ? NaN - : sign * Infinity - : exponent === 0 // denormal - ? sign * 5e-324 * mantissa - : sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496); - } - - exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4); - exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0); - - })(); - - return exports; -} - -// uint helpers - -function writeUintLE(val, buf, pos) { - buf[pos ] = val & 255; - buf[pos + 1] = val >>> 8 & 255; - buf[pos + 2] = val >>> 16 & 255; - buf[pos + 3] = val >>> 24; -} - -function writeUintBE(val, buf, pos) { - buf[pos ] = val >>> 24; - buf[pos + 1] = val >>> 16 & 255; - buf[pos + 2] = val >>> 8 & 255; - buf[pos + 3] = val & 255; -} - -function readUintLE(buf, pos) { - return (buf[pos ] - | buf[pos + 1] << 8 - | buf[pos + 2] << 16 - | buf[pos + 3] << 24) >>> 0; -} - -function readUintBE(buf, pos) { - return (buf[pos ] << 24 - | buf[pos + 1] << 16 - | buf[pos + 2] << 8 - | buf[pos + 3]) >>> 0; -} +"use strict"; + +module.exports = factory(factory); + +/** + * Reads / writes floats / doubles from / to buffers. + * @name util.float + * @namespace + */ + +/** + * Writes a 32 bit float to a buffer using little endian byte order. + * @name util.float.writeFloatLE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Writes a 32 bit float to a buffer using big endian byte order. + * @name util.float.writeFloatBE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Reads a 32 bit float from a buffer using little endian byte order. + * @name util.float.readFloatLE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +/** + * Reads a 32 bit float from a buffer using big endian byte order. 
+ * @name util.float.readFloatBE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +/** + * Writes a 64 bit double to a buffer using little endian byte order. + * @name util.float.writeDoubleLE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Writes a 64 bit double to a buffer using big endian byte order. + * @name util.float.writeDoubleBE + * @function + * @param {number} val Value to write + * @param {Uint8Array} buf Target buffer + * @param {number} pos Target buffer offset + * @returns {undefined} + */ + +/** + * Reads a 64 bit double from a buffer using little endian byte order. + * @name util.float.readDoubleLE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +/** + * Reads a 64 bit double from a buffer using big endian byte order. + * @name util.float.readDoubleBE + * @function + * @param {Uint8Array} buf Source buffer + * @param {number} pos Source buffer offset + * @returns {number} Value read + */ + +// Factory function for the purpose of node-based testing in modified global environments +function factory(exports) { + + // float: typed array + if (typeof Float32Array !== "undefined") (function() { + + var f32 = new Float32Array([ -0 ]), + f8b = new Uint8Array(f32.buffer), + le = f8b[3] === 128; + + function writeFloat_f32_cpy(val, buf, pos) { + f32[0] = val; + buf[pos ] = f8b[0]; + buf[pos + 1] = f8b[1]; + buf[pos + 2] = f8b[2]; + buf[pos + 3] = f8b[3]; + } + + function writeFloat_f32_rev(val, buf, pos) { + f32[0] = val; + buf[pos ] = f8b[3]; + buf[pos + 1] = f8b[2]; + buf[pos + 2] = f8b[1]; + buf[pos + 3] = f8b[0]; + } + + /* istanbul ignore next */ + exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev; + /* istanbul ignore next */ + exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy; + + function readFloat_f32_cpy(buf, pos) { + f8b[0] = buf[pos ]; + f8b[1] = buf[pos + 1]; + f8b[2] = buf[pos + 2]; + f8b[3] = buf[pos + 3]; + return f32[0]; + } + + function readFloat_f32_rev(buf, pos) { + f8b[3] = buf[pos ]; + f8b[2] = buf[pos + 1]; + f8b[1] = buf[pos + 2]; + f8b[0] = buf[pos + 3]; + return f32[0]; + } + + /* istanbul ignore next */ + exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev; + /* istanbul ignore next */ + exports.readFloatBE = le ? readFloat_f32_rev : readFloat_f32_cpy; + + // float: ieee754 + })(); else (function() { + + function writeFloat_ieee754(writeUint, val, buf, pos) { + var sign = val < 0 ? 1 : 0; + if (sign) + val = -val; + if (val === 0) + writeUint(1 / val > 0 ? 
/* positive */ 0 : /* negative 0 */ 2147483648, buf, pos); + else if (isNaN(val)) + writeUint(2143289344, buf, pos); + else if (val > 3.4028234663852886e+38) // +-Infinity + writeUint((sign << 31 | 2139095040) >>> 0, buf, pos); + else if (val < 1.1754943508222875e-38) // denormal + writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos); + else { + var exponent = Math.floor(Math.log(val) / Math.LN2), + mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607; + writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos); + } + } + + exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE); + exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE); + + function readFloat_ieee754(readUint, buf, pos) { + var uint = readUint(buf, pos), + sign = (uint >> 31) * 2 + 1, + exponent = uint >>> 23 & 255, + mantissa = uint & 8388607; + return exponent === 255 + ? mantissa + ? NaN + : sign * Infinity + : exponent === 0 // denormal + ? sign * 1.401298464324817e-45 * mantissa + : sign * Math.pow(2, exponent - 150) * (mantissa + 8388608); + } + + exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE); + exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE); + + })(); + + // double: typed array + if (typeof Float64Array !== "undefined") (function() { + + var f64 = new Float64Array([-0]), + f8b = new Uint8Array(f64.buffer), + le = f8b[7] === 128; + + function writeDouble_f64_cpy(val, buf, pos) { + f64[0] = val; + buf[pos ] = f8b[0]; + buf[pos + 1] = f8b[1]; + buf[pos + 2] = f8b[2]; + buf[pos + 3] = f8b[3]; + buf[pos + 4] = f8b[4]; + buf[pos + 5] = f8b[5]; + buf[pos + 6] = f8b[6]; + buf[pos + 7] = f8b[7]; + } + + function writeDouble_f64_rev(val, buf, pos) { + f64[0] = val; + buf[pos ] = f8b[7]; + buf[pos + 1] = f8b[6]; + buf[pos + 2] = f8b[5]; + buf[pos + 3] = f8b[4]; + buf[pos + 4] = f8b[3]; + buf[pos + 5] = f8b[2]; + buf[pos + 6] = f8b[1]; + buf[pos + 7] = f8b[0]; + } + + /* istanbul ignore next */ + exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev; + /* istanbul ignore next */ + exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy; + + function readDouble_f64_cpy(buf, pos) { + f8b[0] = buf[pos ]; + f8b[1] = buf[pos + 1]; + f8b[2] = buf[pos + 2]; + f8b[3] = buf[pos + 3]; + f8b[4] = buf[pos + 4]; + f8b[5] = buf[pos + 5]; + f8b[6] = buf[pos + 6]; + f8b[7] = buf[pos + 7]; + return f64[0]; + } + + function readDouble_f64_rev(buf, pos) { + f8b[7] = buf[pos ]; + f8b[6] = buf[pos + 1]; + f8b[5] = buf[pos + 2]; + f8b[4] = buf[pos + 3]; + f8b[3] = buf[pos + 4]; + f8b[2] = buf[pos + 5]; + f8b[1] = buf[pos + 6]; + f8b[0] = buf[pos + 7]; + return f64[0]; + } + + /* istanbul ignore next */ + exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev; + /* istanbul ignore next */ + exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy; + + // double: ieee754 + })(); else (function() { + + function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) { + var sign = val < 0 ? 1 : 0; + if (sign) + val = -val; + if (val === 0) { + writeUint(0, buf, pos + off0); + writeUint(1 / val > 0 ? 
/* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1); + } else if (isNaN(val)) { + writeUint(0, buf, pos + off0); + writeUint(2146959360, buf, pos + off1); + } else if (val > 1.7976931348623157e+308) { // +-Infinity + writeUint(0, buf, pos + off0); + writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1); + } else { + var mantissa; + if (val < 2.2250738585072014e-308) { // denormal + mantissa = val / 5e-324; + writeUint(mantissa >>> 0, buf, pos + off0); + writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1); + } else { + var exponent = Math.floor(Math.log(val) / Math.LN2); + if (exponent === 1024) + exponent = 1023; + mantissa = val * Math.pow(2, -exponent); + writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0); + writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1); + } + } + } + + exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4); + exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0); + + function readDouble_ieee754(readUint, off0, off1, buf, pos) { + var lo = readUint(buf, pos + off0), + hi = readUint(buf, pos + off1); + var sign = (hi >> 31) * 2 + 1, + exponent = hi >>> 20 & 2047, + mantissa = 4294967296 * (hi & 1048575) + lo; + return exponent === 2047 + ? mantissa + ? NaN + : sign * Infinity + : exponent === 0 // denormal + ? sign * 5e-324 * mantissa + : sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496); + } + + exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4); + exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0); + + })(); + + return exports; +} + +// uint helpers + +function writeUintLE(val, buf, pos) { + buf[pos ] = val & 255; + buf[pos + 1] = val >>> 8 & 255; + buf[pos + 2] = val >>> 16 & 255; + buf[pos + 3] = val >>> 24; +} + +function writeUintBE(val, buf, pos) { + buf[pos ] = val >>> 24; + buf[pos + 1] = val >>> 16 & 255; + buf[pos + 2] = val >>> 8 & 255; + buf[pos + 3] = val & 255; +} + +function readUintLE(buf, pos) { + return (buf[pos ] + | buf[pos + 1] << 8 + | buf[pos + 2] << 16 + | buf[pos + 3] << 24) >>> 0; +} + +function readUintBE(buf, pos) { + return (buf[pos ] << 24 + | buf[pos + 1] << 16 + | buf[pos + 2] << 8 + | buf[pos + 3]) >>> 0; +} }, map: {} }, 'node_modules/@protobufjs/inquire/index.js': { factory: function(module, exports, require) { -"use strict"; -module.exports = inquire; - -/** - * Requires a module only if available. - * @memberof util - * @param {string} moduleName Module to require - * @returns {?Object} Required module if available and not empty, otherwise `null` - */ -function inquire(moduleName) { - try { - var mod = eval("quire".replace(/^/,"re"))(moduleName); // eslint-disable-line no-eval - if (mod && (mod.length || Object.keys(mod).length)) - return mod; - } catch (e) {} // eslint-disable-line no-empty - return null; -} +"use strict"; +module.exports = inquire; + +/** + * Requires a module only if available. 
+ * @memberof util + * @param {string} moduleName Module to require + * @returns {?Object} Required module if available and not empty, otherwise `null` + */ +function inquire(moduleName) { + try { + var mod = eval("quire".replace(/^/,"re"))(moduleName); // eslint-disable-line no-eval + if (mod && (mod.length || Object.keys(mod).length)) + return mod; + } catch (e) {} // eslint-disable-line no-empty + return null; +} }, map: {} }, 'node_modules/@protobufjs/utf8/index.js': { factory: function(module, exports, require) { -"use strict"; - -/** - * A minimal UTF8 implementation for number arrays. - * @memberof util - * @namespace - */ -var utf8 = exports; - -/** - * Calculates the UTF8 byte length of a string. - * @param {string} string String - * @returns {number} Byte length - */ -utf8.length = function utf8_length(string) { - var len = 0, - c = 0; - for (var i = 0; i < string.length; ++i) { - c = string.charCodeAt(i); - if (c < 128) - len += 1; - else if (c < 2048) - len += 2; - else if ((c & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) { - ++i; - len += 4; - } else - len += 3; - } - return len; -}; - -/** - * Reads UTF8 bytes as a string. - * @param {Uint8Array} buffer Source buffer - * @param {number} start Source start - * @param {number} end Source end - * @returns {string} String read - */ -utf8.read = function utf8_read(buffer, start, end) { - var len = end - start; - if (len < 1) - return ""; - var parts = null, - chunk = [], - i = 0, // char offset - t; // temporary - while (start < end) { - t = buffer[start++]; - if (t < 128) - chunk[i++] = t; - else if (t > 191 && t < 224) - chunk[i++] = (t & 31) << 6 | buffer[start++] & 63; - else if (t > 239 && t < 365) { - t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000; - chunk[i++] = 0xD800 + (t >> 10); - chunk[i++] = 0xDC00 + (t & 1023); - } else - chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63; - if (i > 8191) { - (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk)); - i = 0; - } - } - if (parts) { - if (i) - parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); - return parts.join(""); - } - return String.fromCharCode.apply(String, chunk.slice(0, i)); -}; - -/** - * Writes a string as UTF8 bytes. - * @param {string} string Source string - * @param {Uint8Array} buffer Destination buffer - * @param {number} offset Destination offset - * @returns {number} Bytes written - */ -utf8.write = function utf8_write(string, buffer, offset) { - var start = offset, - c1, // character 1 - c2; // character 2 - for (var i = 0; i < string.length; ++i) { - c1 = string.charCodeAt(i); - if (c1 < 128) { - buffer[offset++] = c1; - } else if (c1 < 2048) { - buffer[offset++] = c1 >> 6 | 192; - buffer[offset++] = c1 & 63 | 128; - } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) { - c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF); - ++i; - buffer[offset++] = c1 >> 18 | 240; - buffer[offset++] = c1 >> 12 & 63 | 128; - buffer[offset++] = c1 >> 6 & 63 | 128; - buffer[offset++] = c1 & 63 | 128; - } else { - buffer[offset++] = c1 >> 12 | 224; - buffer[offset++] = c1 >> 6 & 63 | 128; - buffer[offset++] = c1 & 63 | 128; - } - } - return offset - start; -}; +"use strict"; + +/** + * A minimal UTF8 implementation for number arrays. + * @memberof util + * @namespace + */ +var utf8 = exports; + +/** + * Calculates the UTF8 byte length of a string. 
+ * @param {string} string String + * @returns {number} Byte length + */ +utf8.length = function utf8_length(string) { + var len = 0, + c = 0; + for (var i = 0; i < string.length; ++i) { + c = string.charCodeAt(i); + if (c < 128) + len += 1; + else if (c < 2048) + len += 2; + else if ((c & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) { + ++i; + len += 4; + } else + len += 3; + } + return len; +}; + +/** + * Reads UTF8 bytes as a string. + * @param {Uint8Array} buffer Source buffer + * @param {number} start Source start + * @param {number} end Source end + * @returns {string} String read + */ +utf8.read = function utf8_read(buffer, start, end) { + var len = end - start; + if (len < 1) + return ""; + var parts = null, + chunk = [], + i = 0, // char offset + t; // temporary + while (start < end) { + t = buffer[start++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | buffer[start++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000; + chunk[i++] = 0xD800 + (t >> 10); + chunk[i++] = 0xDC00 + (t & 1023); + } else + chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63; + if (i > 8191) { + (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk)); + i = 0; + } + } + if (parts) { + if (i) + parts.push(String.fromCharCode.apply(String, chunk.slice(0, i))); + return parts.join(""); + } + return String.fromCharCode.apply(String, chunk.slice(0, i)); +}; + +/** + * Writes a string as UTF8 bytes. + * @param {string} string Source string + * @param {Uint8Array} buffer Destination buffer + * @param {number} offset Destination offset + * @returns {number} Bytes written + */ +utf8.write = function utf8_write(string, buffer, offset) { + var start = offset, + c1, // character 1 + c2; // character 2 + for (var i = 0; i < string.length; ++i) { + c1 = string.charCodeAt(i); + if (c1 < 128) { + buffer[offset++] = c1; + } else if (c1 < 2048) { + buffer[offset++] = c1 >> 6 | 192; + buffer[offset++] = c1 & 63 | 128; + } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) { + c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF); + ++i; + buffer[offset++] = c1 >> 18 | 240; + buffer[offset++] = c1 >> 12 & 63 | 128; + buffer[offset++] = c1 >> 6 & 63 | 128; + buffer[offset++] = c1 & 63 | 128; + } else { + buffer[offset++] = c1 >> 12 | 224; + buffer[offset++] = c1 >> 6 & 63 | 128; + buffer[offset++] = c1 & 63 | 128; + } + } + return offset - start; +}; }, map: {} }, 'node_modules/@protobufjs/pool/index.js': { factory: function(module, exports, require) { -"use strict"; -module.exports = pool; - -/** - * An allocator as used by {@link util.pool}. - * @typedef PoolAllocator - * @type {function} - * @param {number} size Buffer size - * @returns {Uint8Array} Buffer - */ - -/** - * A slicer as used by {@link util.pool}. - * @typedef PoolSlicer - * @type {function} - * @param {number} start Start offset - * @param {number} end End offset - * @returns {Uint8Array} Buffer slice - * @this {Uint8Array} - */ - -/** - * A general purpose buffer pool. 
- * @memberof util - * @function - * @param {PoolAllocator} alloc Allocator - * @param {PoolSlicer} slice Slicer - * @param {number} [size=8192] Slab size - * @returns {PoolAllocator} Pooled allocator - */ -function pool(alloc, slice, size) { - var SIZE = size || 8192; - var MAX = SIZE >>> 1; - var slab = null; - var offset = SIZE; - return function pool_alloc(size) { - if (size < 1 || size > MAX) - return alloc(size); - if (offset + size > SIZE) { - slab = alloc(SIZE); - offset = 0; - } - var buf = slice.call(slab, offset, offset += size); - if (offset & 7) // align to 32 bit - offset = (offset | 7) + 1; - return buf; - }; -} +"use strict"; +module.exports = pool; + +/** + * An allocator as used by {@link util.pool}. + * @typedef PoolAllocator + * @type {function} + * @param {number} size Buffer size + * @returns {Uint8Array} Buffer + */ + +/** + * A slicer as used by {@link util.pool}. + * @typedef PoolSlicer + * @type {function} + * @param {number} start Start offset + * @param {number} end End offset + * @returns {Uint8Array} Buffer slice + * @this {Uint8Array} + */ + +/** + * A general purpose buffer pool. + * @memberof util + * @function + * @param {PoolAllocator} alloc Allocator + * @param {PoolSlicer} slice Slicer + * @param {number} [size=8192] Slab size + * @returns {PoolAllocator} Pooled allocator + */ +function pool(alloc, slice, size) { + var SIZE = size || 8192; + var MAX = SIZE >>> 1; + var slab = null; + var offset = SIZE; + return function pool_alloc(size) { + if (size < 1 || size > MAX) + return alloc(size); + if (offset + size > SIZE) { + slab = alloc(SIZE); + offset = 0; + } + var buf = slice.call(slab, offset, offset += size); + if (offset & 7) // align to 32 bit + offset = (offset | 7) + 1; + return buf; + }; +} }, map: {} }, 'node_modules/protobufjs/src/util/longbits.js': { factory: function(module, exports, require) { @@ -16139,7 +16140,7 @@

Derive VM

}, map: {"protobufjs/minimal":"node_modules/protobufjs/minimal.js"} }, 'build/ts/templates/app.html': { factory: function(module, exports, require) { -module.exports = "\n\n
\n
\n
\n
\n

dstack-vmm

\n \n v{{ version.version }}\n \n \n
\n
\n \n
\n \n
\n \n \n \n \n
\n
\n
\n
\n
\n\n \n\n \n\n \n\n
\n
\n
\n \n \n \n \n \n \n
\n
\n Total Instances:\n {{ totalVMs }}\n
\n
\n
\n
\n \n
\n \n /\n {{ maxPage || 1 }}\n
\n \n \n
\n
\n
\n\n
\n
\n
\n
Name
\n
Status
\n
Uptime
\n
View
\n
Actions
\n
\n\n
\n
\n
\n \n
\n
\n {{ vm.name }}\n
\n
\n \n \n {{ vmStatus(vm) }}\n \n
\n
{{ vm.status !== 'stopped' ? shortUptime(vm.uptime) : '-' }}
\n
\n Logs\n Stderr\n Board\n
\n
\n
\n \n
\n \n \n \n \n \n \n
\n
\n
\n
\n\n
\n
\n
\n VM ID\n
\n {{ vm.id }}\n \n
\n
\n
\n Instance ID\n
\n {{ vm.instance_id }}\n \n
\n -\n
\n
\n App ID\n
\n {{ vm.app_id }}\n \n
\n -\n
\n
\n Image\n {{ vm.configuration?.image }}\n
\n
\n vCPUs\n {{ vm.configuration?.vcpu }}\n
\n
\n Memory\n {{ formatMemory(vm.configuration?.memory) }}\n
\n
\n Swap\n {{ formatMemory(bytesToMB(vm.configuration.swap_size)) }}\n
\n
\n Disk Size\n {{ vm.configuration?.disk_size }} GB\n
\n
\n Disk Type\n {{ vm.configuration?.disk_type || 'virtio-pci' }}\n
\n
\n TEE\n {{ vm.configuration?.no_tee ? 'Disabled' : 'Enabled' }}\n
\n
\n GPUs\n
\n \n All GPUs\n \n
\n
\n \n {{ gpu.slot || gpu.product_id || ('GPU #' + (index + 1)) }}\n \n
\n
\n None\n
\n
\n
\n\n
\n

Port Mappings

\n
\n {{\n port.host_address === '127.0.0.1'\n ? 'Local'\n : (port.host_address === '0.0.0.0' ? 'Public' : port.host_address)\n }}\n {{ port.protocol.toUpperCase() }}: {{ port.host_port }} → {{ port.vm_port }}\n
\n
\n\n
\n

Features

\n {{ getVmFeatures(vm) }}\n
\n\n
\n

Network Interfaces

\n
\n
\n
\n
\n \n \n \n \n {{ iface.name }}\n
\n
\n
\n
\n MAC Address\n {{ iface.mac || '-' }}\n
\n
\n IP Address\n {{ iface.addresses.map(addr => addr.address + '/' + addr.prefix).join(', ') || '-' }}\n
\n
\n
\n
\n \n \n \n
\n
\n RX\n {{ iface.rx_bytes }} bytes\n 0\">({{ iface.rx_errors }} errors)\n
\n
\n
\n
\n \n \n \n
\n
\n TX\n {{ iface.tx_bytes }} bytes\n 0\">({{ iface.tx_errors }} errors)\n
\n
\n
\n
\n
\n
\n
\n

\n \n \n \n \n WireGuard Info\n

\n
{{ networkInfo[vm.id].wg_info }}
\n
\n
\n\n
\n
\n

App Compose

\n
\n \n \n
\n
\n
\n
{{ vm.appCompose?.docker_compose_file || 'Docker Compose content not available' }}
\n
\n
\n\n
\n
\n

User Config

\n \n
\n
{{ vm.configuration.user_config }}
\n
\n\n
\n \n \n \n
\n
\n
\n
\n\n
\n
\n \n
\n
\n
\n \n
\n
\n
\n {{ errorMessage }}\n \n
\n
\n
\n"; +module.exports = "\n\n
\n
\n
\n
\n

dstack-vmm

\n \n v{{ version.version }}\n \n \n
\n
\n \n
\n \n
\n \n \n \n \n
\n
\n
\n
\n
\n\n \n\n \n\n \n\n
\n
\n
\n \n \n \n \n \n \n
\n
\n Total Instances:\n {{ totalVMs }}\n
\n
\n
\n
\n \n
\n \n /\n {{ maxPage || 1 }}\n
\n \n \n
\n
\n
\n\n
\n
\n
\n
Name
\n
Status
\n
Uptime
\n
View
\n
Actions
\n
\n\n
\n
\n
\n \n
\n
\n {{ vm.name }}\n
\n
\n \n \n {{ vmStatus(vm) }}\n \n
\n
{{ vm.status !== 'stopped' ? shortUptime(vm.uptime) : '-' }}
\n
\n Logs\n Stderr\n Board\n
\n
\n
\n \n
\n \n \n \n \n \n \n
\n
\n
\n
\n\n
\n
\n
\n VM ID\n
\n {{ vm.id }}\n \n
\n
\n
\n Instance ID\n
\n {{ vm.instance_id }}\n \n
\n -\n
\n
\n App ID\n
\n {{ vm.app_id }}\n \n
\n -\n
\n
\n Image\n {{ vm.configuration?.image }}\n
\n
\n vCPUs\n {{ vm.configuration?.vcpu }}\n
\n
\n Memory\n {{ formatMemory(vm.configuration?.memory) }}\n
\n
\n Swap\n {{ formatMemory(bytesToMB(vm.configuration.swap_size)) }}\n
\n
\n Disk Size\n {{ vm.configuration?.disk_size }} GB\n
\n
\n Disk Type\n {{ vm.configuration?.disk_type || 'virtio-pci' }}\n
\n
\n TEE\n {{ vm.configuration?.no_tee ? 'Disabled' : 'Enabled' }}\n
\n
\n GPUs\n
\n \n All GPUs\n \n
\n
\n \n {{ gpu.slot || gpu.product_id || ('GPU #' + (index + 1)) }}\n \n
\n
\n None\n
\n
\n
\n\n
\n

Port Mappings

\n
\n {{\n port.host_address === '127.0.0.1'\n ? 'Local'\n : (port.host_address === '0.0.0.0' ? 'Public' : port.host_address)\n }}\n {{ port.protocol.toUpperCase() }}: {{ port.host_port }} → {{ port.vm_port }}\n
\n
\n\n
\n

Features

\n {{ getVmFeatures(vm) }}\n
\n\n
\n

Network Interfaces

\n
\n
\n
\n
\n \n \n \n \n {{ iface.name }}\n
\n
\n
\n
\n MAC Address\n {{ iface.mac || '-' }}\n
\n
\n IP Address\n {{ iface.addresses.map(addr => addr.address + '/' + addr.prefix).join(', ') || '-' }}\n
\n
\n
\n
\n \n \n \n
\n
\n RX\n {{ iface.rx_bytes }} bytes\n 0\">({{ iface.rx_errors }} errors)\n
\n
\n
\n
\n \n \n \n
\n
\n TX\n {{ iface.tx_bytes }} bytes\n 0\">({{ iface.tx_errors }} errors)\n
\n
\n
\n
\n
\n
\n
\n

\n \n \n \n \n WireGuard Info\n

\n
{{ networkInfo[vm.id].wg_info }}
\n
\n
\n\n
\n
\n

App Compose

\n
\n \n \n
\n
\n
\n
{{ vm.appCompose?.docker_compose_file || 'Docker Compose content not available' }}
\n
\n
\n\n
\n
\n

User Config

\n \n
\n
{{ vm.configuration.user_config }}
\n
\n\n
\n \n \n \n
\n
\n
\n
\n\n
\n
\n \n
\n
\n
\n \n
\n
\n
\n {{ errorMessage }}\n \n
\n
\n
\n"; }, map: {} } }; const cache = {};