From 2d4a193c8af50bff5f28ad33c8e38d4568686e50 Mon Sep 17 00:00:00 2001
From: Swenschaeferjohann
Date: Sun, 23 Nov 2025 13:05:33 -0500
Subject: [PATCH 1/2] parse ctoken accounts explicitly

---
 .gitignore                        |  3 +++
 src/common/typedefs/account/v1.rs | 20 ++++++++++++++------
 src/common/typedefs/account/v2.rs | 20 ++++++++++++++------
 3 files changed, 31 insertions(+), 12 deletions(-)

diff --git a/.gitignore b/.gitignore
index 8196c671..2e546578 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,3 +13,6 @@ test-ledger/
 minio
 test.db
 docker-compose.yml
+
+.cursor
+**/photon.log
\ No newline at end of file
diff --git a/src/common/typedefs/account/v1.rs b/src/common/typedefs/account/v1.rs
index 77e90793..c89e2f59 100644
--- a/src/common/typedefs/account/v1.rs
+++ b/src/common/typedefs/account/v1.rs
@@ -33,12 +33,20 @@ pub struct Account {
 impl Account {
     pub fn parse_token_data(&self) -> Result<Option<TokenData>, IngesterError> {
         match self.data.as_ref() {
-            Some(data) if self.owner.0 == COMPRESSED_TOKEN_PROGRAM => {
-                let data_slice = data.data.0.as_slice();
-                let token_data = TokenData::try_from_slice(data_slice).map_err(|e| {
-                    IngesterError::ParserError(format!("Failed to parse token data: {:?}", e))
-                })?;
-                Ok(Some(token_data))
+            Some(data) => {
+                let is_v1_token = data.discriminator.0.to_le_bytes() == [2, 0, 0, 0, 0, 0, 0, 0];
+                let is_v2_token = data.discriminator.0.to_le_bytes() == [0, 0, 0, 0, 0, 0, 0, 3];
+                let is_sha_flat_token = data.discriminator.0.to_le_bytes() == [0, 0, 0, 0, 0, 0, 0, 4];
+
+                if self.owner.0 == COMPRESSED_TOKEN_PROGRAM && (is_v1_token || is_v2_token || is_sha_flat_token) {
+                    let data_slice = data.data.0.as_slice();
+                    let token_data = TokenData::try_from_slice(data_slice).map_err(|e| {
+                        IngesterError::ParserError(format!("Failed to parse token data: {:?}", e))
+                    })?;
+                    Ok(Some(token_data))
+                } else {
+                    Ok(None)
+                }
             }
             _ => Ok(None),
         }
diff --git a/src/common/typedefs/account/v2.rs b/src/common/typedefs/account/v2.rs
index d377572b..dd7dfe3a 100644
--- a/src/common/typedefs/account/v2.rs
+++ b/src/common/typedefs/account/v2.rs
@@ -41,12 +41,20 @@ pub struct AccountV2 {
 impl AccountV2 {
     pub fn parse_token_data(&self) -> Result<Option<TokenData>, IngesterError> {
         match self.data.as_ref() {
-            Some(data) if self.owner.0 == COMPRESSED_TOKEN_PROGRAM => {
-                let data_slice = data.data.0.as_slice();
-                let token_data = TokenData::try_from_slice(data_slice).map_err(|e| {
-                    IngesterError::ParserError(format!("Failed to parse token data: {:?}", e))
-                })?;
-                Ok(Some(token_data))
+            Some(data) => {
+                let is_v1_token = data.discriminator.0.to_le_bytes() == [2, 0, 0, 0, 0, 0, 0, 0];
+                let is_v2_token = data.discriminator.0.to_le_bytes() == [0, 0, 0, 0, 0, 0, 0, 3];
+                let is_sha_flat_token = data.discriminator.0.to_le_bytes() == [0, 0, 0, 0, 0, 0, 0, 4];
+
+                if self.owner.0 == COMPRESSED_TOKEN_PROGRAM && (is_v1_token || is_v2_token || is_sha_flat_token) {
+                    let data_slice = data.data.0.as_slice();
+                    let token_data = TokenData::try_from_slice(data_slice).map_err(|e| {
+                        IngesterError::ParserError(format!("Failed to parse token data: {:?}", e))
+                    })?;
+                    Ok(Some(token_data))
+                } else {
+                    Ok(None)
+                }
             }
             _ => Ok(None),
         }

From 21c40cb22d7a9cb2635dbd0d04dc807f85da370b Mon Sep 17 00:00:00 2001
From: Swenschaeferjohann
Date: Sun, 23 Nov 2025 13:49:09 -0500
Subject: [PATCH 2/2] bump

---
 Cargo.lock | 2 +-
 Cargo.toml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 8e483cee..d72356ad 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4063,7 +4063,7 @@ dependencies = [
 
 [[package]]
 name = "photon-indexer"
-version = "0.51.0"
+version = "0.51.1"
 dependencies = [
  "anyhow",
  "ark-bn254 0.5.0",
diff --git a/Cargo.toml b/Cargo.toml
index 0c6f08a2..b74f4ef7 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,7 +7,7 @@ name = "photon-indexer"
 publish = true
 readme = "README.md"
 repository = "https://github.com/helius-labs/photon"
-version = "0.51.0"
+version = "0.51.1"
 
 [[bin]]
 name = "photon"