Skip to content

Commit 32cf107

Browse files
committed
Add tests for subgraph data source trigger scanning
1 parent ca39eb3 commit 32cf107

File tree

2 files changed

+251
-1
lines changed

2 files changed

+251
-1
lines changed

graph/src/blockchain/mock.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ impl Block for MockBlock {
6060
}
6161

6262
    /// Deterministic timestamp for a mock block, derived from its number:
    /// each block is 45 minutes (45 * 60 seconds) after the epoch times its
    /// height, so timestamps are strictly increasing with block number.
    /// NOTE(review): this commit replaces `BlockTime::for_test(&self.ptr())`
    /// with the explicit `since_epoch` computation — confirm callers do not
    /// depend on the old test-time value.
    fn timestamp(&self) -> BlockTime {
        BlockTime::since_epoch(self.ptr().number as i64 * 45 * 60, 0)
    }
6565
}
6666

Lines changed: 250 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,250 @@
1+
use std::{collections::BTreeMap, ops::Range, sync::Arc};
2+
3+
use graph::{
4+
blockchain::{
5+
block_stream::{
6+
EntityOperationKind, EntitySourceOperation, SubgraphTriggerScanRange,
7+
TriggersAdapterWrapper,
8+
},
9+
mock::MockTriggersAdapter,
10+
Block, SubgraphFilter, Trigger,
11+
},
12+
components::store::SourceableStore,
13+
data_source::CausalityRegion,
14+
prelude::{BlockHash, BlockNumber, BlockPtr, DeploymentHash, StoreError, Value},
15+
schema::{EntityType, InputSchema},
16+
};
17+
use slog::Logger;
18+
use tonic::async_trait;
19+
20+
/// In-memory stand-in for a sourceable store, used by the subgraph trigger
/// scanning tests to serve canned entity operations per block.
pub struct MockSourcableStore {
    /// Entity operations keyed by the block number at which they occurred.
    entities: BTreeMap<BlockNumber, Vec<EntitySourceOperation>>,
    /// Schema reported back through `SourceableStore::input_schema`.
    schema: InputSchema,
    /// Current head pointer; `None` means no block has been set yet.
    block_ptr: Option<BlockPtr>,
}
25+
26+
impl MockSourcableStore {
27+
pub fn new(
28+
entities: BTreeMap<BlockNumber, Vec<EntitySourceOperation>>,
29+
schema: InputSchema,
30+
block_ptr: Option<BlockPtr>,
31+
) -> Self {
32+
Self {
33+
entities,
34+
schema,
35+
block_ptr,
36+
}
37+
}
38+
39+
pub fn set_block_ptr(&mut self, ptr: BlockPtr) {
40+
self.block_ptr = Some(ptr);
41+
}
42+
43+
pub fn clear_block_ptr(&mut self) {
44+
self.block_ptr = None;
45+
}
46+
47+
pub fn increment_block(&mut self) -> Result<(), &'static str> {
48+
if let Some(ptr) = &self.block_ptr {
49+
let new_number = ptr.number + 1;
50+
self.block_ptr = Some(BlockPtr::new(ptr.hash.clone(), new_number));
51+
Ok(())
52+
} else {
53+
Err("No block pointer set")
54+
}
55+
}
56+
57+
pub fn decrement_block(&mut self) -> Result<(), &'static str> {
58+
if let Some(ptr) = &self.block_ptr {
59+
if ptr.number == 0 {
60+
return Err("Block number already at 0");
61+
}
62+
let new_number = ptr.number - 1;
63+
self.block_ptr = Some(BlockPtr::new(ptr.hash.clone(), new_number));
64+
Ok(())
65+
} else {
66+
Err("No block pointer set")
67+
}
68+
}
69+
}
70+
71+
#[async_trait]
72+
impl SourceableStore for MockSourcableStore {
73+
fn get_range(
74+
&self,
75+
entity_types: Vec<EntityType>,
76+
_causality_region: CausalityRegion,
77+
block_range: Range<BlockNumber>,
78+
) -> Result<BTreeMap<BlockNumber, Vec<EntitySourceOperation>>, StoreError> {
79+
Ok(self
80+
.entities
81+
.range(block_range)
82+
.map(|(block_num, operations)| {
83+
let filtered_ops: Vec<EntitySourceOperation> = operations
84+
.iter()
85+
.filter(|op| entity_types.contains(&op.entity_type))
86+
.cloned()
87+
.collect();
88+
(*block_num, filtered_ops)
89+
})
90+
.filter(|(_, ops)| !ops.is_empty())
91+
.collect())
92+
}
93+
94+
fn input_schema(&self) -> InputSchema {
95+
self.schema.clone()
96+
}
97+
98+
async fn block_ptr(&self) -> Result<Option<BlockPtr>, StoreError> {
99+
Ok(self.block_ptr.clone())
100+
}
101+
}
102+
103+
/// End-to-end check of `TriggersAdapterWrapper::blocks_with_subgraph_triggers`:
/// entity operations are stored at blocks 1 (one User + one Post) and 2 (one
/// User), the scan covers blocks 1 through 3, and the filter requests only
/// `User` entities — so Post operations must be excluded from the triggers.
#[tokio::test]
async fn test_triggers_adapter_with_entities() {
    let id = DeploymentHash::new("test_deployment").unwrap();
    // Two entity types so the filter has something to exclude.
    let schema = InputSchema::parse_latest(
        r#"
        type User @entity {
            id: String!
            name: String!
            age: Int
        }
        type Post @entity {
            id: String!
            title: String!
            author: String!
        }
        "#,
        id.clone(),
    )
    .unwrap();

    let user1 = schema
        .make_entity(vec![
            ("id".into(), Value::String("user1".to_owned())),
            ("name".into(), Value::String("Alice".to_owned())),
            ("age".into(), Value::Int(30)),
        ])
        .unwrap();

    let user2 = schema
        .make_entity(vec![
            ("id".into(), Value::String("user2".to_owned())),
            ("name".into(), Value::String("Bob".to_owned())),
            ("age".into(), Value::Int(25)),
        ])
        .unwrap();

    let post = schema
        .make_entity(vec![
            ("id".into(), Value::String("post1".to_owned())),
            ("title".into(), Value::String("Test Post".to_owned())),
            ("author".into(), Value::String("user1".to_owned())),
        ])
        .unwrap();

    let user_type = schema.entity_type("User").unwrap();
    let post_type = schema.entity_type("Post").unwrap();

    // Distinct vids let the assertions below identify each operation.
    let entity1 = EntitySourceOperation {
        entity_type: user_type.clone(),
        entity: user1,
        entity_op: EntityOperationKind::Create,
        vid: 1,
    };

    let entity2 = EntitySourceOperation {
        entity_type: user_type,
        entity: user2,
        entity_op: EntityOperationKind::Create,
        vid: 2,
    };

    let post_entity = EntitySourceOperation {
        entity_type: post_type,
        entity: post,
        entity_op: EntityOperationKind::Create,
        vid: 3,
    };

    let mut entities = BTreeMap::new();
    entities.insert(1, vec![entity1, post_entity]); // Block 1 has both User and Post
    entities.insert(2, vec![entity2]); // Block 2 has only User

    // Create block hash and store
    let hash_bytes: [u8; 32] = [0u8; 32];
    let block_hash = BlockHash(hash_bytes.to_vec().into_boxed_slice());
    let initial_block = BlockPtr::new(block_hash, 0);
    let store = Arc::new(MockSourcableStore::new(
        entities,
        schema.clone(),
        Some(initial_block),
    ));

    let adapter = Arc::new(MockTriggersAdapter {});
    let wrapper = TriggersAdapterWrapper::new(adapter, vec![store]);

    // Filter only for User entities
    let filter = SubgraphFilter {
        subgraph: id,
        start_block: 0,
        entities: vec!["User".to_string()], // Only monitoring User entities
    };

    let logger = Logger::root(slog::Discard, slog::o!());
    let result = wrapper
        .blocks_with_subgraph_triggers(&logger, &[filter], SubgraphTriggerScanRange::Range(1, 3))
        .await;

    assert!(result.is_ok(), "Failed to get triggers: {:?}", result.err());
    let blocks = result.unwrap();

    // NOTE(review): three blocks come back for Range(1, 3) even though only
    // blocks 1 and 2 carry entities — the range appears to be inclusive and
    // to yield empty blocks as well.
    assert_eq!(blocks.len(), 3, "Should have found three blocks");

    let block1 = &blocks[0];
    assert_eq!(block1.block.number(), 1, "First block should be number 1");
    let triggers1 = &block1.trigger_data;
    assert_eq!(
        triggers1.len(),
        1,
        "Block 1 should have exactly one trigger (User, not Post)"
    );

    // The Post operation at block 1 must have been filtered out.
    if let Trigger::Subgraph(trigger_data) = &triggers1[0] {
        assert_eq!(
            trigger_data.entity.entity_type.as_str(),
            "User",
            "Trigger should be for User entity"
        );
        assert_eq!(
            trigger_data.entity.vid, 1,
            "Should be the first User entity"
        );
    } else {
        panic!("Expected subgraph trigger");
    }

    let block2 = &blocks[1];
    assert_eq!(block2.block.number(), 2, "Second block should be number 2");
    let triggers2 = &block2.trigger_data;
    assert_eq!(
        triggers2.len(),
        1,
        "Block 2 should have exactly one trigger"
    );

    if let Trigger::Subgraph(trigger_data) = &triggers2[0] {
        assert_eq!(
            trigger_data.entity.entity_type.as_str(),
            "User",
            "Trigger should be for User entity"
        );
        assert_eq!(
            trigger_data.entity.vid, 2,
            "Should be the second User entity"
        );
    } else {
        panic!("Expected subgraph trigger");
    }
}

0 commit comments

Comments
 (0)