@@ -1,34 +1,36 @@
 //! This module defines an SSA pass that detects if the final function has any subgraphs independent from inputs and outputs.
 //! If this is the case, then part of the final circuit can be completely replaced by any other passing circuit, since there are no constraints ensuring connections.
 //! So the compiler informs the developer of this as a bug
-use im::HashMap;
-
 use crate::errors::{InternalBug, SsaReport};
 use crate::ssa::ir::basic_block::BasicBlockId;
 use crate::ssa::ir::function::RuntimeType;
 use crate::ssa::ir::function::{Function, FunctionId};
 use crate::ssa::ir::instruction::{Instruction, InstructionId, Intrinsic};
 use crate::ssa::ir::value::{Value, ValueId};
 use crate::ssa::ssa_gen::Ssa;
+use im::HashMap;
+use rayon::prelude::*;
 use std::collections::{BTreeMap, HashSet};
 
 impl Ssa {
     /// Go through each top-level non-brillig function and detect if it has independent subgraphs
     #[tracing::instrument(level = "trace", skip(self))]
     pub(crate) fn check_for_underconstrained_values(&mut self) -> Vec<SsaReport> {
-        let mut warnings: Vec<SsaReport> = Vec::new();
-        for function in self.functions.values() {
-            match function.runtime() {
-                RuntimeType::Acir { .. } => {
-                    warnings.extend(check_for_underconstrained_values_within_function(
-                        function,
+        let functions_id = self.functions.values().map(|f| f.id().to_usize()).collect::<Vec<_>>();
+        functions_id
+            .iter()
+            .par_bridge()
+            .flat_map(|fid| {
+                let function_to_process = &self.functions[&FunctionId::new(*fid)];
+                match function_to_process.runtime() {
+                    RuntimeType::Acir { .. } => check_for_underconstrained_values_within_function(
+                        function_to_process,
                         &self.functions,
-                    ));
+                    ),
+                    RuntimeType::Brillig => Vec::new(),
                 }
-                RuntimeType::Brillig => (),
-            }
-        }
-        warnings
+            })
+            .collect()
     }
 }
 
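The hunk above replaces the sequential loop over functions with a rayon pipeline: function ids are collected first, then each ACIR function is checked on the thread pool via flat_map, with Brillig functions contributing an empty Vec. Below is a minimal standalone sketch of that par_bridge + flat_map shape, using toy ids and strings instead of the real Function and SsaReport types; it illustrates the pattern and is not code from the repository.

use rayon::prelude::*;

fn main() {
    // Stand-ins for function ids; the real pass collects ids up front so the
    // parallel closure only needs shared (read-only) access to the functions map.
    let ids: Vec<usize> = (0..8).collect();
    let reports: Vec<String> = ids
        .iter()
        .par_bridge() // bridge the sequential iterator onto rayon's thread pool
        .flat_map(|id| {
            if id % 2 == 0 {
                vec![format!("report for function {id}")] // "ACIR": yields warnings
            } else {
                Vec::new() // "Brillig": contributes nothing
            }
        })
        .collect();
    assert_eq!(reports.len(), 4);
}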
@@ -88,9 +90,8 @@ impl Context {
             self.visited_blocks.insert(block);
             self.connect_value_ids_in_block(function, block, all_functions);
         }
-
         // Merge ValueIds into sets, where each original small set of ValueIds is merged with another set if they intersect
-        self.merge_sets();
+        self.value_sets = Self::merge_sets_par(&self.value_sets);
     }
 
     /// Find sets that contain input or output value of the function
@@ -267,14 +268,13 @@ impl Context {
     /// Merge all small sets into larger ones based on whether the sets intersect or not
     ///
    /// If two small sets have a common ValueId, we merge them into one
-    fn merge_sets(&mut self) {
+    fn merge_sets(current: &[HashSet<ValueId>]) -> Vec<HashSet<ValueId>> {
         let mut new_set_id: usize = 0;
         let mut updated_sets: HashMap<usize, HashSet<ValueId>> = HashMap::new();
         let mut value_dictionary: HashMap<ValueId, usize> = HashMap::new();
         let mut parsed_value_set: HashSet<ValueId> = HashSet::new();
 
-        // Go through each set
-        for set in self.value_sets.iter() {
+        for set in current.iter() {
             // Check if the set has any of the ValueIds we've encountered at previous iterations
             let intersection: HashSet<ValueId> =
                 set.intersection(&parsed_value_set).copied().collect();
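The function changed above folds every small set into a growing collection, joining any two sets that share a ValueId so that transitively connected values end up in one set. Here is a self-contained sketch of that merging idea, using a hypothetical merge_intersecting helper over plain u32 ids; the real merge_sets works the same way conceptually but indexes the groups through updated_sets and value_dictionary instead of re-scanning them.

use std::collections::HashSet;

// Hypothetical helper: fold each set into the result, absorbing every
// previously built group that overlaps it, so transitively linked sets
// collapse into a single group.
fn merge_intersecting(sets: &[HashSet<u32>]) -> Vec<HashSet<u32>> {
    let mut merged: Vec<HashSet<u32>> = Vec::new();
    for set in sets {
        // Split existing groups into those touching `set` and those not.
        let (overlapping, disjoint): (Vec<_>, Vec<_>) =
            merged.into_iter().partition(|group| !group.is_disjoint(set));
        let mut combined = set.clone();
        for group in overlapping {
            combined.extend(group);
        }
        merged = disjoint;
        merged.push(combined);
    }
    merged
}

fn main() {
    let sets = vec![
        HashSet::from([1, 2]),
        HashSet::from([3, 4]),
        HashSet::from([2, 3]), // bridges the first two sets
        HashSet::from([9]),
    ];
    // Expect two groups: {1, 2, 3, 4} and {9}.
    assert_eq!(merge_intersecting(&sets).len(), 2);
}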
@@ -327,7 +327,26 @@ impl Context {
             }
             updated_sets.insert(largest_set_index, largest_set);
         }
-        self.value_sets = updated_sets.values().cloned().collect();
+        updated_sets.values().cloned().collect()
+    }
+
+    /// Parallel version of merge_sets
+    /// The sets are merged by chunks, and then the chunks are merged together
+    fn merge_sets_par(sets: &[HashSet<ValueId>]) -> Vec<HashSet<ValueId>> {
+        let mut sets = sets.to_owned();
+        let mut len = sets.len();
+        let mut prev_len = len + 1;
+
+        while len > 1000 && len < prev_len {
+            sets = sets.par_chunks(1000).flat_map(Self::merge_sets).collect();
+
+            prev_len = len;
+            len = sets.len();
+        }
+        // TODO: if prev_len >= len, this means we cannot effectively merge the sets anymore
+        // We should instead partition the sets into disjoint chunks and work on those chunks,
+        // but for now we fallback to the non-parallel implementation
+        Self::merge_sets(&sets)
     }
 }
 #[cfg(test)]
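The new merge_sets_par above follows a chunk-and-repeat scheme: merge 1000-set chunks in parallel, keep looping only while the total number of sets shrinks, and finish with one sequential pass (the fallback the TODO refers to) so that groups straddling chunk boundaries still get joined. Below is a sketch of that driver, reusing the hypothetical merge_intersecting helper from the previous example; the 1000-set chunk size mirrors the constant in the diff.

use rayon::prelude::*;
use std::collections::HashSet;

// Chunk-and-repeat driver: each round merges fixed-size chunks in parallel;
// once the count stops shrinking (or is already small), one final sequential
// pass merges whatever the chunked rounds could not.
fn merge_par(mut sets: Vec<HashSet<u32>>) -> Vec<HashSet<u32>> {
    let mut prev_len = sets.len() + 1;
    while sets.len() > 1000 && sets.len() < prev_len {
        prev_len = sets.len();
        sets = sets.par_chunks(1000).flat_map(merge_intersecting).collect();
    }
    merge_intersecting(&sets)
}

The prev_len guard guarantees termination: every round either strictly reduces the number of sets or the loop exits and the sequential pass takes over.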