BinaryNetwork
Binary network simulations for clustered E/I models.
The package contains two main layers:
- BinaryNetwork.BinaryNetwork: generic binary-network building blocks
- BinaryNetwork.ClusteredEI_network: repository-specific clustered E/I model assembly
Typical workflow:
import numpy as np
from BinaryNetwork import ClusteredEI_network
np.random.seed(3)
network = ClusteredEI_network(parameter)
network.initialize(weight_mode="dense")
initial_state = network.state.copy()
network.enable_diff_logging(steps=1000)
network.run(1000, batch_size=128)
updates, deltas = network.consume_diff_log()
rates = network.population_rates_from_diff_logs(
initial_state,
updates,
deltas,
sample_interval=10,
)
The recommended trace format in this repository stores:
- initial_state
- state_updates
- state_deltas
- optionally derived spike_times and spike_ids
This keeps simulations compact and allows later reconstruction of full sampled states, onset events, or population rates.
Regenerating docs:
python scripts/generate_api_docs.py
1"""Binary network simulations for clustered E/I models. 2 3The package contains two main layers: 4 5- `BinaryNetwork.BinaryNetwork`: generic binary-network building blocks 6- `BinaryNetwork.ClusteredEI_network`: repository-specific clustered E/I model assembly 7 8Typical workflow: 9 10```python 11import numpy as np 12 13from BinaryNetwork import ClusteredEI_network 14 15np.random.seed(3) 16network = ClusteredEI_network(parameter) 17network.initialize(weight_mode="dense") 18initial_state = network.state.copy() 19 20network.enable_diff_logging(steps=1000) 21network.run(1000, batch_size=128) 22updates, deltas = network.consume_diff_log() 23 24rates = network.population_rates_from_diff_logs( 25 initial_state, 26 updates, 27 deltas, 28 sample_interval=10, 29) 30``` 31 32The recommended trace format in this repository stores: 33 34- `initial_state` 35- `state_updates` 36- `state_deltas` 37- optionally derived `spike_times` and `spike_ids` 38 39This keeps simulations compact and allows later reconstruction of full sampled 40states, onset events, or population rates. 41 42Regenerating docs: 43 44```bash 45python scripts/generate_api_docs.py 46``` 47""" 48 49from .BinaryNetwork import ( 50 BinaryNeuronPopulation, 51 BackgroundActivity, 52 PairwiseBernoulliSynapse, 53 PoissonSynapse, 54 FixedIndegreeSynapse, 55 AllToAllSynapse, 56 BinaryNetwork, 57) 58from .ClusteredEI_network import ClusteredEI_network 59 60__all__ = [ 61 "BinaryNeuronPopulation", 62 "BackgroundActivity", 63 "PairwiseBernoulliSynapse", 64 "PoissonSynapse", 65 "FixedIndegreeSynapse", 66 "AllToAllSynapse", 67 "BinaryNetwork", 68 "ClusteredEI_network", 69]
291class BinaryNeuronPopulation(Neuron): 292 """Binary neuron population with threshold activation and configurable initializer. 293 294 Examples 295 -------- 296 >>> net = BinaryNetwork("demo") 297 >>> pop = BinaryNeuronPopulation(net, N=3, threshold=0.5, initializer=0) 298 >>> net.add_population(pop) 299 <...BinaryNeuronPopulation object...> 300 """ 301 def __init__( 302 self, 303 reference, 304 N=1, 305 threshold=1.0, 306 name="Binary Neuron Population", 307 tau=1.0, 308 initializer=None, 309 **kwargs, 310 ): 311 super().__init__(reference, N, name, tau=tau) 312 self.threshold = float(threshold) 313 self.initializer = initializer 314 315 def update(self, weights=None, state=None, index=None, input_value=None, **kwargs): 316 if input_value is None: 317 if weights is None or state is None: 318 raise ValueError("weights and state have to be provided when input_value is not set") 319 input_value = np.sum(weights * state) 320 return np.heaviside(input_value - self.threshold, 0) 321 322 def _initial_state(self): 323 if callable(self.initializer): 324 values = np.asarray(self.initializer(self.N), dtype=np.int16) 325 if values.size != self.N: 326 raise ValueError("Initializer must return {} entries".format(self.N)) 327 return values 328 if self.initializer is None: 329 return np.random.choice([0, 1], size=self.N, p=[0.8, 0.2]) 330 arr = np.asarray(self.initializer) 331 if arr.size == 1: 332 return np.full(self.N, int(arr.item()), dtype=np.int16) 333 if arr.size != self.N: 334 raise ValueError("Initializer must define exactly {} elements".format(self.N)) 335 return arr.astype(np.int16)
Binary neuron population with threshold activation and configurable initializer.
Examples
>>> net = BinaryNetwork("demo")
>>> pop = BinaryNeuronPopulation(net, N=3, threshold=0.5, initializer=0)
>>> net.add_population(pop)
<...BinaryNeuronPopulation object...>
315 def update(self, weights=None, state=None, index=None, input_value=None, **kwargs): 316 if input_value is None: 317 if weights is None or state is None: 318 raise ValueError("weights and state have to be provided when input_value is not set") 319 input_value = np.sum(weights * state) 320 return np.heaviside(input_value - self.threshold, 0)
338class BackgroundActivity(Neuron): 339 """Background population that provides a constant or stochastic drive.""" 340 def __init__(self, reference, N=1, Activity=0.5, Stochastic=False, name="Background Activity", tau=1.0): 341 super().__init__(reference, N, name, tau=tau) 342 self.Activity = Activity 343 if Stochastic: 344 self.update = self.update_stochastic 345 else: 346 self.update = self.update_deterministic 347 348 def update_stochastic(self, weights=None, state=None, Index=None, **kwargs): 349 return np.random.choice([0, 1], 1) * self.update_deterministic(weights, state, **kwargs) 350 351 def update_deterministic(self, weights=None, state=None, Index=None, **kwargs): 352 # if activity is a float, set all neurons to this activity 353 if isinstance(self.Activity, float): 354 return self.Activity 355 # if activity is a function, set neurons by this function 356 elif callable(self.Activity): 357 return self.Activity() 358 else: 359 return 1.0 360 361 def initialze(self): 362 self.state = np.array([self.update() for _ in range(self.N)])
Background population that provides a constant or stochastic drive.
340 def __init__(self, reference, N=1, Activity=0.5, Stochastic=False, name="Background Activity", tau=1.0): 341 super().__init__(reference, N, name, tau=tau) 342 self.Activity = Activity 343 if Stochastic: 344 self.update = self.update_stochastic 345 else: 346 self.update = self.update_deterministic
351 def update_deterministic(self, weights=None, state=None, Index=None, **kwargs): 352 # if activity is a float, set all neurons to this activity 353 if isinstance(self.Activity, float): 354 return self.Activity 355 # if activity is a function, set neurons by this function 356 elif callable(self.Activity): 357 return self.Activity() 358 else: 359 return 1.0
398class PairwiseBernoulliSynapse(Synapse): 399 """Sample independent Bernoulli connections for every pre/post pair. 400 401 Expected output 402 --------------- 403 After `network.initialize(...)`, the addressed weight block contains zeros 404 and `j`-valued entries sampled independently with probability `p`. 405 """ 406 def __init__(self, reference, pre, post, p=0.5, j=1.0): 407 super().__init__(reference, pre, post) 408 self.p = float(p) 409 self.j = float(j) 410 411 def initialze(self): 412 p = self.p 413 iterations = 1 414 while p > 1: 415 p /= 2.0 416 iterations += 1 417 if self.reference.weight_mode == "sparse": 418 total_pairs = self.post.N * self.pre.N 419 if total_pairs == 0 or p <= 0.0: 420 return 421 data_chunks: List[np.ndarray] = [] 422 row_chunks: List[np.ndarray] = [] 423 col_chunks: List[np.ndarray] = [] 424 for _ in range(iterations): 425 sample_count = int(np.random.binomial(total_pairs, p)) 426 if sample_count <= 0: 427 continue 428 if sample_count >= total_pairs: 429 flat = np.arange(total_pairs, dtype=np.int64) 430 else: 431 flat = np.random.choice(total_pairs, size=sample_count, replace=False).astype(np.int64, copy=False) 432 rows = flat // self.pre.N 433 cols = flat % self.pre.N 434 row_chunks.append(rows) 435 col_chunks.append(cols) 436 data_chunks.append(np.full(sample_count, self.j, dtype=self.reference.weight_dtype)) 437 if data_chunks: 438 self._append_sparse_entries( 439 np.concatenate(row_chunks), 440 np.concatenate(col_chunks), 441 np.concatenate(data_chunks), 442 ) 443 return 444 shape = (self.post.N, self.pre.N) 445 block = np.zeros(shape, dtype=self.reference.weight_dtype) 446 for _ in range(iterations): 447 draws = (np.random.random(size=shape) < p).astype(block.dtype, copy=False) 448 block += draws * self.j 449 self._write_block(block)
Sample independent Bernoulli connections for every pre/post pair.
Expected output
After network.initialize(...), the addressed weight block contains zeros
and j-valued entries sampled independently with probability p.
411 def initialze(self): 412 p = self.p 413 iterations = 1 414 while p > 1: 415 p /= 2.0 416 iterations += 1 417 if self.reference.weight_mode == "sparse": 418 total_pairs = self.post.N * self.pre.N 419 if total_pairs == 0 or p <= 0.0: 420 return 421 data_chunks: List[np.ndarray] = [] 422 row_chunks: List[np.ndarray] = [] 423 col_chunks: List[np.ndarray] = [] 424 for _ in range(iterations): 425 sample_count = int(np.random.binomial(total_pairs, p)) 426 if sample_count <= 0: 427 continue 428 if sample_count >= total_pairs: 429 flat = np.arange(total_pairs, dtype=np.int64) 430 else: 431 flat = np.random.choice(total_pairs, size=sample_count, replace=False).astype(np.int64, copy=False) 432 rows = flat // self.pre.N 433 cols = flat % self.pre.N 434 row_chunks.append(rows) 435 col_chunks.append(cols) 436 data_chunks.append(np.full(sample_count, self.j, dtype=self.reference.weight_dtype)) 437 if data_chunks: 438 self._append_sparse_entries( 439 np.concatenate(row_chunks), 440 np.concatenate(col_chunks), 441 np.concatenate(data_chunks), 442 ) 443 return 444 shape = (self.post.N, self.pre.N) 445 block = np.zeros(shape, dtype=self.reference.weight_dtype) 446 for _ in range(iterations): 447 draws = (np.random.random(size=shape) < p).astype(block.dtype, copy=False) 448 block += draws * self.j 449 self._write_block(block)
452class PoissonSynapse(Synapse): 453 """Sample Poisson-distributed multi-edge counts per pre/post pair. 454 455 Expected output 456 --------------- 457 The addressed weight block contains non-negative multiples of `j` with 458 Poisson-distributed counts. 459 """ 460 def __init__(self, reference, pre, post, rate=0.5, j=1.0): 461 super().__init__(reference, pre, post) 462 self.rate = float(rate) 463 self.j = float(j) 464 465 def initialze(self): 466 if self.reference.weight_mode == "sparse": 467 total_pairs = self.post.N * self.pre.N 468 if total_pairs == 0 or self.rate <= 0.0: 469 return 470 sample_count = int(np.random.poisson(lam=self.rate * total_pairs)) 471 if sample_count <= 0: 472 return 473 flat = np.random.randint(total_pairs, size=sample_count, dtype=np.int64) 474 unique, counts = _compress_flat_samples(flat, total_pairs) 475 rows = unique // self.pre.N 476 cols = unique % self.pre.N 477 values = counts.astype(self.reference.weight_dtype, copy=False) * self.j 478 self._append_sparse_entries(rows, cols, values) 479 return 480 shape = (self.post.N, self.pre.N) 481 samples = np.random.poisson(lam=self.rate, size=shape).astype(self.reference.weight_dtype, copy=False) 482 self._write_block(samples * self.j)
Sample Poisson-distributed multi-edge counts per pre/post pair.
Expected output
The addressed weight block contains non-negative multiples of j with
Poisson-distributed counts.
465 def initialze(self): 466 if self.reference.weight_mode == "sparse": 467 total_pairs = self.post.N * self.pre.N 468 if total_pairs == 0 or self.rate <= 0.0: 469 return 470 sample_count = int(np.random.poisson(lam=self.rate * total_pairs)) 471 if sample_count <= 0: 472 return 473 flat = np.random.randint(total_pairs, size=sample_count, dtype=np.int64) 474 unique, counts = _compress_flat_samples(flat, total_pairs) 475 rows = unique // self.pre.N 476 cols = unique % self.pre.N 477 values = counts.astype(self.reference.weight_dtype, copy=False) * self.j 478 self._append_sparse_entries(rows, cols, values) 479 return 480 shape = (self.post.N, self.pre.N) 481 samples = np.random.poisson(lam=self.rate, size=shape).astype(self.reference.weight_dtype, copy=False) 482 self._write_block(samples * self.j)
485class FixedIndegreeSynapse(Synapse): 486 """Connect each target neuron to a fixed number of randomly drawn inputs. 487 488 Expected output 489 --------------- 490 Each target row receives approximately `round(p * N_pre)` incoming entries. 491 When `multapses` is `True`, presynaptic partners are sampled with replacement. 492 When `multapses` is `False`, partners are sampled without replacement. 493 """ 494 def __init__(self, reference, pre, post, p=0.5, j=1.0, multapses=True): 495 super().__init__(reference, pre, post) 496 self.p = float(p) 497 self.j = float(j) 498 self.multapses = bool(multapses) 499 500 def initialze(self): 501 p = max(self.p, 0.0) 502 target_count = int(round(p * self.pre.N)) 503 target_count = max(target_count, 0) 504 if target_count == 0: 505 return 506 if (not self.multapses) and target_count > self.pre.N: 507 raise ValueError( 508 f"Fixed-indegree sampling without multapses requested indegree {target_count} " 509 f"from only {self.pre.N} presynaptic neurons." 510 ) 511 if self.reference.weight_mode == "sparse": 512 rows = np.repeat(np.arange(self.post.N, dtype=np.int64), target_count) 513 if self.multapses: 514 cols = np.random.randint(self.pre.N, size=self.post.N * target_count, dtype=np.int64) 515 else: 516 col_chunks = [ 517 np.random.choice(self.pre.N, size=target_count, replace=False).astype(np.int64, copy=False) 518 for _ in range(self.post.N) 519 ] 520 cols = np.concatenate(col_chunks) if col_chunks else np.zeros(0, dtype=np.int64) 521 values = np.full(rows.size, self.j, dtype=self.reference.weight_dtype) 522 self._append_sparse_entries(rows, cols, values) 523 return 524 block = np.zeros((self.post.N, self.pre.N), dtype=self.reference.weight_dtype) 525 for tgt in range(self.post.N): 526 pres = np.random.choice(self.pre.N, size=target_count, replace=self.multapses) 527 np.add.at(block[tgt], pres, self.j) 528 self._write_block(block)
Connect each target neuron to a fixed number of randomly drawn inputs.
Expected output
Each target row receives approximately round(p * N_pre) incoming entries.
When multapses is True, presynaptic partners are sampled with replacement.
When multapses is False, partners are sampled without replacement.
500 def initialze(self): 501 p = max(self.p, 0.0) 502 target_count = int(round(p * self.pre.N)) 503 target_count = max(target_count, 0) 504 if target_count == 0: 505 return 506 if (not self.multapses) and target_count > self.pre.N: 507 raise ValueError( 508 f"Fixed-indegree sampling without multapses requested indegree {target_count} " 509 f"from only {self.pre.N} presynaptic neurons." 510 ) 511 if self.reference.weight_mode == "sparse": 512 rows = np.repeat(np.arange(self.post.N, dtype=np.int64), target_count) 513 if self.multapses: 514 cols = np.random.randint(self.pre.N, size=self.post.N * target_count, dtype=np.int64) 515 else: 516 col_chunks = [ 517 np.random.choice(self.pre.N, size=target_count, replace=False).astype(np.int64, copy=False) 518 for _ in range(self.post.N) 519 ] 520 cols = np.concatenate(col_chunks) if col_chunks else np.zeros(0, dtype=np.int64) 521 values = np.full(rows.size, self.j, dtype=self.reference.weight_dtype) 522 self._append_sparse_entries(rows, cols, values) 523 return 524 block = np.zeros((self.post.N, self.pre.N), dtype=self.reference.weight_dtype) 525 for tgt in range(self.post.N): 526 pres = np.random.choice(self.pre.N, size=target_count, replace=self.multapses) 527 np.add.at(block[tgt], pres, self.j) 528 self._write_block(block)
531class AllToAllSynapse(Synapse): 532 """Dense all-to-all block with constant weight value. 533 534 Expected output 535 --------------- 536 The addressed weight block is filled with the constant value `j`. 537 """ 538 def __init__(self, reference, pre, post, j=1.0): 539 super().__init__(reference, pre, post) 540 self.j = float(j) 541 542 def initialze(self): 543 if self.reference.weight_mode == "sparse": 544 total_pairs = self.post.N * self.pre.N 545 if total_pairs == 0: 546 return 547 rows = np.repeat(np.arange(self.post.N, dtype=np.int64), self.pre.N) 548 cols = np.tile(np.arange(self.pre.N, dtype=np.int64), self.post.N) 549 values = np.full(total_pairs, self.j, dtype=self.reference.weight_dtype) 550 self._append_sparse_entries(rows, cols, values) 551 return 552 block = np.full((self.post.N, self.pre.N), self.j, dtype=self.reference.weight_dtype) 553 self._write_block(block)
Dense all-to-all block with constant weight value.
Expected output
The addressed weight block is filled with the constant value j.
542 def initialze(self): 543 if self.reference.weight_mode == "sparse": 544 total_pairs = self.post.N * self.pre.N 545 if total_pairs == 0: 546 return 547 rows = np.repeat(np.arange(self.post.N, dtype=np.int64), self.pre.N) 548 cols = np.tile(np.arange(self.pre.N, dtype=np.int64), self.post.N) 549 values = np.full(total_pairs, self.j, dtype=self.reference.weight_dtype) 550 self._append_sparse_entries(rows, cols, values) 551 return 552 block = np.full((self.post.N, self.pre.N), self.j, dtype=self.reference.weight_dtype) 553 self._write_block(block)
556class BinaryNetwork: 557 """Binary network simulator with dense/sparse backends and diff-log tracing. 558 559 Examples 560 -------- 561 >>> np.random.seed(0) 562 >>> net = BinaryNetwork("demo") 563 >>> exc = BinaryNeuronPopulation(net, N=2, threshold=0.5, tau=5.0, initializer=[0, 1]) 564 >>> inh = BinaryNeuronPopulation(net, N=1, threshold=0.5, tau=5.0, initializer=[0]) 565 >>> net.add_population(exc) 566 <...BinaryNeuronPopulation object...> 567 >>> net.add_population(inh) 568 <...BinaryNeuronPopulation object...> 569 >>> net.add_synapse(AllToAllSynapse(net, exc, exc, j=0.6)) 570 >>> net.initialize(weight_mode="dense", autapse=False) 571 >>> net.state.shape 572 (3,) 573 """ 574 def __init__(self, name="Some Binary Network"): 575 self.name = name 576 self.N = 0 577 self.population: List[Neuron] = [] 578 self.synapses: List[Synapse] = [] 579 self.state: Optional[np.ndarray] = None 580 self.weights_dense: Optional[np.ndarray] = None 581 self.weights_csr = None 582 self.weights_csc = None 583 self.weights = None # compatibility alias 584 self.LUT = None # look up table for the update function 585 self.sim_steps = 0 586 self.population_lookup = None 587 self.neuron_lookup = None 588 self.update_prob = None 589 self.thresholds = None 590 self.field = None 591 self.weight_mode = "dense" 592 self.weight_dtype = np.float32 593 self._population_views = np.zeros((0, 2), dtype=np.int64) 594 self._population_cdf = np.zeros(0, dtype=np.float64) 595 self._sparse_rows: List[np.ndarray] = [] 596 self._sparse_cols: List[np.ndarray] = [] 597 self._sparse_data: List[np.ndarray] = [] 598 self._step_log_buffer: Optional[np.ndarray] = None 599 self._step_log_index = 0 600 self._step_log_dummy = np.zeros((0, 0), dtype=np.int8) 601 self._diff_log_updates: Optional[np.ndarray] = None 602 self._diff_log_deltas: Optional[np.ndarray] = None 603 self._diff_log_index = 0 604 self._diff_log_dummy_updates = np.zeros(0, dtype=np.uint16) 605 self._diff_log_dummy_deltas = np.zeros(0, 
dtype=np.int8) 606 607 def add_population(self, population: Neuron): 608 self.population.append(population) 609 self.N += population.N 610 return population 611 612 def add_synapse(self, synapse: Synapse): 613 self.synapses.append(synapse) 614 615 def initialize( 616 self, 617 autapse: bool = False, 618 weight_mode: str = "auto", 619 ram_budget_gb: float = 12.0, 620 weight_dtype=np.float32, 621 ): 622 """Allocate state, sample connectivity, and prepare the cached input field. 623 624 Examples 625 -------- 626 >>> np.random.seed(1) 627 >>> net = BinaryNetwork("init-demo") 628 >>> pop = BinaryNeuronPopulation(net, N=2, threshold=0.1, initializer=[1, 0]) 629 >>> net.add_population(pop) 630 <...BinaryNeuronPopulation object...> 631 >>> net.initialize(weight_mode="dense") 632 >>> net.field.shape 633 (2,) 634 """ 635 if self.N == 0: 636 raise RuntimeError("Cannot initialize network without populations.") 637 self.weight_dtype = np.dtype(weight_dtype) 638 if self.weight_dtype not in (np.float32, np.float64): 639 raise ValueError("weight_dtype must be float32 or float64.") 640 self.state = np.zeros(self.N, dtype=np.int8) 641 self.field = np.zeros(self.N, dtype=self.weight_dtype) 642 self.update_prob = np.zeros(self.N, dtype=self.weight_dtype) 643 self.population_lookup = np.zeros(self.N, dtype=np.int32) 644 self.neuron_lookup = np.zeros(self.N, dtype=np.int32) 645 self.thresholds = np.zeros(self.N, dtype=self.weight_dtype) 646 pop_count = len(self.population) 647 pop_views = np.zeros((pop_count, 2), dtype=np.int64) 648 pop_update_mass = np.zeros(pop_count, dtype=np.float64) 649 N_start = 0 650 for idx, population in enumerate(self.population): 651 population.set_view([N_start, N_start + population.N]) 652 N_start += population.N 653 population.initialze() 654 pop_views[idx, :] = population.view 655 pop_update_mass[idx] = population.N / max(population.tau, 1e-9) 656 self.update_prob[population.view[0]:population.view[1]] = 1.0 / max(population.tau, 1e-9) 657 
self.population_lookup[population.view[0]:population.view[1]] = idx 658 self.neuron_lookup[population.view[0]:population.view[1]] = np.arange( 659 population.N, dtype=np.int32 660 ) 661 threshold_value = getattr(population, "threshold", 0.0) 662 self.thresholds[population.view[0]:population.view[1]] = float(threshold_value) 663 self.LUT = np.array([population.view for population in self.population]) 664 total = float(self.update_prob.sum()) 665 if not math.isfinite(total) or total <= 0: 666 raise RuntimeError("Invalid update probabilities. Check tau values.") 667 self.update_prob /= total 668 pop_mass_total = float(pop_update_mass.sum()) 669 if not math.isfinite(pop_mass_total) or pop_mass_total <= 0: 670 raise RuntimeError("Invalid population update masses. Check tau values.") 671 self._population_views = pop_views 672 self._population_cdf = np.cumsum(pop_update_mass / pop_mass_total) 673 self.weight_mode = self._choose_weight_mode(weight_mode, ram_budget_gb) 674 self.weights_dense = None 675 self.weights_csr = None 676 self.weights_csc = None 677 self.weights = None 678 self._sparse_rows.clear() 679 self._sparse_cols.clear() 680 self._sparse_data.clear() 681 if self.weight_mode == "dense": 682 self.weights_dense = np.zeros((self.N, self.N), dtype=self.weight_dtype) 683 else: 684 if sp is None: 685 raise ModuleNotFoundError( 686 "SciPy is required for sparse weight mode. Install it via 'pip install scipy'." 
687 ) 688 for synapse in self.synapses: 689 synapse.set_view( 690 np.array( 691 [[synapse.pre.view[0], synapse.pre.view[1]], [synapse.post.view[0], synapse.post.view[1]]], 692 dtype=np.int64, 693 ) 694 ) 695 synapse.initialze() 696 if self.weight_mode == "dense": 697 if not autapse: 698 np.fill_diagonal(self.weights_dense, 0.0) 699 self.weights = self.weights_dense 700 else: 701 row = np.concatenate(self._sparse_rows) if self._sparse_rows else np.zeros(0, dtype=np.int64) 702 col = np.concatenate(self._sparse_cols) if self._sparse_cols else np.zeros(0, dtype=np.int64) 703 data = np.concatenate(self._sparse_data) if self._sparse_data else np.zeros(0, dtype=self.weight_dtype) 704 if not autapse and row.size: 705 keep = row != col 706 row = row[keep] 707 col = col[keep] 708 data = data[keep] 709 matrix = sp.coo_matrix((data, (row, col)), shape=(self.N, self.N), dtype=self.weight_dtype) 710 self.weights_csr = matrix.tocsr() 711 self.weights_csc = matrix.tocsc() 712 self.weights = self.weights_csr 713 self._recompute_field() 714 self.sim_steps = 0 715 716 def _recompute_field(self): 717 state_float = self.state.astype(self.weight_dtype, copy=False) 718 if self.weight_mode == "dense": 719 self.field = self.weights_dense @ state_float 720 else: 721 self.field = self.weights_csr.dot(state_float).astype(self.weight_dtype, copy=False) 722 723 def _write_weight_block(self, row_start, row_end, col_start, col_end, block): 724 block = np.asarray(block, dtype=self.weight_dtype) 725 if block.shape != (row_end - row_start, col_end - col_start): 726 raise ValueError("Block shape does not match target slice.") 727 if self.weight_mode == "dense": 728 self.weights_dense[row_start:row_end, col_start:col_end] += block 729 else: 730 rows = np.arange(row_start, row_end, dtype=np.int64) 731 cols = np.arange(col_start, col_end, dtype=np.int64) 732 row_idx = np.repeat(rows, block.shape[1]) 733 col_idx = np.tile(cols, block.shape[0]) 734 values = block.reshape(-1) 735 mask = values != 0.0 736 
if mask.any(): 737 self._sparse_rows.append(row_idx[mask]) 738 self._sparse_cols.append(col_idx[mask]) 739 self._sparse_data.append(values[mask]) 740 741 def _append_sparse_entries(self, row_idx: np.ndarray, col_idx: np.ndarray, values: np.ndarray) -> None: 742 if self.weight_mode != "sparse": 743 raise RuntimeError("Sparse entries can only be appended in sparse weight mode.") 744 rows = np.asarray(row_idx, dtype=np.int64).ravel() 745 cols = np.asarray(col_idx, dtype=np.int64).ravel() 746 data = np.asarray(values, dtype=self.weight_dtype).ravel() 747 if rows.size != cols.size or rows.size != data.size: 748 raise ValueError("Sparse row/col/data arrays must have the same length.") 749 if rows.size == 0: 750 return 751 mask = data != 0.0 752 if mask.any(): 753 self._sparse_rows.append(rows[mask]) 754 self._sparse_cols.append(cols[mask]) 755 self._sparse_data.append(data[mask]) 756 757 def _choose_weight_mode(self, requested: str, ram_budget_gb: float) -> str: 758 requested = str(requested or "auto").lower() 759 if requested in {"dense", "sparse"}: 760 return requested 761 if requested != "auto": 762 raise ValueError("weight_mode must be 'dense', 'sparse', or 'auto'.") 763 # Estimate dense memory footprint assuming float32. 
764 bytes_per_entry = np.dtype(self.weight_dtype).itemsize 765 dense_bytes = self.N * self.N * bytes_per_entry 766 dense_gb = dense_bytes / (1024 ** 3) 767 safety = 0.6 * float(ram_budget_gb) 768 print("Simulating network as: " + "dense" if dense_gb <= safety else "sparse") 769 return "dense" if dense_gb <= safety else "sparse" 770 771 def _select_neurons(self, count: int) -> np.ndarray: 772 if count <= 0: 773 return np.zeros(0, dtype=np.int64) 774 if self._population_cdf.size == 0: 775 return np.random.choice(self.N, size=count, p=self.update_prob) 776 pop_indices = np.searchsorted(self._population_cdf, np.random.random(size=count), side="right") 777 pop_indices = np.minimum(pop_indices, self._population_views.shape[0] - 1) 778 counts = np.bincount(pop_indices, minlength=self._population_views.shape[0]) 779 neurons = np.empty(count, dtype=np.int64) 780 offset = 0 781 for pop_idx, pop_count in enumerate(counts): 782 if pop_count == 0: 783 continue 784 start, end = self._population_views[pop_idx] 785 neurons[offset:offset + pop_count] = np.random.randint(start, end, size=pop_count) 786 offset += pop_count 787 np.random.shuffle(neurons) 788 return neurons 789 790 def _update_batch_dense(self, neurons: np.ndarray, log_states, log_enabled: bool, log_offset: int): 791 if neurons.size == 0: 792 return 793 diff_enabled = self._diff_log_updates is not None and self._diff_log_deltas is not None 794 if diff_enabled: 795 diff_updates = self._diff_log_updates 796 diff_deltas = self._diff_log_deltas 797 diff_offset = self._diff_log_index 798 else: 799 diff_updates = self._diff_log_dummy_updates 800 diff_deltas = self._diff_log_dummy_deltas 801 diff_offset = 0 802 _dense_batch_kernel( 803 neurons, 804 self.state, 805 self.field, 806 self.thresholds, 807 self.weights_dense, 808 log_states, 809 log_enabled, 810 log_offset, 811 diff_updates, 812 diff_deltas, 813 diff_enabled, 814 diff_offset, 815 ) 816 817 def _update_batch_sparse(self, neurons: np.ndarray, log_states, log_enabled: 
bool, log_offset: int): 818 if neurons.size == 0: 819 return 820 diff_enabled = self._diff_log_updates is not None and self._diff_log_deltas is not None 821 if diff_enabled: 822 diff_updates = self._diff_log_updates 823 diff_deltas = self._diff_log_deltas 824 diff_offset = self._diff_log_index 825 else: 826 diff_updates = self._diff_log_dummy_updates 827 diff_deltas = self._diff_log_dummy_deltas 828 diff_offset = 0 829 _sparse_batch_kernel( 830 neurons, 831 self.state, 832 self.field, 833 self.thresholds, 834 self.weights_csc.data, 835 self.weights_csc.indices, 836 self.weights_csc.indptr, 837 log_states, 838 log_enabled, 839 log_offset, 840 diff_updates, 841 diff_deltas, 842 diff_enabled, 843 diff_offset, 844 ) 845 846 def update(self): 847 neuron = self._select_neurons(1) 848 self._process_batch(neuron) 849 850 def _process_batch(self, neurons: Sequence[int]): 851 neurons = np.asarray(neurons, dtype=np.int64) 852 log_enabled = self._step_log_buffer is not None 853 log_offset = self._step_log_index 854 if log_enabled and self._step_log_buffer is not None: 855 if log_offset + neurons.size > self._step_log_buffer.shape[0]: 856 raise RuntimeError( 857 "Step logging buffer exhausted. Increase allocated steps when enabling step logging." 858 ) 859 log_buffer = self._step_log_buffer 860 else: 861 log_buffer = self._step_log_dummy 862 if self._diff_log_updates is not None and self._diff_log_deltas is not None: 863 if self._diff_log_index + neurons.size > self._diff_log_updates.shape[0]: 864 raise RuntimeError("Diff logging buffer exhausted. 
Increase allocated steps when enabling diff logging.") 865 if self.weight_mode == "dense": 866 self._update_batch_dense(neurons, log_buffer, log_enabled, log_offset) 867 else: 868 self._update_batch_sparse(neurons, log_buffer, log_enabled, log_offset) 869 if log_enabled: 870 self._step_log_index += neurons.size 871 if self._diff_log_updates is not None and self._diff_log_deltas is not None: 872 self._diff_log_index += neurons.size 873 self.sim_steps += neurons.size 874 875 def run(self, steps=1000, batch_size=1): 876 """Advance the network for `steps` asynchronous updates. 877 878 Examples 879 -------- 880 >>> np.random.seed(2) 881 >>> net = BinaryNetwork("run-demo") 882 >>> pop = BinaryNeuronPopulation(net, N=2, threshold=0.1, initializer=[1, 0]) 883 >>> net.add_population(pop) 884 <...BinaryNeuronPopulation object...> 885 >>> net.initialize(weight_mode="dense") 886 >>> net.run(steps=4, batch_size=2) 887 >>> int(net.sim_steps) 888 4 889 """ 890 if self.state is None or self.update_prob is None: 891 raise RuntimeError("Call initialize() before run().") 892 if batch_size <= 0: 893 raise ValueError("batch_size must be positive.") 894 steps_done = 0 895 while steps_done < steps: 896 current_batch = min(batch_size, steps - steps_done) 897 neurons = self._select_neurons(current_batch) 898 self._process_batch(neurons) 899 steps_done += current_batch 900 901 def enable_step_logging(self, steps: int): 902 steps = int(max(0, steps)) 903 if steps == 0: 904 self._step_log_buffer = None 905 self._step_log_index = 0 906 return 907 self._step_log_buffer = np.zeros((steps, self.N), dtype=np.int8) 908 self._step_log_index = 0 909 910 def consume_step_log(self) -> np.ndarray: 911 if self._step_log_buffer is None: 912 return np.zeros((0, self.N), dtype=np.int8) 913 filled = min(self._step_log_index, self._step_log_buffer.shape[0]) 914 data = self._step_log_buffer[:filled].copy() 915 self._step_log_buffer = None 916 self._step_log_index = 0 917 return data 918 919 def 
enable_diff_logging(self, steps: int): 920 """Allocate a diff-log buffer for `steps` asynchronous updates. 921 922 Examples 923 -------- 924 >>> net = BinaryNetwork("log-demo") 925 >>> pop = BinaryNeuronPopulation(net, N=1, threshold=0.0, initializer=[0]) 926 >>> net.add_population(pop) 927 <...BinaryNeuronPopulation object...> 928 >>> net.initialize(weight_mode="dense") 929 >>> net.enable_diff_logging(steps=3) 930 >>> net._diff_log_updates.shape 931 (3,) 932 """ 933 steps = int(max(0, steps)) 934 if steps == 0: 935 self._diff_log_updates = None 936 self._diff_log_deltas = None 937 self._diff_log_index = 0 938 return 939 self._diff_log_updates = np.zeros(steps, dtype=np.uint16) 940 self._diff_log_deltas = np.zeros(steps, dtype=np.int8) 941 self._diff_log_index = 0 942 943 def consume_diff_log(self) -> tuple[np.ndarray, np.ndarray]: 944 """Return the recorded diff log as `(updates, deltas)` row arrays. 945 946 Expected output 947 --------------- 948 The returned arrays have shape `(1, recorded_steps)` for the current 949 simulation backend. 950 """ 951 if self._diff_log_updates is None or self._diff_log_deltas is None: 952 return np.zeros((1, 0), dtype=np.uint16), np.zeros((1, 0), dtype=np.int8) 953 filled = min(self._diff_log_index, self._diff_log_updates.shape[0]) 954 updates = self._diff_log_updates[:filled].copy()[None, :] 955 deltas = self._diff_log_deltas[:filled].copy()[None, :] 956 self._diff_log_updates = None 957 self._diff_log_deltas = None 958 self._diff_log_index = 0 959 return updates, deltas 960 961 @staticmethod 962 def reconstruct_states_from_diff_logs( 963 initial_state: np.ndarray, 964 updates: np.ndarray, 965 deltas: np.ndarray, 966 *, 967 sample_interval: int = 1, 968 ) -> np.ndarray: 969 """Reconstruct sampled full network states from diff-log traces. 970 971 Examples 972 -------- 973 >>> BinaryNetwork.reconstruct_states_from_diff_logs( 974 ... initial_state=np.array([0, 0], dtype=np.uint8), 975 ... 
updates=np.array([[0, 1, 0]], dtype=np.uint16), 976 ... deltas=np.array([[1, 1, -1]], dtype=np.int8), 977 ... ) 978 array([[1, 0], 979 [1, 1], 980 [0, 1]], dtype=uint8) 981 """ 982 update_arr = np.asarray(updates, dtype=np.int64) 983 delta_arr = np.asarray(deltas, dtype=np.int8) 984 if update_arr.ndim == 1: 985 update_arr = update_arr[None, :] 986 if delta_arr.ndim == 1: 987 delta_arr = delta_arr[None, :] 988 state = np.asarray(initial_state, dtype=np.int8).ravel().copy() 989 if update_arr.ndim != 2 or delta_arr.shape != update_arr.shape: 990 return np.zeros((0, state.size), dtype=np.uint8) 991 stride = max(1, int(sample_interval)) 992 return _reconstruct_states_kernel(state, update_arr, delta_arr, stride) 993 994 def population_rates_from_diff_logs( 995 self, 996 initial_state: np.ndarray, 997 updates: np.ndarray, 998 deltas: np.ndarray, 999 *, 1000 sample_interval: int = 1, 1001 populations: Optional[Sequence[Neuron]] = None, 1002 ) -> np.ndarray: 1003 """Compute per-population activity traces directly from diff logs. 1004 1005 Examples 1006 -------- 1007 >>> net = BinaryNetwork("rates-demo") 1008 >>> pop_a = BinaryNeuronPopulation(net, N=1, initializer=[0]) 1009 >>> pop_b = BinaryNeuronPopulation(net, N=1, initializer=[0]) 1010 >>> net.add_population(pop_a) 1011 <...BinaryNeuronPopulation object...> 1012 >>> net.add_population(pop_b) 1013 <...BinaryNeuronPopulation object...> 1014 >>> net.initialize(weight_mode="dense") 1015 >>> rates = net.population_rates_from_diff_logs( 1016 ... np.array([0, 0], dtype=np.uint8), 1017 ... np.array([[0, 1, 0]], dtype=np.uint16), 1018 ... np.array([[1, 1, -1]], dtype=np.int8), 1019 ... 
) 1020 >>> rates.tolist() 1021 [[1.0, 0.0], [1.0, 1.0], [0.0, 1.0]] 1022 """ 1023 pops = list(self.population if populations is None else populations) 1024 if not pops: 1025 return np.zeros((0, 0), dtype=np.float32) 1026 update_arr = np.asarray(updates, dtype=np.int64) 1027 delta_arr = np.asarray(deltas, dtype=np.int8) 1028 if update_arr.ndim == 1: 1029 update_arr = update_arr[None, :] 1030 if delta_arr.ndim == 1: 1031 delta_arr = delta_arr[None, :] 1032 if update_arr.ndim != 2 or delta_arr.shape != update_arr.shape: 1033 return np.zeros((0, len(pops)), dtype=np.float32) 1034 initial = np.asarray(initial_state, dtype=np.int8).ravel() 1035 pop_count = len(pops) 1036 sizes = np.zeros(pop_count, dtype=np.float32) 1037 cluster_of = np.empty(initial.size, dtype=np.int32) 1038 cluster_sums = np.zeros(pop_count, dtype=np.int64) 1039 for idx, pop in enumerate(pops): 1040 start, end = int(pop.view[0]), int(pop.view[1]) 1041 cluster_of[start:end] = idx 1042 sizes[idx] = max(1, end - start) 1043 cluster_sums[idx] = int(initial[start:end].sum()) 1044 stride = max(1, int(sample_interval)) 1045 return _population_rates_kernel(cluster_sums, sizes, cluster_of, update_arr, delta_arr, stride) 1046 1047 @staticmethod 1048 def extract_spike_events_from_diff_logs( 1049 updates: np.ndarray, 1050 deltas: np.ndarray, 1051 ) -> tuple[np.ndarray, np.ndarray]: 1052 """Extract onset events `(times, neuron_ids)` from diff-log traces. 1053 1054 Examples 1055 -------- 1056 >>> times, ids = BinaryNetwork.extract_spike_events_from_diff_logs( 1057 ... np.array([[0, 1, 0]], dtype=np.uint16), 1058 ... np.array([[1, 1, -1]], dtype=np.int8), 1059 ... 
) 1060 >>> times.tolist() 1061 [0.0, 1.0] 1062 >>> ids.tolist() 1063 [0, 1] 1064 """ 1065 update_arr = np.asarray(updates, dtype=np.int64) 1066 delta_arr = np.asarray(deltas, dtype=np.int8) 1067 if update_arr.ndim == 1: 1068 update_arr = update_arr[None, :] 1069 if delta_arr.ndim == 1: 1070 delta_arr = delta_arr[None, :] 1071 if update_arr.ndim != 2 or delta_arr.shape != update_arr.shape: 1072 return np.zeros(0, dtype=np.float64), np.zeros(0, dtype=np.int64) 1073 mask = delta_arr > 0 1074 if not mask.any(): 1075 return np.zeros(0, dtype=np.float64), np.zeros(0, dtype=np.int64) 1076 per_step, steps = update_arr.shape 1077 times = np.repeat(np.arange(steps, dtype=np.int64), per_step) 1078 flat_updates = update_arr.reshape(-1, order="F") 1079 flat_mask = mask.reshape(-1, order="F") 1080 return times[flat_mask].astype(np.float64, copy=False), flat_updates[flat_mask].astype(np.int64, copy=False)
Binary network simulator with dense/sparse backends and diff-log tracing.
Examples
>>> np.random.seed(0)
>>> net = BinaryNetwork("demo")
>>> exc = BinaryNeuronPopulation(net, N=2, threshold=0.5, tau=5.0, initializer=[0, 1])
>>> inh = BinaryNeuronPopulation(net, N=1, threshold=0.5, tau=5.0, initializer=[0])
>>> net.add_population(exc)
<...BinaryNeuronPopulation object...>
>>> net.add_population(inh)
<...BinaryNeuronPopulation object...>
>>> net.add_synapse(AllToAllSynapse(net, exc, exc, j=0.6))
>>> net.initialize(weight_mode="dense", autapse=False)
>>> net.state.shape
(3,)
def __init__(self, name="Some Binary Network"):
    """Create an empty network; populations/synapses are added afterwards.

    All simulation arrays stay `None`/empty until `initialize()` runs.

    Parameters
    ----------
    name : str
        Human-readable network label.
    """
    self.name = name
    # Topology containers.
    self.N = 0
    self.population: List[Neuron] = []
    self.synapses: List[Synapse] = []
    # Simulation state (allocated by initialize()).
    self.state: Optional[np.ndarray] = None
    self.weights_dense: Optional[np.ndarray] = None
    self.weights_csr = None
    self.weights_csc = None
    self.weights = None  # compatibility alias for the active weight matrix
    self.LUT = None  # look up table for the update function
    self.sim_steps = 0
    self.population_lookup = None
    self.neuron_lookup = None
    self.update_prob = None
    self.thresholds = None
    self.field = None
    self.weight_mode = "dense"
    self.weight_dtype = np.float32
    self._population_views = np.zeros((0, 2), dtype=np.int64)
    self._population_cdf = np.zeros(0, dtype=np.float64)
    # Sparse-assembly scratch (COO triplets collected by synapses).
    self._sparse_rows: List[np.ndarray] = []
    self._sparse_cols: List[np.ndarray] = []
    self._sparse_data: List[np.ndarray] = []
    # Logging buffers.
    self._step_log_buffer: Optional[np.ndarray] = None
    self._step_log_index = 0
    self._step_log_dummy = np.zeros((0, 0), dtype=np.int8)
    self._diff_log_updates: Optional[np.ndarray] = None
    self._diff_log_deltas: Optional[np.ndarray] = None
    self._diff_log_index = 0
    self._diff_log_dummy_updates = np.zeros(0, dtype=np.uint16)
    self._diff_log_dummy_deltas = np.zeros(0, dtype=np.int8)
def initialize(
    self,
    autapse: bool = False,
    weight_mode: str = "auto",
    ram_budget_gb: float = 12.0,
    weight_dtype=np.float32,
):
    """Allocate state, sample connectivity, and prepare the cached input field.

    Parameters
    ----------
    autapse : bool
        If False, self-connections (diagonal weights) are removed.
    weight_mode : str
        "dense", "sparse", or "auto" (resolved by `_choose_weight_mode`).
    ram_budget_gb : float
        Budget passed to the weight-mode heuristic.
    weight_dtype : numpy dtype
        Must be float32 or float64.

    Raises
    ------
    RuntimeError
        If no populations were added, or tau values make the update
        probabilities invalid.
    ValueError
        If `weight_dtype` is not float32/float64.
    ModuleNotFoundError
        If sparse mode is requested without SciPy installed.
    """
    if self.N == 0:
        raise RuntimeError("Cannot initialize network without populations.")
    self.weight_dtype = np.dtype(weight_dtype)
    if self.weight_dtype not in (np.float32, np.float64):
        raise ValueError("weight_dtype must be float32 or float64.")
    # Flat per-neuron arrays.
    self.state = np.zeros(self.N, dtype=np.int8)
    self.field = np.zeros(self.N, dtype=self.weight_dtype)
    self.update_prob = np.zeros(self.N, dtype=self.weight_dtype)
    self.population_lookup = np.zeros(self.N, dtype=np.int32)
    self.neuron_lookup = np.zeros(self.N, dtype=np.int32)
    self.thresholds = np.zeros(self.N, dtype=self.weight_dtype)
    n_pops = len(self.population)
    view_table = np.zeros((n_pops, 2), dtype=np.int64)
    update_mass = np.zeros(n_pops, dtype=np.float64)
    offset = 0
    for pop_idx, pop in enumerate(self.population):
        pop.set_view([offset, offset + pop.N])
        offset += pop.N
        pop.initialze()  # (sic) hook name as defined on the population class
        view_table[pop_idx, :] = pop.view
        tau_safe = max(pop.tau, 1e-9)  # clamp to avoid division by zero
        update_mass[pop_idx] = pop.N / tau_safe
        lo, hi = pop.view[0], pop.view[1]
        self.update_prob[lo:hi] = 1.0 / tau_safe
        self.population_lookup[lo:hi] = pop_idx
        self.neuron_lookup[lo:hi] = np.arange(pop.N, dtype=np.int32)
        self.thresholds[lo:hi] = float(getattr(pop, "threshold", 0.0))
    self.LUT = np.array([pop.view for pop in self.population])
    prob_mass = float(self.update_prob.sum())
    if not math.isfinite(prob_mass) or prob_mass <= 0:
        raise RuntimeError("Invalid update probabilities. Check tau values.")
    self.update_prob /= prob_mass
    mass_total = float(update_mass.sum())
    if not math.isfinite(mass_total) or mass_total <= 0:
        raise RuntimeError("Invalid population update masses. Check tau values.")
    self._population_views = view_table
    self._population_cdf = np.cumsum(update_mass / mass_total)
    self.weight_mode = self._choose_weight_mode(weight_mode, ram_budget_gb)
    # Reset any previously sampled connectivity before re-wiring.
    self.weights_dense = None
    self.weights_csr = None
    self.weights_csc = None
    self.weights = None
    self._sparse_rows.clear()
    self._sparse_cols.clear()
    self._sparse_data.clear()
    if self.weight_mode == "dense":
        self.weights_dense = np.zeros((self.N, self.N), dtype=self.weight_dtype)
    elif sp is None:
        raise ModuleNotFoundError(
            "SciPy is required for sparse weight mode. Install it via 'pip install scipy'."
        )
    for synapse in self.synapses:
        synapse.set_view(
            np.array(
                [
                    [synapse.pre.view[0], synapse.pre.view[1]],
                    [synapse.post.view[0], synapse.post.view[1]],
                ],
                dtype=np.int64,
            )
        )
        synapse.initialze()  # (sic) hook name as defined on the synapse class
    if self.weight_mode == "dense":
        if not autapse:
            np.fill_diagonal(self.weights_dense, 0.0)
        self.weights = self.weights_dense
    else:
        rows = np.concatenate(self._sparse_rows) if self._sparse_rows else np.zeros(0, dtype=np.int64)
        cols = np.concatenate(self._sparse_cols) if self._sparse_cols else np.zeros(0, dtype=np.int64)
        vals = np.concatenate(self._sparse_data) if self._sparse_data else np.zeros(0, dtype=self.weight_dtype)
        if not autapse and rows.size:
            off_diagonal = rows != cols
            rows, cols, vals = rows[off_diagonal], cols[off_diagonal], vals[off_diagonal]
        coo = sp.coo_matrix((vals, (rows, cols)), shape=(self.N, self.N), dtype=self.weight_dtype)
        self.weights_csr = coo.tocsr()
        self.weights_csc = coo.tocsc()
        self.weights = self.weights_csr
    self._recompute_field()
    self.sim_steps = 0
Allocate state, sample connectivity, and prepare the cached input field.
Examples
>>> np.random.seed(1)
>>> net = BinaryNetwork("init-demo")
>>> pop = BinaryNeuronPopulation(net, N=2, threshold=0.1, initializer=[1, 0])
>>> net.add_population(pop)
<...BinaryNeuronPopulation object...>
>>> net.initialize(weight_mode="dense")
>>> net.field.shape
(2,)
def run(self, steps=1000, batch_size=1):
    """Advance the network for `steps` asynchronous updates.

    Neurons are drawn in batches of at most `batch_size`; each batch is
    handed to `_process_batch`, which also advances `sim_steps`.

    Raises
    ------
    RuntimeError
        If `initialize()` has not been called yet.
    ValueError
        If `batch_size` is not positive.
    """
    if self.state is None or self.update_prob is None:
        raise RuntimeError("Call initialize() before run().")
    if batch_size <= 0:
        raise ValueError("batch_size must be positive.")
    done = 0
    while done < steps:
        batch = min(batch_size, steps - done)
        picked = self._select_neurons(batch)
        self._process_batch(picked)
        done += batch
Advance the network for `steps` asynchronous updates.
Examples
>>> np.random.seed(2)
>>> net = BinaryNetwork("run-demo")
>>> pop = BinaryNeuronPopulation(net, N=2, threshold=0.1, initializer=[1, 0])
>>> net.add_population(pop)
<...BinaryNeuronPopulation object...>
>>> net.initialize(weight_mode="dense")
>>> net.run(steps=4, batch_size=2)
>>> int(net.sim_steps)
4
def consume_step_log(self) -> np.ndarray:
    """Return the recorded per-step states and clear the log buffer.

    Only the rows actually filled (tracked by `_step_log_index`) are
    returned; with no buffer allocated, an empty `(0, N)` array is given.
    """
    buffer = self._step_log_buffer
    if buffer is None:
        return np.zeros((0, self.N), dtype=np.int8)
    n_recorded = min(self._step_log_index, buffer.shape[0])
    out = buffer[:n_recorded].copy()
    # Consuming the log resets it — callers re-enable logging to record more.
    self._step_log_buffer = None
    self._step_log_index = 0
    return out
def enable_diff_logging(self, steps: int):
    """Allocate a diff-log buffer for `steps` asynchronous updates.

    A non-positive `steps` disables diff logging and clears the buffers.
    Each logged update stores the neuron index (uint16) and its state
    delta (int8).
    """
    capacity = int(max(0, steps))
    if capacity == 0:
        self._diff_log_updates = None
        self._diff_log_deltas = None
        self._diff_log_index = 0
        return
    # uint16 indices limit loggable networks to < 65536 neurons —
    # presumably sufficient here; TODO confirm against callers.
    self._diff_log_updates = np.zeros(capacity, dtype=np.uint16)
    self._diff_log_deltas = np.zeros(capacity, dtype=np.int8)
    self._diff_log_index = 0
Allocate a diff-log buffer for `steps` asynchronous updates.
Examples
>>> net = BinaryNetwork("log-demo")
>>> pop = BinaryNeuronPopulation(net, N=1, threshold=0.0, initializer=[0])
>>> net.add_population(pop)
<...BinaryNeuronPopulation object...>
>>> net.initialize(weight_mode="dense")
>>> net.enable_diff_logging(steps=3)
>>> net._diff_log_updates.shape
(3,)
def consume_diff_log(self) -> tuple[np.ndarray, np.ndarray]:
    """Return the recorded diff log as `(updates, deltas)` row arrays.

    Both arrays have shape `(1, recorded_steps)`; with logging disabled,
    `(1, 0)` arrays are returned. The buffers are cleared on consumption.
    """
    updates_buf = self._diff_log_updates
    deltas_buf = self._diff_log_deltas
    if updates_buf is None or deltas_buf is None:
        return np.zeros((1, 0), dtype=np.uint16), np.zeros((1, 0), dtype=np.int8)
    recorded = min(self._diff_log_index, updates_buf.shape[0])
    row_updates = updates_buf[:recorded].copy()[None, :]
    row_deltas = deltas_buf[:recorded].copy()[None, :]
    self._diff_log_updates = None
    self._diff_log_deltas = None
    self._diff_log_index = 0
    return row_updates, row_deltas
Return the recorded diff log as (updates, deltas) row arrays.
Expected output
The returned arrays have shape (1, recorded_steps) for the current
simulation backend.
961 @staticmethod 962 def reconstruct_states_from_diff_logs( 963 initial_state: np.ndarray, 964 updates: np.ndarray, 965 deltas: np.ndarray, 966 *, 967 sample_interval: int = 1, 968 ) -> np.ndarray: 969 """Reconstruct sampled full network states from diff-log traces. 970 971 Examples 972 -------- 973 >>> BinaryNetwork.reconstruct_states_from_diff_logs( 974 ... initial_state=np.array([0, 0], dtype=np.uint8), 975 ... updates=np.array([[0, 1, 0]], dtype=np.uint16), 976 ... deltas=np.array([[1, 1, -1]], dtype=np.int8), 977 ... ) 978 array([[1, 0], 979 [1, 1], 980 [0, 1]], dtype=uint8) 981 """ 982 update_arr = np.asarray(updates, dtype=np.int64) 983 delta_arr = np.asarray(deltas, dtype=np.int8) 984 if update_arr.ndim == 1: 985 update_arr = update_arr[None, :] 986 if delta_arr.ndim == 1: 987 delta_arr = delta_arr[None, :] 988 state = np.asarray(initial_state, dtype=np.int8).ravel().copy() 989 if update_arr.ndim != 2 or delta_arr.shape != update_arr.shape: 990 return np.zeros((0, state.size), dtype=np.uint8) 991 stride = max(1, int(sample_interval)) 992 return _reconstruct_states_kernel(state, update_arr, delta_arr, stride)
Reconstruct sampled full network states from diff-log traces.
Examples
>>> BinaryNetwork.reconstruct_states_from_diff_logs(
... initial_state=np.array([0, 0], dtype=np.uint8),
... updates=np.array([[0, 1, 0]], dtype=np.uint16),
... deltas=np.array([[1, 1, -1]], dtype=np.int8),
... )
array([[1, 0],
[1, 1],
[0, 1]], dtype=uint8)
def population_rates_from_diff_logs(
    self,
    initial_state: np.ndarray,
    updates: np.ndarray,
    deltas: np.ndarray,
    *,
    sample_interval: int = 1,
    populations: "Optional[Sequence[Neuron]]" = None,
) -> np.ndarray:
    """Compute per-population activity traces directly from diff logs.

    Per population, the number of active neurons in the initial state is
    tracked and updated by the logged deltas; `_population_rates_kernel`
    produces the normalized traces sampled every `sample_interval` steps.
    Inconsistent trace shapes yield an empty result.
    """
    selected = list(self.population if populations is None else populations)
    if not selected:
        return np.zeros((0, 0), dtype=np.float32)
    update_mat = np.asarray(updates, dtype=np.int64)
    delta_mat = np.asarray(deltas, dtype=np.int8)
    if update_mat.ndim == 1:
        update_mat = update_mat.reshape(1, -1)
    if delta_mat.ndim == 1:
        delta_mat = delta_mat.reshape(1, -1)
    if update_mat.ndim != 2 or delta_mat.shape != update_mat.shape:
        return np.zeros((0, len(selected)), dtype=np.float32)
    start = np.asarray(initial_state, dtype=np.int8).ravel()
    n_pops = len(selected)
    pop_sizes = np.zeros(n_pops, dtype=np.float32)
    membership = np.empty(start.size, dtype=np.int32)
    active_counts = np.zeros(n_pops, dtype=np.int64)
    for pop_idx, pop in enumerate(selected):
        lo, hi = int(pop.view[0]), int(pop.view[1])
        membership[lo:hi] = pop_idx
        pop_sizes[pop_idx] = max(1, hi - lo)  # guard against empty views
        active_counts[pop_idx] = int(start[lo:hi].sum())
    stride = max(1, int(sample_interval))
    return _population_rates_kernel(active_counts, pop_sizes, membership, update_mat, delta_mat, stride)
Compute per-population activity traces directly from diff logs.
Examples
>>> net = BinaryNetwork("rates-demo")
>>> pop_a = BinaryNeuronPopulation(net, N=1, initializer=[0])
>>> pop_b = BinaryNeuronPopulation(net, N=1, initializer=[0])
>>> net.add_population(pop_a)
<...BinaryNeuronPopulation object...>
>>> net.add_population(pop_b)
<...BinaryNeuronPopulation object...>
>>> net.initialize(weight_mode="dense")
>>> rates = net.population_rates_from_diff_logs(
... np.array([0, 0], dtype=np.uint8),
... np.array([[0, 1, 0]], dtype=np.uint16),
... np.array([[1, 1, -1]], dtype=np.int8),
... )
>>> rates.tolist()
[[1.0, 0.0], [1.0, 1.0], [0.0, 1.0]]
1047 @staticmethod 1048 def extract_spike_events_from_diff_logs( 1049 updates: np.ndarray, 1050 deltas: np.ndarray, 1051 ) -> tuple[np.ndarray, np.ndarray]: 1052 """Extract onset events `(times, neuron_ids)` from diff-log traces. 1053 1054 Examples 1055 -------- 1056 >>> times, ids = BinaryNetwork.extract_spike_events_from_diff_logs( 1057 ... np.array([[0, 1, 0]], dtype=np.uint16), 1058 ... np.array([[1, 1, -1]], dtype=np.int8), 1059 ... ) 1060 >>> times.tolist() 1061 [0.0, 1.0] 1062 >>> ids.tolist() 1063 [0, 1] 1064 """ 1065 update_arr = np.asarray(updates, dtype=np.int64) 1066 delta_arr = np.asarray(deltas, dtype=np.int8) 1067 if update_arr.ndim == 1: 1068 update_arr = update_arr[None, :] 1069 if delta_arr.ndim == 1: 1070 delta_arr = delta_arr[None, :] 1071 if update_arr.ndim != 2 or delta_arr.shape != update_arr.shape: 1072 return np.zeros(0, dtype=np.float64), np.zeros(0, dtype=np.int64) 1073 mask = delta_arr > 0 1074 if not mask.any(): 1075 return np.zeros(0, dtype=np.float64), np.zeros(0, dtype=np.int64) 1076 per_step, steps = update_arr.shape 1077 times = np.repeat(np.arange(steps, dtype=np.int64), per_step) 1078 flat_updates = update_arr.reshape(-1, order="F") 1079 flat_mask = mask.reshape(-1, order="F") 1080 return times[flat_mask].astype(np.float64, copy=False), flat_updates[flat_mask].astype(np.int64, copy=False)
Extract onset events (times, neuron_ids) from diff-log traces.
Examples
>>> times, ids = BinaryNetwork.extract_spike_events_from_diff_logs(
... np.array([[0, 1, 0]], dtype=np.uint16),
... np.array([[1, 1, -1]], dtype=np.int8),
... )
>>> times.tolist()
[0.0, 1.0]
>>> ids.tolist()
[0, 1]
class ClusteredEI_network(BaseBinaryNetwork):
    """Repository-specific clustered E/I binary network.

    The network consists of `Q` excitatory and `Q` inhibitory clusters whose
    within/between-cluster connection probabilities and weights are derived
    from `parameter` via `_compute_cluster_parameters`. The constant external
    drive `m_X` is folded into the thresholds instead of being simulated as
    an explicit background population.

    Examples
    --------
    >>> parameter = {
    ...     "Q": 2,
    ...     "N_E": 4,
    ...     "N_I": 2,
    ...     "V_th": 1.0,
    ...     "g": 1.0,
    ...     "p0_ee": 0.2,
    ...     "p0_ei": 0.2,
    ...     "p0_ie": 0.2,
    ...     "p0_ii": 0.2,
    ...     "R_Eplus": 1.0,
    ...     "R_j": 0.0,
    ...     "m_X": 0.0,
    ...     "tau_e": 5.0,
    ...     "tau_i": 10.0,
    ... }
    >>> net = ClusteredEI_network(parameter)
    >>> net.Q
    2
    """

    def __init__(self, parameter: Dict, *, kappa: Optional[float] = None, connection_type: Optional[str] = None, name="Binary EI Network"):
        """Store parameters and derive the cluster layout.

        Population and synapse objects are created lazily by `initialize()`.
        """
        super().__init__(name)
        self.parameter = dict(parameter)  # defensive copy
        self.Q = int(self.parameter["Q"])
        self.connection_type = _normalize_conn_type(connection_type or self.parameter.get("connection_type"))
        self.multapses = bool(self.parameter.get("multapses", True))
        # The explicit keyword argument overrides the value in `parameter`.
        self.kappa = float(kappa if kappa is not None else self.parameter.get("kappa", 0.0))
        self.connection_parameters = _compute_cluster_parameters(self.parameter, self.kappa)
        self.E_sizes = _split_counts(int(self.parameter["N_E"]), self.Q)
        self.I_sizes = _split_counts(int(self.parameter["N_I"]), self.Q)
        self.total_neurons = sum(self.E_sizes) + sum(self.I_sizes)
        self.initializers_E, self.initializers_I = _resolve_initializers(
            self.parameter.get("initial_activity"),
            self.E_sizes,
            self.I_sizes,
        )
        self.E_pops: List[BinaryNeuronPopulation] = []
        self.I_pops: List[BinaryNeuronPopulation] = []
        self.other_pops: List[Neuron] = []
        self._structure_created = False

    def _build_populations(self):
        """Create and register one E and one I population per cluster."""
        tau_e = float(self.parameter["tau_e"])
        tau_i = float(self.parameter.get("tau_i", 0.5 * tau_e))
        theta_E = self.connection_parameters["theta_E"]
        theta_I = self.connection_parameters["theta_I"]
        for idx, size in enumerate(self.E_sizes):
            pop = BinaryNeuronPopulation(
                self,
                N=size,
                threshold=theta_E,
                tau=tau_e,
                name=f"E{idx}",
                initializer=self.initializers_E[idx],
            )
            pop.cluster_index = idx
            pop.cell_type = "E"
            self.E_pops.append(self.add_population(pop))
        for idx, size in enumerate(self.I_sizes):
            pop = BinaryNeuronPopulation(
                self,
                N=size,
                threshold=theta_I,
                tau=tau_i,
                name=f"I{idx}",
                initializer=self.initializers_I[idx],
            )
            pop.cluster_index = idx
            pop.cell_type = "I"
            self.I_pops.append(self.add_population(pop))

    def _synapse_factory(self):
        """Return a `(pre, post, p, j) -> synapse` builder for the configured type."""
        if self.connection_type == "poisson":
            return lambda pre, post, p, j: PoissonSynapse(self, pre, post, rate=p, j=j)
        if self.connection_type == "fixed-indegree":
            return lambda pre, post, p, j: FixedIndegreeSynapse(
                self,
                pre,
                post,
                p=p,
                j=j,
                multapses=self.multapses,
            )
        return lambda pre, post, p, j: PairwiseBernoulliSynapse(self, pre, post, p=p, j=j)

    def _build_synapses(self):
        """Wire every population pair with cluster-dependent (p, j) values."""
        builder = self._synapse_factory()
        p_plus = self.connection_parameters["p_plus"]
        p_minus = self.connection_parameters["p_minus"]
        j_plus = self.connection_parameters["j_plus"]
        j_minus = self.connection_parameters["j_minus"]
        pre_groups = [self.E_pops, self.I_pops]
        for target_group_idx, target_pops in enumerate([self.E_pops, self.I_pops]):
            for source_group_idx, source_pops in enumerate(pre_groups):
                for post_pop in target_pops:
                    for pre_pop in source_pops:
                        # "+" parameters within a cluster, "-" across clusters.
                        same_cluster = pre_pop.cluster_index == post_pop.cluster_index
                        p_matrix = p_plus if same_cluster else p_minus
                        j_matrix = j_plus if same_cluster else j_minus
                        p_value = p_matrix[target_group_idx, source_group_idx]
                        j_value = j_matrix[target_group_idx, source_group_idx]
                        self.add_synapse(builder(pre_pop, post_pop, p_value, j_value))

    def _build_background(self):
        """Account for the constant external drive `m_X` via threshold shifts.

        NOTE(review): the commented lines show the explicit background
        population/synapse alternative; the threshold shift is presumably
        the equivalent mean-field substitution for a constant drive —
        confirm before re-enabling the explicit wiring.
        """
        m_X = float(self.parameter.get("m_X", 0.0) or 0.0)
        if m_X == 0.0:
            return
        bg = BackgroundActivity(self, N=1, Activity=m_X, name="Background")
        # self.other_pops.append(self.add_population(bg))
        J_EX = float(self.connection_parameters["external_exc"])
        J_IX = float(self.connection_parameters["external_inh"])
        for pop in self.E_pops:
            pop.threshold -= m_X * J_EX
            # self.add_synapse(AllToAllSynapse(self, bg, pop, j=J_EX))
        for pop in self.I_pops:
            pop.threshold -= m_X * J_IX
            # self.add_synapse(AllToAllSynapse(self, bg, pop, j=J_IX))

    def _ensure_structure(self):
        """Build populations, synapses, and background exactly once."""
        if self._structure_created:
            return
        self._build_populations()
        self._build_synapses()
        self._build_background()
        self._structure_created = True

    def initialize(
        self,
        autapse: bool = False,
        weight_mode: str = "auto",
        ram_budget_gb: float = 12.0,
        weight_dtype=np.float32,
    ):
        """Build clustered populations/synapses and initialize the parent network.

        After initialization, `state`, `field`, and the sampled connectivity
        are allocated and ready for `run(...)`.
        """
        self._ensure_structure()
        super().initialize(
            autapse=autapse,
            weight_mode=weight_mode,
            ram_budget_gb=ram_budget_gb,
            weight_dtype=weight_dtype,
        )

    def reinitialize(self):
        """Rebuild the simulation state with the stored parameters."""
        self.initialize()

    def reinitalize(self):
        """Backward-compatible misspelled alias for :meth:`reinitialize`."""
        self.reinitialize()
Repository-specific clustered E/I binary network.
Examples
>>> parameter = {
... "Q": 2,
... "N_E": 4,
... "N_I": 2,
... "V_th": 1.0,
... "g": 1.0,
... "p0_ee": 0.2,
... "p0_ei": 0.2,
... "p0_ie": 0.2,
... "p0_ii": 0.2,
... "R_Eplus": 1.0,
... "R_j": 0.0,
... "m_X": 0.0,
... "tau_e": 5.0,
... "tau_i": 10.0,
... }
>>> net = ClusteredEI_network(parameter)
>>> net.Q
2
def __init__(
    self,
    parameter: Dict,
    *,
    kappa: Optional[float] = None,
    connection_type: Optional[str] = None,
    name="Binary EI Network",
):
    """Store parameters and derive cluster sizes and connection parameters.

    Populations and synapses are created lazily when `initialize()` runs.
    """
    super().__init__(name)
    self.parameter = dict(parameter)  # defensive copy
    self.Q = int(self.parameter["Q"])
    self.connection_type = _normalize_conn_type(
        connection_type or self.parameter.get("connection_type")
    )
    self.multapses = bool(self.parameter.get("multapses", True))
    # The explicit keyword argument wins over the value stored in `parameter`.
    self.kappa = float(kappa if kappa is not None else self.parameter.get("kappa", 0.0))
    self.connection_parameters = _compute_cluster_parameters(self.parameter, self.kappa)
    self.E_sizes = _split_counts(int(self.parameter["N_E"]), self.Q)
    self.I_sizes = _split_counts(int(self.parameter["N_I"]), self.Q)
    self.total_neurons = sum(self.E_sizes) + sum(self.I_sizes)
    self.initializers_E, self.initializers_I = _resolve_initializers(
        self.parameter.get("initial_activity"),
        self.E_sizes,
        self.I_sizes,
    )
    self.E_pops: List[BinaryNeuronPopulation] = []
    self.I_pops: List[BinaryNeuronPopulation] = []
    self.other_pops: List[Neuron] = []
    self._structure_created = False
def initialize(
    self,
    autapse: bool = False,
    weight_mode: str = "auto",
    ram_budget_gb: float = 12.0,
    weight_dtype=np.float32,
):
    """Build clustered populations/synapses, then initialize the parent network.

    After this call, `state`, `field`, and the sampled connectivity are
    allocated and `run(...)` can be used.
    """
    self._ensure_structure()  # idempotent: builds the structure only once
    super().initialize(
        autapse=autapse,
        weight_mode=weight_mode,
        ram_budget_gb=ram_budget_gb,
        weight_dtype=weight_dtype,
    )
def reinitalize(self):
    """Rebuild the simulation state using the parameters stored on the instance.

    NOTE(review): the method name keeps its historical (misspelled)
    form because external callers use it.
    """
    self.initialize()
Rebuild the simulation state with the stored parameters.
Inherited Members
- BinaryNetwork
- name
- N
- population
- synapses
- state
- weights_dense
- weights_csr
- weights_csc
- weights
- LUT
- sim_steps
- population_lookup
- neuron_lookup
- update_prob
- thresholds
- field
- weight_mode
- weight_dtype
- add_population
- add_synapse
- update
- run
- enable_step_logging
- consume_step_log
- enable_diff_logging
- consume_diff_log
- reconstruct_states_from_diff_logs
- population_rates_from_diff_logs
- extract_spike_events_from_diff_logs