     "cosine_similarity",
     "dot_similarity",
     "multiset",
+    "multibind",
+    "sequence",
+    "distinct_sequence",
     "ngrams",
     "hash_table",
     "map_range",
@@ -436,24 +439,52 @@ def multiset(
     dim=-2,
     keepdim=False,
     dtype=None,
+    out=None,
 ) -> Tensor:
-    """Returns element-wise sum of hypervectors hv
+    """Element-wise sum of input hypervectors

     Args:
         input (Tensor): input hypervector tensor
         dim (int, optional): dimension over which to bundle the hypervectors. Defaults to -2.
         keepdim (bool, optional): whether to keep the bundled dimension. Defaults to False.
         dtype (torch.dtype, optional): if specified determines the type of the returned tensor, otherwise same as input.
+        out (Tensor, optional): the output tensor.

     Returns:
         Tensor: bundled hypervector
+    """
+    return torch.sum(input, dim=dim, keepdim=keepdim, dtype=dtype, out=out)
+
+
+def multibind(input: Tensor, *, dim=-2, keepdim=False, dtype=None, out=None) -> Tensor:
+    """Element-wise multiplication of input hypervectors
+
+    Args:
+        input (Tensor): input hypervector tensor
+        dim (int, optional): dimension over which to bind the hypervectors. Defaults to -2.
+        keepdim (bool, optional): whether to keep the bundled dimension. Defaults to False.
+        dtype (torch.dtype, optional): if specified determines the type of the returned tensor, otherwise same as input.
+        out (Tensor, optional): the output tensor.

+    Returns:
+        Tensor: bound hypervector
     """
+    return torch.prod(input, dim=dim, keepdim=keepdim, dtype=dtype, out=out)
+

-    return torch.sum(input, dim=dim, keepdim=keepdim, dtype=dtype)
+def ngrams(input: Tensor, n=3) -> Tensor:
+    """Creates a hypervector containing the n-gram statistics of input

+    Arguments are of shape (*, n, d) where `*` is any dimensions including none, `n` is the
+    number of values, and d is the dimensionality of the hypervector.

-def ngrams(input: Tensor, n=3):
+    Args:
+        input (Tensor): The value hypervectors.
+        n (int, optional): The size of each n-gram. Defaults to 3.
+
+    Returns:
+        Tensor: output hypervector of shape (*, d)
+    """
     n_gram = None
     for i in range(0, n):
         if i == (n - 1):
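As a side note on what the two reductions above compute, here is a minimal sketch in plain torch, assuming random real-valued hypervectors (the model choice is illustrative, not part of the change):

import torch

hv = torch.randn(5, 10000)       # 5 random hypervectors of dimensionality 10,000
bundled = torch.sum(hv, dim=-2)  # what multiset(hv) computes: element-wise sum, shape (10000,)
bound = torch.prod(hv, dim=-2)   # what multibind(hv) computes: element-wise product, shape (10000,)

For random hypervectors, the bundled result stays measurably similar to each of its inputs, while the bound result is approximately orthogonal to them, which is why the two reductions play different roles.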
@@ -468,8 +499,9 @@ def ngrams(input: Tensor, n=3):
     return multiset(n_gram)


-def hash_table(keys: Tensor, values: Tensor):
+def hash_table(keys: Tensor, values: Tensor) -> Tensor:
     """Combines the keys and values hypervectors to create a hash table.
+
     Arguments are of shape (*, v, d) where `*` is any dimensions including none, `v` is the
     number of key-value pairs, and d is the dimensionality of the hypervector.

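The hash table construction (a multiset of key-value bindings) can be sanity-checked with random bipolar hypervectors and element-wise multiplication as the bind operation; this is a sketch under those assumptions, not the library's own test:

import torch

d = 10000
keys = torch.randint(0, 2, (3, d), dtype=torch.float) * 2 - 1    # random bipolar key hypervectors
values = torch.randint(0, 2, (3, d), dtype=torch.float) * 2 - 1  # random bipolar value hypervectors

table = torch.sum(keys * values, dim=-2)  # multiset(bind(keys, values))

# Binding the table with a key yields a noisy copy of the paired value:
recovered = table * keys[0]
hit = torch.nn.functional.cosine_similarity(recovered, values[0], dim=0)   # well above 0 (roughly 0.58 for 3 pairs)
miss = torch.nn.functional.cosine_similarity(recovered, values[1], dim=0)  # close to 0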
@@ -483,6 +515,48 @@ def hash_table(keys: Tensor, values: Tensor):
     return multiset(bind(keys, values))


+def sequence(input: Tensor) -> Tensor:
+    """Creates a bundling-based sequence
+
+    The first value is permuted n-1 times, the last value is permuted 0 times.
+
+    Args:
+        input (Tensor): The n hypervector values of shape (*, n, d).
+
+    Returns:
+        Tensor: output hypervector of shape (*, d)
+    """
+    dim = -2
+    n = input.size(dim)
+
+    enum = enumerate(torch.unbind(input, dim))
+    permuted = [permute(hv, shifts=n - i - 1) for i, hv in enum]
+    permuted = torch.stack(permuted, dim)
+
+    return multiset(permuted)
+
+
+def distinct_sequence(input: Tensor) -> Tensor:
+    """Creates a binding-based sequence
+
+    The first value is permuted n-1 times, the last value is permuted 0 times.
+
+    Args:
+        input (Tensor): The n hypervector values of shape (*, n, d).
+
+    Returns:
+        Tensor: output hypervector of shape (*, d)
+    """
+    dim = -2
+    n = input.size(dim)
+
+    enum = enumerate(torch.unbind(input, dim))
+    permuted = [permute(hv, shifts=n - i - 1) for i, hv in enum]
+    permuted = torch.stack(permuted, dim)
+
+    return multibind(permuted)
+
+
 def map_range(
     input: Tensor,
     in_min: float,
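To make the two sequence encodings above concrete, here is the same permute-then-reduce pattern sketched in plain torch, assuming permute() is a cyclic shift along the hypervector dimension (as with torch.roll); the tensors are illustrative:

import torch

hv = torch.randn(4, 10000)  # a sequence of 4 hypervectors
n = hv.size(-2)

# Tag each value by its position: the i-th value is cyclically shifted n - i - 1 times.
shifted = torch.stack(
    [torch.roll(v, shifts=n - i - 1, dims=-1) for i, v in enumerate(torch.unbind(hv, -2))],
    dim=-2,
)

seq = torch.sum(shifted, dim=-2)            # bundling-based sequence (multiset of the permuted values)
distinct_seq = torch.prod(shifted, dim=-2)  # binding-based sequence (multibind of the permuted values)

Because the amount of shifting depends on position, swapping two values changes the encoding, which is what distinguishes a sequence from a plain multiset.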
@@ -554,6 +628,7 @@ def cleanup(input: Tensor, memory: Tensor, threshold=0.0) -> Tensor:
     Args:
         input (Tensor): The hypervector to clean up
         memory (Tensor): The `n` hypervectors in memory of shape (n, d)
+        threshold (float, optional): minimal similarity between input and any hypervector in memory. Defaults to 0.0.

     Returns:
         Tensor: output tensor
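One plausible reading of the new threshold argument, written as a hypothetical helper (assuming cosine similarity; the actual cleanup may use a different similarity measure):

import torch

def cleanup_sketch(input, memory, threshold=0.0):
    # Compare the noisy input against every stored hypervector and return the closest
    # one, requiring the best match to reach at least the similarity threshold.
    scores = torch.nn.functional.cosine_similarity(input.unsqueeze(0), memory, dim=-1)
    best = int(torch.argmax(scores))
    if scores[best] < threshold:
        raise KeyError("no hypervector in memory is similar enough to the input")
    return memory[best]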