path: root/modules/sub_quadratic_attention.py
author    brkirch <brkirch@users.noreply.github.com> 2023-01-06 16:42:47 -0500
committer brkirch <brkirch@users.noreply.github.com> 2023-01-06 16:42:47 -0500
commit    c18add68ef7d2de3617cbbaff864b0c74cfdf6c0 (patch)
tree      6388fd13dad83130907dd39859c069eb9e014416 /modules/sub_quadratic_attention.py
parent    848605fb654a55ee6947335d7df6e13366606fad (diff)
Added license
Diffstat (limited to 'modules/sub_quadratic_attention.py')
-rw-r--r-- modules/sub_quadratic_attention.py | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sub_quadratic_attention.py b/modules/sub_quadratic_attention.py
index 95924d24..fea7aaac 100644
--- a/modules/sub_quadratic_attention.py
+++ b/modules/sub_quadratic_attention.py
@@ -1,7 +1,7 @@
# original source:
# https://github.com/AminRezaei0x443/memory-efficient-attention/blob/1bc0d9e6ac5f82ea43a375135c4e1d3896ee1694/memory_efficient_attention/attention_torch.py
# license:
-# unspecified
+# MIT License (see Memory Efficient Attention under the Licenses section in the web UI interface for the full license)
# credit:
# Amin Rezaei (original author)
# Alex Birch (optimized algorithm for 3D tensors, at the expense of removing bias, masking and callbacks)
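Below is a minimal, self-contained sketch of the chunked ("sub-quadratic") attention idea credited in the header above, not the repository's actual implementation. The assumption, following the credit note, is that tensors are 3D (batch * heads, tokens, channels per head) and that bias, masking, and callbacks are omitted; keys and values are processed in chunks so peak memory scales with the chunk size rather than quadratically with sequence length.

# Hypothetical illustration only; function and parameter names are not from the patched file.
import torch

def chunked_attention(q, k, v, kv_chunk_size=1024):
    scale = q.shape[-1] ** -0.5
    chunk_maxes, chunk_outputs, chunk_weights = [], [], []
    for i in range(0, k.shape[1], kv_chunk_size):
        k_chunk = k[:, i:i + kv_chunk_size]
        v_chunk = v[:, i:i + kv_chunk_size]
        # Attention scores for this key/value chunk only.
        scores = torch.bmm(q, k_chunk.transpose(1, 2)) * scale
        # Track the per-chunk max so the softmax can be renormalized globally later.
        m = scores.max(dim=-1, keepdim=True).values
        p = torch.exp(scores - m)
        chunk_maxes.append(m)
        chunk_outputs.append(torch.bmm(p, v_chunk))
        chunk_weights.append(p.sum(dim=-1, keepdim=True))
    # Combine chunks: rescale each partial result to a common max, then normalize.
    global_max = torch.stack(chunk_maxes).max(dim=0).values
    out = sum(o * torch.exp(m - global_max) for o, m in zip(chunk_outputs, chunk_maxes))
    denom = sum(w * torch.exp(m - global_max) for w, m in zip(chunk_weights, chunk_maxes))
    return out / denom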