Memoize computing tokens

I'm not sure how expensive this is, but it doesn't need doing for every
request.
Christopher Baines 2023-11-24 16:11:57 +00:00
parent 241a704db1
commit b9026488ed
1 changed file with 16 additions and 13 deletions
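For context, memoize from the (guix memoization) module imported below wraps a
procedure so that the result for a given argument list is computed once and
then served from a cache on later calls. A minimal sketch of that behaviour,
with illustrative names that are not part of this commit, and assuming Guix's
Guile modules are on the load path:

(use-modules (guix memoization))

;; Counter showing how often the underlying computation actually runs.
(define call-count 0)

(define cached-square
  (memoize
   (lambda (x)
     (set! call-count (+ call-count 1))
     (* x x))))

(cached-square 3)   ;=> 9, computed
(cached-square 3)   ;=> 9, returned from the cache
call-count          ;=> 1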


@@ -21,21 +21,24 @@
   #:use-module (squee)
   #:use-module (gcrypt hash)
   #:use-module (gcrypt base64)
+  #:use-module (guix memoization)
   #:export (compute-tokens-for-build-server))
 
-(define (compute-token secret-key-base build-server-id token-seed)
-  (let ((source-string
-         (simple-format #f "~A:~A:~A"
-                        secret-key-base
-                        build-server-id
-                        token-seed)))
-    (string-filter
-     ;; Remove the + / and = to make handling the value easier
-     char-set:letter+digit
-     (base64-encode
-      (bytevector-hash
-       (string->utf8 source-string)
-       (hash-algorithm sha1))))))
+(define compute-token
+  (memoize
+   (lambda (secret-key-base build-server-id token-seed)
+     (let ((source-string
+            (simple-format #f "~A:~A:~A"
+                           secret-key-base
+                           build-server-id
+                           token-seed)))
+       (string-filter
+        ;; Remove the + / and = to make handling the value easier
+        char-set:letter+digit
+        (base64-encode
+         (bytevector-hash
+          (string->utf8 source-string)
+          (hash-algorithm sha1))))))))
 
 (define (compute-tokens-for-build-server conn secret-key-base build-server-id)
   (define query