diff --git a/.gitignore b/.gitignore index 07858e3b..610edf60 100644 --- a/.gitignore +++ b/.gitignore @@ -40,3 +40,4 @@ main/main /main/protocol.json /main/identities.json /load-test/config.json +/integration-test/config.json diff --git a/Makefile b/Makefile index 8664c1f3..796f23f2 100644 --- a/Makefile +++ b/Makefile @@ -39,7 +39,7 @@ IMAGE_TAG := $(VERSION) IMAGE_ARCHS := amd64 arm arm64 386 # supported architectures GO = go -GO_VERSION := 1.16 +GO_VERSION := 1.19 LDFLAGS = -ldflags "-buildid= -s -w -X main.Version=$(VERSION) -X main.Revision=$(REVISION)" GO_BUILD = $(GO) build -tags="netgo" -trimpath $(LDFLAGS) UPX=upx --quiet --quiet @@ -50,7 +50,7 @@ THISDIR = $(dir $(realpath $(firstword $(MAKEFILE_LIST)))) .PHONY: lint lint: @# we supress echoing the command, so every output line - @# can be considered a linting error. + @# can be considered a linting error. @$(DOCKER) run --rm -v $(THISDIR):/app:ro -w /app $(GO_LINTER_IMAGE) golangci-lint run .PHONY: build @@ -66,9 +66,10 @@ test: $(DOCKER) run -t --rm -v $(THISDIR):/app -w /app golang:$(GO_VERSION) \ go test ./... -.PHONY: image +.PHONY: image image: - $(DOCKER) build -t $(IMAGE_REPO):$(IMAGE_TAG) \ + $(DOCKER) build -t $(IMAGE_REPO):$(IMAGE_TAG)-arm \ + --build-arg="GOARCH=arm" \ --build-arg="VERSION=$(VERSION)" \ --build-arg="REVISION=$(REVISION)" \ --build-arg="GOVERSION=$(GO_VERSION)" \ diff --git a/README.md b/README.md index e4606395..e5fe8976 100644 --- a/README.md +++ b/README.md @@ -67,12 +67,10 @@ make publish IMAGE_TAG=stable # will tag a multi-arch image with the selected ta The configuration can be set via a configuration file (`config.json`) or environment variables. -There are three mandatory configurations: +There are two mandatory configurations: -1. a DSN to connect to a postgreSQL database, where the protocol context (i.e. keys, previous signatures, auth - tokens) can be stored -2. a 32 byte base64 encoded secret, which will be used to encrypt the signing keys -3. a static authentication token to protect the registration endpoint +1. the desired database driver and DSN (see [Context Management](#Context-Management)) +2. a 32 byte base64 encoded secret, which will be used to encrypt the signing keys in the database > You can generate a random 32 byte base64 encoded secret in a Linux/macOS terminal > with `head -c 32 /dev/urandom | base64` @@ -80,7 +78,7 @@ There are three mandatory configurations: At start-up, the client will first check if the `UBIRCH_SECRET32` environment variable exists and, if it does exist, load the configuration from the environment variables. If the `UBIRCH_SECRET32` environment variable is not set or empty, the client will try to load the `config.json`-file from the working directory. If neither exist, the client will -abort and exit. +abort and exit with status `1`. ### File Based Configuration @@ -88,9 +86,7 @@ abort and exit. ```json { - "postgresDSN": "postgres://:@:5432/", - "secret32": "<32 byte secret (base64 encoded)>", - "registerAuth": "" + "secret32": "<32 byte secret (base64 encoded)>" } ``` @@ -98,10 +94,8 @@ abort and exit. ### Environment Based Configuration -```shell -UBIRCH_POSTGRES_DSN=postgres://:@:5432/ +```console UBIRCH_SECRET32=<32 byte secret (base64 encoded)> -UBIRCH_REGISTERAUTH= ``` > See [example.env](main/config/example.env) as an example for environment-based configuration. 
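
If no shell is at hand, an equivalent secret can be produced with a short Python sketch (same idea as the `head -c 32 /dev/urandom | base64` one-liner mentioned above):

```python
import base64
import os

# 32 random bytes, base64 encoded - equivalent to `head -c 32 /dev/urandom | base64`
print(base64.b64encode(os.urandom(32)).decode())
```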
@@ -109,23 +103,97 @@ UBIRCH_REGISTERAUTH= All further configuration parameters have default values, that can be changed as described under [Optional Configurations](#optional-configurations). -## Identity Initialization +## Context Management + +The identity context is stored persistently in a database. The user can choose between connecting to a postgres database +or using the local file system, i.e. SQLite. + +- `config.json`: + ```json + "dbDriver": "", + ``` +- or environment variable: + ```console + UBIRCH_DB_DRIVER= + ``` + +### PostgreSQL + +In order to connect the client to a postgres database, the DSN must be set in the configuration. + +- add the following key-value pair to your `config.json`: + ```json + "dbDSN": "postgres://:@:5432/", + ``` +- or set the following environment variable: + ```console + UBIRCH_DB_DSN=postgres://:@:5432/ + ``` + +The maximum number of open connections to the database can be limited with `dbMaxConns` (json) +or `UBIRCH_DB_MAX_CONNS` (env). The default is `0` (unlimited). + +### SQLite + +If the driver is set to `sqlite`, the client will by default create a SQLite database file `sqlite.db` in the mounted +volume upon first startup. + +Alternatively, a custom path (relative to the mounted volume) and filename can be set in the configuration. +It is also possible to overwrite the default SQLite database configuration by appending a `?` followed by a query string +to the filename. For more information about this, see https://pkg.go.dev/modernc.org/sqlite#Driver.Open + +- `config.json`: + ```json + "dbDSN": "path/to/sqlite.db", + ``` +- or environment variable: + ```console + UBIRCH_DB_DSN=path/to/sqlite.db + ``` + +The use of a SQLite database is appropriate in case the application is running on a system with limited space, like +embedded devices, and only one or very few identities need to be managed. + +When compared to postgreSQL, a drawback of SQLite is the performance while handling a high load of chaining +requests for multiple identities at the same time. + +## Identity Registration / Initialization The UBIRCH client is able to handle multiple cryptographic identities. An identity has a universally unique identifier (UUID), private and public key, and an auth token. -Before signing requests can be processed, an identity has to be initialized. +Before signing requests can be processed, a UUID with its auth token has to be registered at the UBIRCH client. -The initialization consists of three parts: +Registering a new UUID triggers an identity initialization, which consists of three parts: -- key generation (ECDSA) +- key pair generation (ECDSA) - public key registration at the UBIRCH backend -- storing of the context in the database +- storing of the identity context in the database + +Identities can be registered at the UBIRCH client in two ways: + +1. via [configuration](#identity-initialization-via-configuration) +2. via [HTTP request](#identity-registration) -The initialization can be triggered via [HTTP request](#identity-registration) -or [configuration](#identity-initialization-via-configuration). +If identity registration via HTTP requests is desired, the respective endpoint must be enabled and a static +authentication token must be set in the configuration. +This token is then used to authenticate requests against the identity registration endpoint. -Either way, the first step is to register the identity's UUID with the UBIRCH backend -and acquire an authentication token. 
+- json: + +```json + "staticAuth": "", + "enableRegistrationEndpoint": true, +``` + +- env: + +```console +UBIRCH_STATIC_AUTH= +UBIRCH_ENABLE_REGISTRATION_ENDPOINT=true +``` + +Before registering a new identity at the UBIRCH client, the first step is to register the identity's UUID with the +UBIRCH backend and acquire an authentication token for that identity. > A UUID can easily be generated in a Linux/macOS terminal with the `uuidgen` command. @@ -150,26 +218,29 @@ their authentication token. } ``` - or as environment variable: - ```shell + ```console UBIRCH_DEVICES=:,: ``` Alternatively, the device UUIDs and their corresponding authentication tokens can also be set through a file `identities.json`. See example: [example_identities.json](main/config/example_identities.json) +Once the identities have been initialized successfully, their UUIDs and auth tokens are persistently stored in the +connected database and can be removed from the configuration. + ## Run Client in Docker container To get the latest multi-architecture image, check the [releases](https://github.com/ubirch/ubirch-client-go/releases/latest) and pull the latest release from Docker Hub using the release tag, e.g.: -```console +```shell docker pull ubirch/ubirch-client:v2.2.3 ``` To start the multi-arch Docker image on any system, run: -```console +```shell docker run -v $(pwd):/data --network host ubirch/ubirch-client:v2.2.3 ``` @@ -179,21 +250,37 @@ variables), and the TLS certificate and key files (if TLS is enabled). It is also possible to pass an absolute path instead of `$(pwd)`. -If the */data* path is not used for either configuration file, nor TLS cert files, +If the */data* path is not used for either configuration file, SQLite DB, nor TLS cert files, the `-v $(pwd):/data` parameter can be omitted. ## Interface Description -The UBIRCH client provides HTTP endpoints for both original data and direct hash injection, i.e. the SHA256 digest of -the original data. If the client receives original data, it will create a SHA256 hash before any further processing. - -This means, UBIRCH will never see your original data. It also means that the original data will have to be stored -independently in order to be able to verify it later. - -> When receiving a JSON data package, the UBIRCH client will sort the keys alphabetically and remove insignificant -> space characters before hashing. -> -> See [reproducibility of hashes](#reproducibility-of-hashes). 
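
For a quick smoke test against a running client, the unauthenticated liveness and readiness endpoints listed in the table below can be queried directly. This is a minimal Python sketch (assuming the client listens on the default `localhost:8080`), mirroring the checks in the integration test:

```python
import requests

host = "http://localhost:8080"  # assumption: client running locally on the default port

# /healthz and /readyz return status 200 with body "OK\n" when the client is up and ready
for path in ("/healthz", "/readyz"):
    res = requests.get(host + path)
    print(path, res.status_code, res.text.strip())
```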
+| Endpoint | Description | +|--------------------------------|---------------------------------------------------------------------------| +| `/healthz` | Liveness check | +| `/readyz` | Readiness check | +| `/metrics` | Prometheus Metrics | +| `/register` | [Identity Registration](#Identity-Registration) | +| `/${uuid}/csr` | [CSR Generation](#CSR-Generation) | +| `/${uuid}` | [Anchoring Hashes (chained) - original data](#Anchoring-Hashes) | +| `/${uuid}/hash` | [Anchoring Hashes (chained) - hash](#Anchoring-Hashes) | +| `/${uuid}/anchor` | [Anchoring Hashes (no chain) - original data](#Anchoring-Hashes) | +| `/${uuid}/anchor/hash` | [Anchoring Hashes (no chain) - hash](#Anchoring-Hashes) | +| `/${uuid}/disable` | [Disabling Hashes - original data](#Update-Operations) | +| `/${uuid}/disable/hash` | [Disabling Hashes - hash](#Update-Operations) | +| `/${uuid}/enable` | [Enabling Hashes - original data](#Update-Operations) | +| `/${uuid}/enable/hash` | [Enabling Hashes - hash](#Update-Operations) | +| `/${uuid}/delete` | [Deleting Hashes - original data](#Update-Operations) | +| `/${uuid}/delete/hash` | [Deleting Hashes - hash](#Update-Operations) | +| `/${uuid}/offline` | [Offline Sealing Hashes (chained) - original data](#UPP-Offline-Sealing) | +| `/${uuid}/offline/hash` | [Offline Sealing Hashes (chained) - hash](#UPP-Offline-Sealing) | +| `/${uuid}/anchor/offline` | [Offline Sealing Hashes (no chain) - original data](#UPP-Offline-Sealing) | +| `/${uuid}/anchor/offline/hash` | [Offline Sealing Hashes (no chain) - hash](#UPP-Offline-Sealing) | +| `/verify` | [Verifying Hashes - original data](#UPP-Verification) | +| `/verify/hash` | [Verifying Hashes - hash](#UPP-Verification) | +| `/verify/offline` | [Offline Verification - original data](#UPP-Offline-Verification) | +| `/verify/offline/hash` | [Offline Verification - hash](#UPP-Offline-Verification) | +| `/device/updateActive` | [Key De- and Re-activation](#Key-Deactivation) | ### Identity Registration @@ -201,7 +288,7 @@ Sending a registration request invokes the generation of a ECDSA key pair for si contains an X.509 Certificate Signing Request in PEM format. curl ${host}/register -X PUT \ - -H "X-Auth-Token: ${registerAuth}" \ + -H "X-Auth-Token: ${staticAuth}" \ -H "Content-Type: application/json" \ -d '{"uuid":${device_uuid}, "password":${password}}' \ -i @@ -210,55 +297,109 @@ The "password" is the [UBIRCH backend authentication token](#how-to-acquire-the- ### CSR Generation +If CSR creation via HTTP requests is desired, the respective endpoint must be enabled and a static +authentication token must be set in the configuration. +This token is then used to authenticate requests against the CSR creation endpoint. + +- json: + +```json + "staticAuth": "", + "enableCSRCreationEndpoint": true, +``` + +- env: + +```console +UBIRCH_STATIC_AUTH= +UBIRCH_ENABLE_CSR_CREATION_ENDPOINT=true +``` + A CSR for an already registered identity can be retrieved from the CSR endpoint. curl ${host}/${uuid}/csr -X GET \ - -H "X-Auth-Token: ${registerAuth}" \ + -H "X-Auth-Token: ${staticAuth}" \ -i ### UPP Signing +The UBIRCH client provides HTTP endpoints for both original data and direct hash injection, i.e. the SHA256 digest of +the original data. If the client receives original data, it will create a SHA256 hash before any further processing. + +This means, UBIRCH will never see your original data. It also means that the original data will have to be stored +independently in order to be able to verify it later. 
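
For reference, the hash of a JSON data package can be reproduced locally with a few lines of Python, applying the canonicalization described in the note below (keys sorted alphabetically, insignificant whitespace removed) before hashing. This is a sketch mirroring the integration test helpers; the example values are taken from the curl examples further down:

```python
import base64
import hashlib
import json

data = {"id": "ba70ad8b-a564-4e58-9a3b-224ac0f0153f", "ts": 1585838578, "data": "1234567890"}

# canonicalize the JSON: sort keys alphabetically, drop insignificant whitespace
serialized = json.dumps(data, separators=(",", ":"), sort_keys=True, ensure_ascii=False).encode()

# SHA256 digest, base64 encoded - this is the hash that gets signed and anchored
print(base64.b64encode(hashlib.sha256(serialized).digest()).decode())
```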
+ +> When receiving a JSON data package, the UBIRCH client will sort the keys alphabetically and remove insignificant +> space characters before hashing. +> +> See [reproducibility of hashes](#reproducibility-of-hashes). + Signing service endpoints require an authentication token, which corresponds to the `UUID` used in the request. The -token must be sent with the request header. Without it, the client will not accept the request. +token must be sent with the request header. -| Request Header | Description | +| Request Header | Description | |----------------|------------------------------------------| | `X-Auth-Token` | UBIRCH backend token related to `` | > See [how to acquire the UBIRCH backend token](#how-to-acquire-the-ubirch-backend-authentication-token). -#### Anchoring Hashes (chained) +#### Anchoring Hashes -| Method | Path | Content-Type | Description | -|--------|------|--------------|-------------| -| POST | `/` | `application/octet-stream` | original data (binary) will be hashed, chained, signed, and anchored | -| POST | `/` | `application/json` | original data (JSON data package) will be hashed, chained, signed, and anchored | -| POST | `//hash` | `application/octet-stream` | SHA256 hash (binary) will be chained, signed, and anchored | -| POST | `//hash` | `text/plain` | SHA256 hash (base64 string repr.) will be chained, signed, and anchored | +- chained -#### Anchoring Hashes (no chain) +| Method | Path | Content-Type | Description | +|--------|----------------|----------------------------|---------------------------------------------------------------------------------| +| POST | `/` | `application/octet-stream` | original data (binary) will be hashed, chained, signed, and anchored | +| POST | `/` | `application/json` | original data (JSON data package) will be hashed, chained, signed, and anchored | +| POST | `//hash` | `application/octet-stream` | SHA256 hash (binary) will be chained, signed, and anchored | +| POST | `//hash` | `text/plain` | SHA256 hash (base64 string repr.) will be chained, signed, and anchored | -| Method | Path | Content-Type | Description | -|--------|------|--------------|-------------| -| POST | `//anchor` | `application/octet-stream` | original data (binary) will be hashed signed, and anchored | -| POST | `//anchor` | `application/json` | original data (JSON data package) will be hashed signed, and anchored | -| POST | `//anchor/hash` | `application/octet-stream` | SHA256 hash (binary) will be signed, and anchored | -| POST | `//anchor/hash` | `text/plain` | SHA256 hash (base64 string repr.) will be signed, and anchored | +- no chain + +| Method | Path | Content-Type | Description | +|--------|-----------------------|----------------------------|------------------------------------------------------------------------| +| POST | `//anchor` | `application/octet-stream` | original data (binary) will be hashed, signed, and anchored | +| POST | `//anchor` | `application/json` | original data (JSON data package) will be hashed, signed, and anchored | +| POST | `//anchor/hash` | `application/octet-stream` | SHA256 hash (binary) will be signed, and anchored | +| POST | `//anchor/hash` | `text/plain` | SHA256 hash (base64 string repr.) will be signed, and anchored | #### Update Operations Beside anchoring, the client can request hash update operations from the UBIRCH backend, i.e. `disable`, `enable` and `delete`. 
-| Update Operation | Path (original data)| Path (hash) | -|------------------|---------------------|-------------| -| disable | `//disable` | `//disable/hash` | -| enable | `//enable` | `//enable/hash` | -| delete | `//delete` | `//delete/hash` | +| Update Operation | Path (original data) | Path (hash) | +|------------------|----------------------|------------------------| +| disable | `//disable` | `//disable/hash` | +| enable | `//enable` | `//enable/hash` | +| delete | `//delete` | `//delete/hash` | Hash update requests to the UBIRCH backend must come from the same UUID that anchored said hash and be signed by the same private key that signed the anchoring request. +#### UPP Offline Sealing + +The client supports offline hash sealing, where the created UPP is not sent to the UBIRCH backend, but only returned as +part of the HTTP response content. + +- chained + +| Method | Path | Content-Type | Description | +|--------|------------------------|----------------------------|-----------------------------------------------------------------------| +| POST | `//offline` | `application/octet-stream` | original data (binary) will be hashed, chained, and signed | +| POST | `//offline` | `application/json` | original data (JSON data package) will be hashed, chained, and signed | +| POST | `//offline/hash` | `application/octet-stream` | SHA256 hash (binary) will be chained and signed | +| POST | `//offline/hash` | `text/plain` | SHA256 hash (base64 string repr.) will be chained and signed | + +- no chain + +| Method | Path | Content-Type | Description | +|--------|-------------------------------|----------------------------|-------------------------------------------------------------| +| POST | `//anchor/offline` | `application/octet-stream` | original data (binary) will be hashed and signed | +| POST | `//anchor/offline` | `application/json` | original data (JSON data package) will be hashed and signed | +| POST | `//anchor/offline/hash` | `application/octet-stream` | SHA256 hash (binary) will be signed | +| POST | `//anchor/offline/hash` | `text/plain` | SHA256 hash (base64 string repr.) will be signed | + #### UPP Signing Response Response codes indicate the successful delivery of the UPP to the UBIRCH backend. 
Any code other than `200` should be @@ -273,7 +414,6 @@ The response body consists of either an error message, or a JSON map with - the public key, that corresponds to the private key with which the UPP was signed, - the response from the UBIRCH backend, - the unique request ID -- *optional:* a description of an occurred error (**the `error`-key is only present in case an error occurred**) ```fundamental { @@ -286,7 +426,6 @@ The response body consists of either an error message, or a JSON map with "content": "" }, "requestID": "", - "error": "error message" } ``` @@ -307,10 +446,11 @@ The response body consists of either an error message, or a JSON map with | | | x | invalid SHA256 hash size (≠ 32 bytes) | | 401 - Unauthorized | x | x | unknown UUID | | | x | x | invalid auth token | +| 403 - Forbidden | x | x | UPP signature verification failed (*only for verification*) | | 404 - Not Found | x | x | invalid UUID | | | x | x | invalid operation (≠ `anchor` / `disable` / `enable` / `delete`) | | 500 - Internal Server Error | x | x | signing failed | -| | x | x | sending request to server failed | +| 502 - Bad Gateway | x | x | sending request to UBIRCH backend failed | | 503 - Service Temporarily Unavailable | x | x | service busy | | 504 - Gateway Timeout | x | x | service was unable to produce a timely response | @@ -327,15 +467,15 @@ for *Niomon* error codes. 1. original data (JSON): - anchor hash (**chained**) - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: application/json" \ -d '{"id": "ba70ad8b-a564-4e58-9a3b-224ac0f0153f", "ts": 1585838578, "data": "1234567890"}' \ -i ``` - - anchor hash (**unchained**) - ```console + - anchor hash (**no chain**) + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/anchor \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: application/json" \ @@ -343,7 +483,7 @@ for *Niomon* error codes. -i ``` - disable hash - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/disable \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: application/json" \ @@ -351,7 +491,7 @@ for *Niomon* error codes. -i ``` - enable hash - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/enable \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: application/json" \ @@ -359,7 +499,7 @@ for *Niomon* error codes. -i ``` - delete hash - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/delete \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: application/json" \ @@ -370,15 +510,15 @@ for *Niomon* error codes. 1. direct data hash injection - anchor hash (**chained**) - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/hash \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: text/plain" \ -d "wp1WK/3z5yHiGBYUZReiMN4UVM2lUJzAtGg9kFtdy3A=" \ -i ``` - - anchor hash (**unchained**) - ```console + - anchor hash (**no chain**) + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/anchor/hash \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: text/plain" \ @@ -386,7 +526,7 @@ for *Niomon* error codes. 
-i ``` - disable hash - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/disable/hash \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: text/plain" \ @@ -394,7 +534,7 @@ for *Niomon* error codes. -i ``` - enable hash - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/enable/hash \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: text/plain" \ @@ -402,7 +542,7 @@ for *Niomon* error codes. -i ``` - delete hash - ```console + ```shell curl localhost:8080/ba70ad8b-a564-4e58-9a3b-224ac0f0153f/delete/hash \ -H "X-Auth-Token: 32e325d5-b6a9-4800-b750-49c53b9350fc" \ -H "Content-Type: text/plain" \ @@ -414,12 +554,34 @@ for *Niomon* error codes. Verification endpoints do not require an authentication token. -| Method | Path | Content-Type | Description | -|--------|------|--------------|-------------| -| POST | `/verify` | `application/octet-stream` | verify hash of original data (binary) | -| POST | `/verify` | `application/json` | verify hash of original data (JSON data package) | -| POST | `/verify/hash` | `application/octet-stream` | verify hash (binary) | -| POST | `/verify/hash` | `text/plain` | verify hash (base64 string repr.) | +| Method | Path | Content-Type | Description | +|--------|----------------|----------------------------|--------------------------------------------------| +| POST | `/verify` | `application/octet-stream` | verify hash of original data (binary) | +| POST | `/verify` | `application/json` | verify hash of original data (JSON data package) | +| POST | `/verify/hash` | `application/octet-stream` | verify hash (binary) | +| POST | `/verify/hash` | `text/plain` | verify hash (base64 string repr.) | + +#### UPP Offline Verification + +It is possible to verify that a UPP contains a given data hash and has a valid signature of a known identity without an +internet connection. + +Just like the standard verification, the offline verification endpoint expects the data or data hash in the request +body, but additionally expects the base64 representation of the UPP in the header `X-Ubirch-UPP`. + +```shell +curl ${host}/verify/offline -X POST \ + -H "X-Ubirch-UPP: liPEEO6QegJtYkRNpXhDJV/hplXEQGLoJh2SbSjQ7datOhGsWokSqO7Sckts1LGOxiBQ8SZoeql8ypLHpDLiYXQZ4MJ9vx1y/5rXwKl7VV+PG8eNEFAAxCCSV78s1WG2QGMS5vBVZOF51/JDHjjBqk/8x3VgpfL+dMRA7xvnoOSTNAYMJxItAkbzAMcD+YP2AX1bkfkV8EJUCNk7oI8DSPKmnNZ8gsb7fEv6DXGMTdFkGTtOvgpmkCNU7g==" \ + -H "Content-Type: application/json" \ + -d '{"id": "ee907a02-6d62-444d-a578-43255fe1a655", "ts": 1651746633, "data": "1234567890"}' +``` + +```shell +curl ${host}/verify/offline/hash -X POST \ + -H "X-Ubirch-UPP: liPEEO6QegJtYkRNpXhDJV/hplXEQGLoJh2SbSjQ7datOhGsWokSqO7Sckts1LGOxiBQ8SZoeql8ypLHpDLiYXQZ4MJ9vx1y/5rXwKl7VV+PG8eNEFAAxCCSV78s1WG2QGMS5vBVZOF51/JDHjjBqk/8x3VgpfL+dMRA7xvnoOSTNAYMJxItAkbzAMcD+YP2AX1bkfkV8EJUCNk7oI8DSPKmnNZ8gsb7fEv6DXGMTdFkGTtOvgpmkCNU7g==" \ + -H "Content-Type: text/plain" \ + -d "kle/LNVhtkBjEubwVWThedfyQx44wapP/Md1YKXy/nQ=" +``` #### UPP Verification Response @@ -444,20 +606,41 @@ The response body consists of either an error message, or a JSON map with } ``` -### Key De-/Re-Activation +### Key Deactivation + +If key de- and re-activation via HTTP requests is desired, the respective endpoint must be enabled and a static +authentication token must be set in the configuration. +This token is then used to authenticate requests against the deactivation endpoint. -A key can be de-activated ... 
+- json: + +```json + "staticAuth": "", + "enableDeactivationEndpoint": true, +``` + +- env: + +```console +UBIRCH_STATIC_AUTH= +UBIRCH_ENABLE_DEACTIVATION_ENDPOINT=true +``` + +A key can be deactivated with the following request. Signing requests for identities with deactivated key will fail with +status code `400`. curl ${host}/device/updateActive -X PUT \ - -H "X-Auth-Token: ${registerAuth}" \ + -H "X-Auth-Token: ${staticAuth}" \ -H "Content-Type: application/json" \ -d '{"id":${device_uuid},"active":false}' \ -i -... and re-activated . +### Key Reactivation + +A deactivated key can be reactivated with the following request. curl ${host}/device/updateActive -X PUT \ - -H "X-Auth-Token: ${registerAuth}" \ + -H "X-Auth-Token: ${staticAuth}" \ -H "Content-Type: application/json" \ -d '{"id":${device_uuid},"active":true}' \ -i @@ -479,7 +662,7 @@ http://localhost:8080 Here is an example of a request to the client using `CURL`. - original data (JSON): - ```console + ```shell curl localhost:8080/ \ -H "X-Auth-Token: " \ -H "Content-Type: application/json" \ @@ -488,7 +671,7 @@ Here is an example of a request to the client using `CURL`. ``` - direct data hash injection: - ```console + ```shell curl localhost:8080//hash \ -H "X-Auth-Token: " \ -H "Content-Type: text/plain" \ @@ -557,6 +740,29 @@ package!** ## Optional Configurations +### SQLite DSN + +If no postgres DSN is set, the client defaults to the usage of a SQLite database with the following DSN + +``` +sqlite.db?_txlock=EXCLUSIVE&_pragma=journal_mode(WAL)&_pragma=synchronous(FULL)&_pragma=wal_autocheckpoint(4)&_pragma=wal_checkpoint(PASSIVE)&_pragma=journal_size_limit(32000)&_pragma=busy_timeout(100) +``` + +The default values can be overwritten by adding a custom SQLite DSN to the configuration. + +- json: + ```json + "sqliteDSN": "[?]", + ``` +- env: + ```console + UBIRCH_SQLITE_DSN=[?] + ``` + +A query string can optionally be appended to the database file name. If no query string is appended, the defaults from +above will be used. More information about the query string can be found in the documentation of the sqlite +library: https://pkg.go.dev/modernc.org/sqlite#Driver.Open + ### Set the UBIRCH backend environment The `env` configuration refers to the UBIRCH backend environment. The default value is `prod`, which is the production @@ -574,27 +780,10 @@ To switch to the `demo` backend environment "env": "demo" ``` - or set the following environment variable: - ```shell + ```console UBIRCH_ENV=demo ``` -### Customize X.509 Certificate Signing Requests - -The client creates X.509 Certificate Signing Requests (*CSRs*) for the public keys of the devices it is managing. The * -Common Name* of the CSR subject is the UUID associated with the public key. The values for the *Organization* and * -Country* of the CSR subject can be set through the configuration. - -- add the following key-value pairs to your `config.json`: - ```json - "CSR_country": "", - "CSR_organization": "" - ``` -- or set the following environment variables: - ```shell - UBIRCH_CSR_COUNTRY= - UBIRCH_CSR_ORGANIZATION= - ``` - ### Set TCP address You can specify the TCP address for the server to listen on, in the form `host:port`. If empty, port 8080 is used. 
@@ -604,7 +793,7 @@ You can specify the TCP address for the server to listen on, in the form `host:p "TCP_addr": ":8080", ``` - or set the following environment variable: - ```shell + ```console UBIRCH_TCP_ADDR=:8080 ``` @@ -623,7 +812,7 @@ You can specify the TCP address for the server to listen on, in the form `host:p In order to serve HTTPS endpoints, you can run the following command to create a self-signed certificate with openssl. With this command it will be valid for ten years. - ```console + ```shell openssl req -x509 -newkey rsa:4096 -keyout key.pem -nodes -out cert.pem -days 3650 ``` @@ -634,7 +823,7 @@ You can specify the TCP address for the server to listen on, in the form `host:p "TLS": true ``` - or set the following environment variable: - ```shell + ```console UBIRCH_TLS=true ``` @@ -650,7 +839,7 @@ You can specify the TCP address for the server to listen on, in the form `host:p "TLSKeyFile": "" ``` - or set the following environment variables: - ```shell + ```console UBIRCH_TLS_CERTFILE=certs/cert.pem UBIRCH_TLS_KEYFILE=certs/key.pem ``` @@ -669,7 +858,7 @@ To enable CORS and configure a list of *allowed origins*, i.e. origins a cross-d "CORS_origins": [""] ``` - or set the following environment variables: - ```shell + ```console UBIRCH_CORS=true UBIRCH_CORS_ORIGINS= ``` @@ -681,6 +870,23 @@ Setting *allowed origins* is optional. If CORS is enabled, but no *allowed origi is `["*"]` which means, **all** origins will be allowed. +### Customize X.509 Certificate Signing Requests + +The client creates X.509 Certificate Signing Requests (*CSRs*) for the public keys of the devices it is managing. The * +Common Name* of the CSR subject is the UUID associated with the public key. The values for the *Organization* and * +Country* of the CSR subject can be set through the configuration. + +- add the following key-value pairs to your `config.json`: + ```json + "CSR_country": "", + "CSR_organization": "" + ``` +- or set the following environment variables: + ```console + UBIRCH_CSR_COUNTRY= + UBIRCH_CSR_ORGANIZATION= + ``` + ### Extended Debug Output To set the logging level to `debug` and so enable extended debug output, @@ -690,7 +896,7 @@ To set the logging level to `debug` and so enable extended debug output, "debug": true ``` - or set the following environment variable: - ```shell + ```console UBIRCH_DEBUG=true ``` @@ -698,54 +904,146 @@ To set the logging level to `debug` and so enable extended debug output, By default, the log of the client is in JSON format. To change it to a (more human-eye-friendly) text format, -- add the following key-value pairs to your `config.json`: +- add the following key-value pair to your `config.json`: ```json "logTextFormat": true ``` -- or set the following environment variables: - ```shell +- or set the following environment variable: + ```console UBIRCH_LOGTEXTFORMAT=true ``` +### Log Known Identities + +To log the UUIDs of all known (registered) identities at startup, + +- add the following key-value pair to your `config.json`: + ```json + "logKnownIdentities": true + ``` +- or set the following environment variable: + ```console + UBIRCH_LOG_KNOWN_IDENTITIES=true + ``` + +### Request Timeouts + +The following request-related timeouts can be configured. 
+ +| json | env | description | default value | +|----------------------------|-------------------------------|------------------------------------------------------------------------------------------------------|---------------| +| `identityServiceTimeoutMs` | `IDENTITY_SERVICE_TIMEOUT_MS` | time limit for requests to the UBIRCH identity service in milliseconds | 10000 | +| `authServiceTimeoutMs` | `AUTH_SERVICE_TIMEOUT_MS` | time limit for requests to the UBIRCH authentication service (niomon) in milliseconds | 2000 | +| `verifyServiceTimeoutMs` | `VERIFY_SERVICE_TIMEOUT_MS` | time limit for requests to the UBIRCH verification service in milliseconds | 600 | +| `verificationTimeoutMs` | `VERIFICATION_TIMEOUT_MS` | time limit for repeated attempts to verify a hash at the UBIRCH verification service in milliseconds | 2000 | + +_If a hash can not be verified by the UBIRCH verification service, a possible reason is that the verification was +attempted too early after anchoring and that a subsequent request will be successful. Because of this, the client +retries the verification if it fails with an HTTP response code `404`._ + +- _`verifyServiceTimeoutMs` is the HTTP client timeout for each individual request to the verification service_ +- _`verificationTimeoutMs` is the max. duration that verification will be attempted repeatedly_ + +### Verification of UPPs from known identities only + +When the client receives a verification request for a UPP that was signed by an identity that is unknown to the client, +i.e. an external identity, the default behaviour is to request the public key of that identity from the UBIRCH identity +service in order to verify the signature locally. + +To disable that behaviour and only verify UPPs that were signed by a known identity, i.e. an identity for which the +public key exists in the database, + +- add the following key-value pair to your `config.json`: + ```json + "verifyFromKnownIdentitiesOnly": true + ``` +- or set the following environment variable: + ```console + UBIRCH_VERIFY_FROM_KNOWN_IDENTITIES_ONLY=true + ``` + +A simple workflow for setting up a system which locks out new data sources/identities, even if they are registered +with the UBIRCH backend, could be the following: + +> During setup and test operation, set `verifyFromKnownIdentitiesOnly` to false and make sure to test verification from +> all intended sending devices. The client will pull all necessary public keys from the backend and save them locally. +> +> As soon as the setup phase is over, set `verifyFromKnownIdentitiesOnly` to true to lock out any new devices. + +## Legacy file-based context migration + +Version 1 of the client used a file-based context management. In order to update the client from v1 to a newer version +(>v2) while keeping the existing context, the context can be migrated from the legacy context files into a database. + +First, add the new mandatory [configurations](#Configuration) to your existing configuration. + +To start the migration process, run the client with the command-line flag `--migrate`. + +```shell +docker run -v $(pwd):/data --network host ubirch/ubirch-client:v2.x.x /data --migrate +``` + +After successful migration, the process will exit with status `0`. In case of failed migration, the exit status is `1`. + +Lastly, the legacy context files can be deleted. + +```shell +rm -rf keys.json keys.json.bck signatures +``` + ## Quick Start 1. 
Configuration - First, you will need a device UUID, that is registered with the UBIRCH backend, and a corresponding auth token for - that device. You will also need a secret to encrypt the locally stored private keys: - 1. Generate a UUID for your device. On Linux/macOS, simply enter `uuidgen` in your terminal. Alternatively, you can - use an [online tool](https://www.uuidtools.com/v4). - 2. Get your auth token: + First, you will need a device UUID, that is registered with the UBIRCH backend, and a corresponding authentication + token for that device. You will also need a secret to encrypt the locally stored private keys: + 1. Generate a UUID for your device. On Linux or macOS, simply enter `uuidgen` in your terminal. Alternatively, you + can use an [online tool](https://www.uuidtools.com/v4). + 2. Get your device auth token: - Create an account at the [**UBIRCH web UI**](https://console.prod.ubirch.com/) and log in. - Go to **Things** (in the menu on the left) and click the green `+ ADD NEW DEVICE`-button. - Enter your UUID to the **ID** field and, optionally, a description. Then click on `register`. - After successful registration, you can click on your UUID to open the settings and copy the **"password"** - from the `apiConfig` as your auth token. - 3. Generate a 16 byte secret in base64 format. You can enter `head -c 16 /dev/urandom | base64` in a Linux/macOS - terminal or encode 16 ASCII characters in an [online base64 encoder](https://www.base64encode.org/). + from the `apiConfig` as your device auth token. + 3. Generate a 32 byte secret in base64 format. You can enter `head -c 32 /dev/urandom | base64` in a Linux/macOS + terminal or encode 32 ASCII characters in an [online base64 encoder](https://www.base64encode.org/). Create a file `config.json` in your working directory with the following content: ```json { "devices": { - "": "" + "": "" }, - "secret": "", - "logTextFormat": true + "secret32": "", + "dbDriver": "sqlite", + "logTextFormat": true, + "logKnownIdentities": true + } + ``` + - Replace `` with your device UUID from step 1.1. + - Replace `` with your device auth token from step 1.2. + - Replace `` with your secret from step 1.3. + + Your `config.json` should now look like this: + ```json + { + "devices": { + "e5085a89-a881-4397-902e-a630f021afd8": "f83a888f-cbf8-4d78-82a2-3e3f253f181d" + }, + "secret32": "kwNWDv1K8z/T4Muk8La4uzoUl2Q1G923rmm7kA5NrIE=", + "dbDriver": "sqlite", + "logTextFormat": true, + "logKnownIdentities": true } ``` - - Replace `` with your UUID from step 1.1. - - Replace `` with your auth token from step 1.2. - - Replace `` with your secret from step 1.3. - > [Here](main/config/example_config.json) is an example of how it should look like. 2. Run the client To run the dockerized UBIRCH client, you will need to have [Docker](https://docs.docker.com/) installed on your computer. Then enter the following two lines in the terminal in your working directory: - ```console - docker pull ubirch/ubirch-client:v1.2.2 - docker run -v $(pwd):/data -p 8080:8080 ubirch/ubirch-client:v1.2.2 + ```shell + docker pull ubirch/ubirch-client:v3.0.0 + docker run -v $(pwd):/data -p 8080:8080 ubirch/ubirch-client:v3.0.0 ``` When the client is first started, it will create an ECDSA key pair for your device and register the public key at the @@ -753,14 +1051,22 @@ By default, the log of the client is in JSON format. 
To change it to a (more hum You should see a console output like this: ```console - {"level":"info","msg":"UBIRCH client (v2.0.0, build=local)","time":"2021-03-01T18:41:20+01:00"} - {"level":"info","msg":"loading configuration from file: config.json","time":"2021-03-01T18:41:20+01:00"} - INFO[2021-03-01 18:41:20.291 +0100] 1 known UUID(s) - INFO[2021-03-01 18:41:20.291 +0100] UBIRCH backend environment: prod - INFO[2021-03-01 18:41:20.291 +0100] protocol context will be stored in local file system - INFO[2021-03-01 18:41:20.291 +0100] generating new key pair for UUID 50b1a5bb-83cd-4251-b674-b3c71a058fc3 - INFO[2021-03-01 18:41:20.664 +0100] 50b1a5bb-83cd-4251-b674-b3c71a058fc3: registering public key at key service: https://key.prod.ubirch.com/api/keyService/v1/pubkey - INFO[2021-03-01 18:41:22.130 +0100] starting HTTP service + {"level":"info","message":"UBIRCH client (version=devbuild, revision=0000000)","time":"2022-10-31T07:36:28Z"} + {"level":"info","message":"arg #1: /data","time":"2022-10-31T07:36:28Z"} + {"level":"info","message":"loading configuration from file: /data/config.json","time":"2022-10-31T07:36:28Z"} + time="2022-10-31 07:36:28.152 +0000" level=warning msg="identity registration endpoint disabled. To enable, set json:\"enableRegistrationEndpoint\" env:\"UBIRCH_ENABLE_REGISTRATION_ENDPOINT\" =true" + time="2022-10-31 07:36:28.153 +0000" level=warning msg="CSR creation endpoint disabled. To enable, set json:\"enableCSRCreationEndpoint\" env:\"UBIRCH_ENABLE_CSR_CREATION_ENDPOINT\" =true" + time="2022-10-31 07:36:28.154 +0000" level=warning msg="key deactivation endpoint disabled. To enable, set json:\"enableDeactivationEndpoint\" env:\"ENABLE_DEACTIVATION_ENDPOINT\" =true" + time="2022-10-31 07:36:28.155 +0000" level=info msg="UBIRCH backend environment: prod" + time="2022-10-31 07:36:28.155 +0000" level=info msg="initializing sqlite database connection" + time="2022-10-31 07:36:28.285 +0000" level=info msg="0 known internal identities (signing and verification)" + time="2022-10-31 07:36:28.287 +0000" level=info msg="0 known external identities (verification only)" + time="2022-10-31 07:36:28.296 +0000" level=info msg="e5085a89-a881-4397-902e-a630f021afd8: initializing identity" + time="2022-10-31 07:36:28.750 +0000" level=info msg="e5085a89-a881-4397-902e-a630f021afd8: key certificate: {\"pubKeyInfo\":{\"algorithm\":\"ecdsa-p256v1\",\"created\":\"2022-10-31T07:36:28.739Z\",\"hwDeviceId\":\"e5085a89-a881-4397-902e-a630f021afd8\",\"pubKey\":\"//3eUKJOrGaYCoPBOMMUquX3cn+EXHMqCKu7IJWu/Xs1x7oJ4HU6LLWksf8toG0ir1VreFo8A5tJEGvxmQbe0w==\",\"pubKeyId\":\"//3eUKJOrGaYCoPBOMMUquX3cn+EXHMqCKu7IJWu/Xs1x7oJ4HU6LLWksf8toG0ir1VreFo8A5tJEGvxmQbe0w==\",\"validNotAfter\":\"2032-10-28T07:36:28.739Z\",\"validNotBefore\":\"2022-10-31T07:36:28.739Z\"},\"signature\":\"GpGZzgTtvZ0InzvqNlNh3CEMkNxLY+G/og1qBe8J/ouhHs4OS5us1JEenzyym+cKJaHAaNYMscZA3jdrFxnZ+w==\"}" + time="2022-10-31 07:36:30.231 +0000" level=info msg="e5085a89-a881-4397-902e-a630f021afd8: creating CSR" + time="2022-10-31 07:36:30.257 +0000" level=info msg="e5085a89-a881-4397-902e-a630f021afd8: CSR [PEM]: -----BEGIN CERTIFICATE REQUEST-----\nMIIBDjCBtAIBADBSMQswCQYDVQQGEwJERTEUMBIGA1UEChMLdWJpcmNoIEdtYkgx\nLTArBgNVBAMTJGU1MDg1YTg5LWE4ODEtNDM5Ny05MDJlLWE2MzBmMDIxYWZkODBZ\nMBMGByqGSM49AgEGCCqGSM49AwEHA0IABP/93lCiTqxmmAqDwTjDFKrl93J/hFxz\nKgiruyCVrv17Nce6CeB1Oiy1pLH/LaBtIq9Va3haPAObSRBr8ZkG3tOgADAKBggq\nhkjOPQQDAgNJADBGAiEA8UANAK6JLUk+TQMZ4FtWsJQJT/dWyhonF/ZbUuV03n0C\nIQCQj7U/la0wf9FuBYvn813sQ3FE/P1E43fwLni0pxTH2g==\n-----END CERTIFICATE 
REQUEST-----\n" + time="2022-10-31 07:36:30.270 +0000" level=info msg="starting HTTP server" + ``` That means the client is running and ready! @@ -769,40 +1075,38 @@ By default, the log of the client is in JSON format. To change it to a (more hum --- **WARNING** - The client stores the encrypted signing keys in a local file `keys.json`, which will be created in the - working directory upon first start-up. **Do not delete this file**, as our backend will not accept new key - registrations once a device already has a registered key. - - The client also creates a subdirectory `/signatures`, which contains the previous UPP signatures for chaining. + The client stores the encrypted signing keys in a local file `sqlite.db`, which will be created in the + working directory upon first start-up. **Do not delete this file**, as our backend will not accept the + registration of a new key once a device already has a registered key. --- 3. Seal your data - The client is now listening for HTTP requests on port `8080`. You can either... - - send JSON data to the `/`-endpoint with `Content-Type: application/json`-header, or - - send hashes to the `//hash`-endpoint with `Content-Type: application/octet-stream`-header. + The client is now listening for HTTP requests on port `8080`. You can send either... + - JSON data packages to the `/`-endpoint with `Content-Type: application/json`-header, or + - SHA256 hashes of your data to the `//hash`-endpoint with `Content-Type: application/octet-stream`-header. Since the data hash for every UPP must be unique, ensure that the body of each request has a unique content. You can do that, for example, by adding an ID and a timestamp to the JSON data package. For more information see [Uniqueness of hashes](#Uniqueness-of-hashes). - **Floating-point numbers and integers greater than 253 are not allowed as values for the JSON data - package!** + Floating-point numbers and integers greater than 253 are not allowed as values for the JSON data + package! You also need to set the `X-Auth-Token`-header with your UBIRCH backend auth token from step 1. Here is an example of how a request to the client would look like using `CURL`: - ```console - curl localhost:8080/ \ - -H "X-Auth-Token: " \ + ```shell + curl localhost:8080/ \ + -H "X-Auth-Token: " \ -H "Content-Type: application/json" \ - -d '{"id": "50b1a5bb-83cd-4251-b674-b3c71a058fc3", "ts": 1614621028, "data": "1234567890"}' \ + -d '{"id": "e5085a89-a881-4397-902e-a630f021afd8", "ts": 1667202152, "data": "1234567890"}' \ -i -s ``` - > Insert `` and `` and a request body with your own unique content to ensure a unique - hash! + > Insert `` and `` and a request body with your own unique content to + > ensure a unique hash! In case of hash collision, the request will fail with status code `409`. When the client receives a request, it hashes the data from the request body and creates a chained Ubirch Protocol Package (UPP) with the data hash as payload. The UPP will be signed with the private key of the device and sent to @@ -810,11 +1114,12 @@ By default, the log of the client is in JSON format. 
To change it to a (more hum The console output of the client should look like this: ```console - INFO[2021-03-01 18:52:59.471 +0100] 50b1a5bb-83cd-4251-b674-b3c71a058fc3: anchoring hash: CDUvtOIBnnZ8im/UXQn5G/q5EK9l2Bqy+HyMgSzPZoA= - INFO[2021-03-01 18:53:00.313 +0100] 50b1a5bb-83cd-4251-b674-b3c71a058fc3: request ID: 0f11686e-aee3-4e97-8d0d-793a0c31d969 + time="2022-10-31 07:44:48.178 +0000" level=info msg="create UPP: uuid: e5085a89-a881-4397-902e-a630f021afd8, hash: 5snVjoqWbqLbhABMD1L5OguJyvcsxbJOECQSurDqs5k=, operation: chain, offline: false" + time="2022-10-31 07:44:48.909 +0000" level=info msg="e5085a89-a881-4397-902e-a630f021afd8: request ID: 3c6e0e19-63b6-4d42-b316-3f46481f14cc" + ``` - Take note of the hash for [verification](#verification). + > Take note of the hash for [verification](#verification). If your request was successful, you'll get the HTTP response code `200`. @@ -824,31 +1129,35 @@ By default, the log of the client is in JSON format. To change it to a (more hum this [MessagePack to JSON Converter](https://toolslick.com/conversion/data/messagepack-to-json). You can read more about UPPs [here](https://developer.ubirch.com/utp). - ```json + ```json { - "hash": "CDUvtOIBnnZ8im/UXQn5G/q5EK9l2Bqy+HyMgSzPZoA=", - "upp": "liPEEFCxpbuDzUJRtnSzxxoFj8PEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAxCAINS+04gGednyKb9RdCfkb+rkQr2XYGrL4fIyBLM9mgMRAIVlhgxobRl7ApJerXUyJ5cBxBJJ7gwPUN9AKgKJWxAxkWMWufRp8jW9Ha79s5hYbNp9+bn94cMflWyAyyjy4Ew==", - "response": { - "statusCode": 200, - "header": { - "Content-Length": [ - "187" - ], - "Content-Type": [ - "application/octet-stream" - ], - "Date": [ - "Mon, 01 Mar 2021 17:53:00 GMT" - ], - "Server": [ - "ubirch-trust-service/1.0" - ] - }, - "content": "liPEEJ08eP8i80RBpdGFxjbUhv/EQCFZYYMaG0ZewKSXq11MieXAcQSSe4MD1DfQCoCiVsQMZFjFrn0afI1vR2u/bOYWGzaffm5/eHDH5VsgMso8uBMAxCAPEWhuruNOl40NeToMMdlpAAAAAAAAAAAAAAAAAAAAAMRA+IFgAugN6CY1xPSch1TwhFdac8yRA1QhPRXOhUt7rudrwrNv0NAEJGlLw1wUSpcSLmBFQaoRb9EezmYxmtF7iA==" + "hash": "5snVjoqWbqLbhABMD1L5OguJyvcsxbJOECQSurDqs5k=", + "upp": "liPEEOUIWomogUOXkC6mMPAhr9jEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAxCDmydWOipZuotuEAEwPUvk6C4nK9yzFsk4QJBK6sOqzmcRA0fYMXgkdjoAOPE9jXV/gfBxb9kl9WierPozz+usi+WLUNTD98al0QX6TWB3i1pg43XDL0/lHf8E+4AhWfFFlCQ==", + "publicKey": "//3eUKJOrGaYCoPBOMMUquX3cn+EXHMqCKu7IJWu/Xs1x7oJ4HU6LLWksf8toG0ir1VreFo8A5tJEGvxmQbe0w==", + "response": { + "statusCode": 200, + "header": { + "Content-Length": [ + "187" + ], + "Content-Type": [ + "application/octet-stream" + ], + "Date": [ + "Mon, 31 Oct 2022 07:44:48 GMT" + ], + "Server": [ + "ubirch-trust-service/1.0" + ], + "Strict-Transport-Security": [ + "max-age=15552000; includeSubDomains; preload" + ] }, - "requestID": "0f11686e-aee3-4e97-8d0d-793a0c31d969" + "content": "liPEEBCy4aRWs0//mtrMjCD5MBbEQNH2DF4JHY6ADjxPY11f4HwcW/ZJfVonqz6M8/rrIvli1DUw/fGpdEF+k1gd4taYON1wy9P5R3/BPuAIVnxRZQkAxCA8bg4ZY7ZNQrMWP0ZIHxTMAAAAAAAAAAAAAAAAAAAAAMRAtCx79HokXAELQRbiEoE9YVPLfx4Zdh9fC93QO4X4e60HXseQUdVFtbuQBQiz2yqHBuoQyMQVsu2fBPreNihifA==" + }, + "requestID": "3c6e0e19-63b6-4d42-b316-3f46481f14cc" } - ``` + ``` If you get a response code other than `200`, it means that something went wrong. In this case the client will respond with an error message. You can also find error messages in the console output of the client. @@ -870,7 +1179,7 @@ UBIRCH verification service: https://verify.prod.ubirch.com/api/upp/verify/anchor ``` -> e.g. 
`curl -d '' https://verify.prod.ubirch.com/api/upp/verify/anchor` +> e.g. `curl -d '5snVjoqWbqLbhABMD1L5OguJyvcsxbJOECQSurDqs5k=' https://verify.prod.ubirch.com/api/upp/verify/anchor` This endpoint checks if the *UPP*, which contains the data hash has arrived correctly and was verifiable, gives information about the chain (*prev*ious UPP) as well as blockchain info on the time frame (the upper and lower bounds) @@ -881,27 +1190,25 @@ If the verification was successful, the service will send a *200* response with ```json { - "upp": "liPEEJnqh/TPxEVni9ELTBXq9V7EQGOMAcwCV4rbHGZT+A8sd2DOpRB2mdUyZSSg7wB5hYNix5CszzbhRksmDTP/mADH1EBEPnUgfXbo6Y6dbFBL6CgAxCAy+oS7kDq+fc74gcKSX1UsG0iuOx5iwkW/MyED7Df9PcRAQ9hNm3gkM5vyeIX8zwI+7D/VbsgpLV5o4oYLFo7FilA8Urj5ELQNrC0PKYKco0LoC7xNbVoIhrvOnLNZVyme3w==", - "prev": "liPEEJnqh/TPxEVni9ELTBXq9V7EQFMVGwqOGvuiYahX5+1E9Le/Jse778baMOWX4kPCuvTQnwzCoFOvHY09aor7Wl0Hn7h2mPg7kdJ6N2ZRGKNtXB0AxCCPcQmVZAl1b++fj5h0r17cb1+zPJS3WnjqYt+JsmrZoMRAY4wBzAJXitscZlP4Dyx3YM6lEHaZ1TJlJKDvAHmFg2LHkKzPNuFGSyYNM/+YAMfUQEQ+dSB9dujpjp1sUEvoKA==", + "upp": "liPEEOUIWomogUOXkC6mMPAhr9jEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAxCDmydWOipZuotuEAEwPUvk6C4nK9yzFsk4QJBK6sOqzmcRA0fYMXgkdjoAOPE9jXV/gfBxb9kl9WierPozz+usi+WLUNTD98al0QX6TWB3i1pg43XDL0/lHf8E+4AhWfFFlCQ==", + "prev": null, "anchors": [ { "label": "PUBLIC_CHAIN", "properties": { - "timestamp": "2020-04-16T22:09:17.836Z", - "hash": "CAGRDRTQBNNHHQONUHBMWPHMUTMCYJ9XNKJJNTHMBUZXYKEUKTERIFMNNFBKWUAMAMXERJBQQFNQWA999", - "public_chain": "IOTA_TESTNET_IOTA_TESTNET_NETWORK", - "prev_hash": "ca6d36581d1265d38d7cb69a6a410aefb5142cbd31c3004cb7bbe6ec83457d9c683eb0a2e498083699e9e6dc233356be0df6f9fb2e1810d65e71b1bd155b3580", - "type": "PUBLIC_CHAIN" + "timestamp": "2022-10-31T07:45:21.625Z", + "hash": "5d9f841fed2d4b7693b91586d8b7312d681dcfb8e070ba7153a8032835bb109d", + "public_chain": "IOTA_MAINNET_IOTA_MAINNET_NETWORK", + "prev_hash": "65cf37759c94accebf1d7344a9d5a5bcafef3b5198772fff8b63ed7458ee155d5735e2e7d9eb6f09c7517115038dc72b15d5ed997a55bf7b59ccef708636c394" } }, { "label": "PUBLIC_CHAIN", "properties": { - "timestamp": "2020-04-16T22:09:25.614Z", - "hash": "0x229d8e167a45efe8a552fff884ca2ca540d331dbd51a427107d8ac12f184dc25", - "public_chain": "ETHEREUM_TESTNET_RINKEBY_TESTNET_NETWORK", - "prev_hash": "ca6d36581d1265d38d7cb69a6a410aefb5142cbd31c3004cb7bbe6ec83457d9c683eb0a2e498083699e9e6dc233356be0df6f9fb2e1810d65e71b1bd155b3580", - "type": "PUBLIC_CHAIN" + "timestamp": "2022-10-31T07:47:03.100Z", + "hash": "0xe87fdf636a781638ff6e1be573172699986f82ea2f8d06595f39a39e433c420b", + "public_chain": "ETHEREUM-CLASSIC_MAINNET_ETHERERUM_CLASSIC_MAINNET_NETWORK", + "prev_hash": "3f2129956066cdb62f690bb080d7c2c029d64a657db88ec3b043f0b9472400f946f9d28c4af49c6005424f33dc1349140df9a2020fa470c58c955d80dac297e5" } } ] diff --git a/integration-test/README.md b/integration-test/README.md new file mode 100644 index 00000000..48aff6b5 --- /dev/null +++ b/integration-test/README.md @@ -0,0 +1,30 @@ +# Integration test (python) + +The integration test is written in python, using the `pytest` module. + +## Configuration + +The test identity must be registered at the ubirch console / thing API in advance. + +`config.json`: + +```json +{ + "host": "", + "staticAuth": "", + "testDevice": { + "uuid": "", + "password": "" + }, + "env": "" +} +``` + +## Run integration test + + ```shell + python3 -m venv venv && \ + . 
venv/bin/activate && \ + pip install -r requirements.txt && \ + pytest -v + ``` diff --git a/integration-test/helpers.py b/integration-test/helpers.py new file mode 100644 index 00000000..511bedbf --- /dev/null +++ b/integration-test/helpers.py @@ -0,0 +1,70 @@ +import hashlib +import json +import random +import time +import uuid +from binascii import b2a_base64, a2b_base64 + +import ecdsa + +symbols = ("a", "b", "c", "d", "e", "f", "A", "B", "C", "D", "E", "F", + "ä", "ë", "ï", "ö", "ü", "ÿ", "Ä", "Ë", "Ï", "Ö", "Ü", "Ÿ", + "`", "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "-", "=", + "[", "]", ";", "'", "#", ",", ".", "/", "\\", + "¬", "!", '''"''', "£", "$", "%", "^", "*", "(", ")", "_", "+", + "{", "}", ":", "@", "~", "?", " |", + "&", "<", ">", "™", + "®", "™", "U+2122", "%20", "\\n", "", "\ +") + + +def get_random_json() -> dict: + """generates a random JSON message""" + return { + "id": str(uuid.uuid4()), + "ts": int(time.time()), + "big": random.getrandbits(53), + "tpl": (random.getrandbits(32), "".join(random.choices(symbols, k=4)), + random.getrandbits(8), "".join(random.choices(symbols, k=8)), + random.getrandbits(16), "".join(random.choices(symbols, k=2)), + random.getrandbits(4), "".join(random.choices(symbols, k=16)) + ), + "lst": random.choices(symbols, k=8), + "map": { + random.choice(symbols): random.getrandbits(4), + random.choice(symbols): random.getrandbits(16), + random.choice(symbols): random.getrandbits(8), + random.choice(symbols): random.getrandbits(32) + }, + "str": "".join(random.choices(symbols, k=128)) + } + + +def serialize(msg: dict) -> bytes: + return json.dumps(msg, separators=(',', ':'), sort_keys=True, ensure_ascii=False).encode() + + +def get_hash(serialized: bytes) -> bytes: + return hashlib.sha256(serialized).digest() + + +def to_base64(hash_bytes: bytes) -> str: + return b2a_base64(hash_bytes, newline=False).decode() + + +def get_random_hash_base64(): + """return 32 random bytes in base64 encoding""" + # return to_base64(random.randbytes(32)) # randbytes() is new in version 3.9 + return to_base64(bytearray(random.getrandbits(8) for _ in range(32))) + + +def verify_upp_signature(upp_bytes: bytes, pubkey_bas64: bytes) -> bool: + pubkey_bytes = a2b_base64(pubkey_bas64) + + vk = ecdsa.VerifyingKey.from_string(pubkey_bytes, curve=ecdsa.NIST256p, hashfunc=hashlib.sha256) + + try: + vk.verify(upp_bytes[-64:], upp_bytes[:-66]) + return True + except ecdsa.BadSignatureError: + return False diff --git a/integration-test/requirements.txt b/integration-test/requirements.txt new file mode 100644 index 00000000..2188c538 --- /dev/null +++ b/integration-test/requirements.txt @@ -0,0 +1,4 @@ +ecdsa +msgpack +pytest +requests diff --git a/integration-test/test_integration.py b/integration-test/test_integration.py new file mode 100644 index 00000000..ca925d2e --- /dev/null +++ b/integration-test/test_integration.py @@ -0,0 +1,699 @@ +import msgpack +import pytest +import requests + +from helpers import * + + +class TestIntegration: + with open("config.json", "r") as f: + config = json.load(f) + + host = config["host"] + auth = config["staticAuth"] + uuid = config["testDevice"]["uuid"] + pwd = config["testDevice"]["password"] + env = config["env"] + + pubkey_url = f"https://identity.{env}.ubirch.com/api/keyService/v1/pubkey/current/hardwareId/{uuid}" + verify_url = f"https://verify.{env}.ubirch.com/api/upp" + + test_json = {"d": 0, "a": 1, "c": 2, "b": 3} + test_hash = "6zTRVetfJZONC3QdipR12hIdF7YJL34AWVUSAELrk1Y=" + + def test_health(self): + url = self.host + 
"/healthz" + + res = requests.get(url) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.content == b'OK\n' + + def test_ready(self): + url = self.host + "/readyz" + + res = requests.get(url) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.content == b'OK\n' + + def test_metrics(self): + url = self.host + "/metrics" + + res = requests.get(url) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.content.__contains__(b'# TYPE http_requests_total counter') \ + and res.content.__contains__(b'# TYPE http_response_time_seconds histogram') \ + and res.content.__contains__(b'# TYPE response_status counter') + + def test_register(self): + url = self.host + "/register" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.auth} + body = {"uuid": self.uuid, "password": self.pwd} + + res = requests.put(url, json=body, headers=header) + + assert (res.status_code == 200 + and res.content.startswith(b'-----BEGIN CERTIFICATE REQUEST-----\n') + and res.content.endswith(b'-----END CERTIFICATE REQUEST-----\n')) \ + or (res.status_code == 409 + and res.content == b'identity already registered\n'), f"request failed: [{res.status_code}] {res.content}" + + # check if key was registered at ubirch identity service + pubkey_res = requests.get(self.pubkey_url) + + assert len(pubkey_res.json()) == 1 + + def test_csr(self): + url = self.host + f"/{self.uuid}/csr" + header = {'X-Auth-Token': self.auth} + + res = requests.get(url, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.content.startswith(b'-----BEGIN CERTIFICATE REQUEST-----\n') \ + and res.content.endswith(b'-----END CERTIFICATE REQUEST-----\n') + + def test_deactivate(self): + url = self.host + "/device/updateActive" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.auth} + body = {"id": self.uuid, "active": False} + + res = requests.put(url, json=body, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.content == b'key deactivation successful\n' + + # check if key was deleted at ubirch identity service + pubkey_res = requests.get(self.pubkey_url) + + assert len(pubkey_res.json()) == 0 + + def test_reactivate(self): + url = self.host + "/device/updateActive" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.auth} + body = {"id": self.uuid, "active": True} + + res = requests.put(url, json=body, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.content == b'key reactivation successful\n' + + # check if key was registered at ubirch identity service + pubkey_res = requests.get(self.pubkey_url) + + assert len(pubkey_res.json()) == 1 + + def test_chain(self): + url = self.host + f"/{self.uuid}" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = get_random_json() + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + res = requests.post(url, json=data_json, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 6 + assert unpacked[0] == 0x23 + assert unpacked[1] == 
uuid.UUID(self.uuid).bytes + assert unpacked[3] == 0x00 + assert unpacked[4] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # check if hash is known by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 200, f"hash not found at {self.verify_url}" if verify_res.status_code == 404 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["upp"] == res.json()["upp"] + + # check if consecutive requests to this endpoint result in correctly chained UPPs + prev_signature = unpacked[5] + for i in range(10): + res = requests.post(url, json=get_random_json(), headers=header) + + assert res.status_code == 200, f"request failed ({i}): [{res.status_code}] {res.content}" + + unpacked = msgpack.unpackb(a2b_base64(res.json()["upp"])) + assert unpacked[2] == prev_signature, f"chain check failed in loop {i}" + + prev_signature = unpacked[5] + + def test_chain_hash(self): + url = self.host + f"/{self.uuid}/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = get_random_hash_base64() + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 6 + assert unpacked[0] == 0x23 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[3] == 0x00 + assert unpacked[4] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # check if hash is known by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 200, f"hash not found at {self.verify_url}" if verify_res.status_code == 404 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["upp"] == res.json()["upp"] + + # check if consecutive requests to this endpoint result in correctly chained UPPs + prev_signature = unpacked[5] + for i in range(10): + data_hash_64 = get_random_hash_base64() + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed ({i}): [{res.status_code}] {res.content}" + + unpacked = msgpack.unpackb(a2b_base64(res.json()["upp"])) + assert unpacked[2] == prev_signature, f"chain check failed in loop {i}" + + prev_signature = unpacked[5] + + def test_chain_offline(self): + url = self.host + f"/{self.uuid}/offline" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = get_random_json() + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + res = requests.post(url, json=data_json, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + with 
pytest.raises(KeyError): + res.json()["response"] + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 6 + assert unpacked[0] == 0x23 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[3] == 0x00 + assert unpacked[4] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # make sure hash is unknown by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + # check if consecutive requests to this endpoint result in correctly chained UPPs + prev_signature = unpacked[5] + for i in range(10): + res = requests.post(url, json=get_random_json(), headers=header) + + assert res.status_code == 200, f"request failed ({i}): [{res.status_code}] {res.content}" + + unpacked = msgpack.unpackb(a2b_base64(res.json()["upp"])) + assert unpacked[2] == prev_signature, f"chain check failed in loop {i}" + + prev_signature = unpacked[5] + + def test_chain_offline_hash(self): + url = self.host + f"/{self.uuid}/offline/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = get_random_hash_base64() + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + with pytest.raises(KeyError): + res.json()["response"] + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 6 + assert unpacked[0] == 0x23 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[3] == 0x00 + assert unpacked[4] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # make sure hash is unknown by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + # check if consecutive requests to this endpoint result in correctly chained UPPs + prev_signature = unpacked[5] + for i in range(10): + data_hash_64 = get_random_hash_base64() + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed ({i}): [{res.status_code}] {res.content}" + + unpacked = msgpack.unpackb(a2b_base64(res.json()["upp"])) + assert unpacked[2] == prev_signature, f"chain check failed in loop {i}" + + prev_signature = unpacked[5] + + def test_anchor(self): + url = self.host + f"/{self.uuid}/anchor" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = self.test_json + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + res = requests.post(url, json=data_json, headers=header) + + assert res.status_code == 200, 
f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0x00 + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # check if hash is known by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 200, f"hash not found at {self.verify_url}" if verify_res.status_code == 404 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["upp"] == res.json()["upp"] + + def test_anchor_hash(self): + url = self.host + f"/{self.uuid}/anchor/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = self.test_hash + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0x00 + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # check if hash is known by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 200, f"hash not found at {self.verify_url}" if verify_res.status_code == 404 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["upp"] == res.json()["upp"] + + def test_disable(self): + url = self.host + f"/{self.uuid}/disable" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = self.test_json + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + res = requests.post(url, json=data_json, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0xFA + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # assert hash has been disabled in ubirch backend + verify_res = requests.post(self.verify_url, data=data_hash_64, 
headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_disable_hash(self): + url = self.host + f"/{self.uuid}/disable/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = self.test_hash + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0xFA + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # assert hash has been disabled in ubirch backend + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_enable(self): + url = self.host + f"/{self.uuid}/enable" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = self.test_json + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + res = requests.post(url, json=data_json, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0xFB + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # assert hash has been enabled in ubirch backend + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 200, f"hash not found at {self.verify_url}" if verify_res.status_code == 404 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_enable_hash(self): + url = self.host + f"/{self.uuid}/enable/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = self.test_hash + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0xFB + assert unpacked[3] == a2b_base64(data_hash_64) + + 
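+        # For reference, the layout checked above: a signed UPP unpacks to
+        # [version, uuid, hint, payload, signature] with version byte 0x22, while
+        # a chained UPP (see test_chain) unpacks to
+        # [version, uuid, prev_signature, hint, payload, signature] with version byte 0x23.
+        # The hint byte encodes the operation: 0x00 anchor, 0xFA disable,
+        # 0xFB enable, 0xFC delete.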
registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # assert hash has been enabled in ubirch backend + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 200, f"hash not found at {self.verify_url}" if verify_res.status_code == 404 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_delete(self): + url = self.host + f"/{self.uuid}/delete" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = self.test_json + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + res = requests.post(url, json=data_json, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0xFC + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # assert hash has been deleted in ubirch backend + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_delete_hash(self): + url = self.host + f"/{self.uuid}/delete/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = self.test_hash + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + assert res.json()["response"]["statusCode"] == 200 + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0xFC + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # assert hash has been deleted in ubirch backend + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_anchor_offline(self): + url = self.host + f"/{self.uuid}/anchor/offline" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = get_random_json() + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + res = requests.post(url, json=data_json, headers=header) + + assert 
res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + with pytest.raises(KeyError): + res.json()["response"] + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0x00 + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # make sure hash is unknown by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_anchor_offline_hash(self): + url = self.host + f"/{self.uuid}/anchor/offline/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = get_random_hash_base64() + + res = requests.post(url, data=data_hash_64, headers=header) + + assert res.status_code == 200, f"request failed: [{res.status_code}] {res.content}" + assert res.json()["hash"] == data_hash_64 + with pytest.raises(KeyError): + res.json()["response"] + + upp = a2b_base64(res.json()["upp"]) + unpacked = msgpack.unpackb(upp) + assert len(unpacked) == 5 + assert unpacked[0] == 0x22 + assert unpacked[1] == uuid.UUID(self.uuid).bytes + assert unpacked[2] == 0x00 + assert unpacked[3] == a2b_base64(data_hash_64) + + registered_pubkey = requests.get(self.pubkey_url).json()[0]["pubKeyInfo"]["pubKey"] + assert res.json()["publicKey"] == registered_pubkey + + # verify UPP signature locally + assert verify_upp_signature(upp, registered_pubkey), "invalid UPP signature" + + # make sure hash is unknown by ubirch verification service + verify_res = requests.post(self.verify_url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 404, f"hash found at {self.verify_url}" if verify_res.status_code == 200 \ + else f"request failed: [{verify_res.status_code}] {verify_res.content}" + + def test_verify(self): + # anchor data to verify + url = self.host + f"/{self.uuid}" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = get_random_json() + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + signing_res = requests.post(url, json=data_json, headers=header) + + assert signing_res.status_code == 200, f"request failed: [{signing_res.status_code}] {signing_res.content}" + + # since the UPP-signer does not use the quick verify endpoint, we need + # to sleep after anchoring to ensure the hash can be verified + time.sleep(2) + + # verify data + url = self.host + "/verify" + verify_res = requests.post(url, json=data_json, headers={'Content-Type': 'application/json'}) + + assert verify_res.status_code == 200, f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["hash"] == data_hash_64 + assert verify_res.json()["upp"] == signing_res.json()["upp"] + assert verify_res.json()["uuid"] == self.uuid + assert verify_res.json()["pubKey"] == signing_res.json()["publicKey"] + + def test_verify_hash(self): + # anchor hash to verify + url = self.host + f"/{self.uuid}/hash" + 
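+        # Note: like test_verify, this test anchors the hash first and then sleeps
+        # briefly, so the freshly anchored hash is verifiable at the backend before
+        # /verify/hash is called. The offline verify tests below pass the UPP in
+        # the X-Ubirch-UPP header instead and do not need to wait.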
header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = get_random_hash_base64() + + signing_res = requests.post(url, data=data_hash_64, headers=header) + + assert signing_res.status_code == 200, f"request failed: [{signing_res.status_code}] {signing_res.content}" + + # since the UPP-signer does not use the quick verify endpoint, we need + # to sleep after anchoring to ensure the hash can be verified + time.sleep(2) + + # verify hash + url = self.host + "/verify/hash" + verify_res = requests.post(url, data=data_hash_64, headers={'Content-Type': 'text/plain'}) + + assert verify_res.status_code == 200, f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["hash"] == data_hash_64 + assert verify_res.json()["upp"] == signing_res.json()["upp"] + assert verify_res.json()["uuid"] == self.uuid + assert verify_res.json()["pubKey"] == signing_res.json()["publicKey"] + + def test_verify_offline(self): + # sign data to verify + url = self.host + f"/{self.uuid}/offline" + header = {'Content-Type': 'application/json', 'X-Auth-Token': self.pwd} + data_json = get_random_json() + data_hash_64 = to_base64(get_hash(serialize(data_json))) + + signing_res = requests.post(url, json=data_json, headers=header) + + assert signing_res.status_code == 200, f"request failed: [{signing_res.status_code}] {signing_res.content}" + + # verify data offline + url = self.host + "/verify/offline" + header = {'Content-Type': 'application/json', 'X-Ubirch-UPP': signing_res.json()["upp"]} + verify_res = requests.post(url, json=data_json, headers=header) + + assert verify_res.status_code == 200, f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["hash"] == data_hash_64 + assert verify_res.json()["upp"] == signing_res.json()["upp"] + assert verify_res.json()["uuid"] == self.uuid + assert verify_res.json()["pubKey"] == signing_res.json()["publicKey"] + + def test_verify_offline_hash(self): + # sign hash to verify + url = self.host + f"/{self.uuid}/offline/hash" + header = {'Content-Type': 'text/plain', 'X-Auth-Token': self.pwd} + data_hash_64 = get_random_hash_base64() + + signing_res = requests.post(url, data=data_hash_64, headers=header) + + assert signing_res.status_code == 200, f"request failed: [{signing_res.status_code}] {signing_res.content}" + + # verify hash offline + url = self.host + "/verify/offline/hash" + header = {'Content-Type': 'text/plain', 'X-Ubirch-UPP': signing_res.json()["upp"]} + verify_res = requests.post(url, data=data_hash_64, headers=header) + + assert verify_res.status_code == 200, f"request failed: [{verify_res.status_code}] {verify_res.content}" + assert verify_res.json()["hash"] == data_hash_64 + assert verify_res.json()["upp"] == signing_res.json()["upp"] + assert verify_res.json()["uuid"] == self.uuid + assert verify_res.json()["pubKey"] == signing_res.json()["publicKey"] diff --git a/load-test/config.go b/load-test/config.go index fb0304a3..614f331d 100644 --- a/load-test/config.go +++ b/load-test/config.go @@ -2,33 +2,70 @@ package main import ( "encoding/json" + "flag" + "fmt" "os" + "path/filepath" + + log "github.com/sirupsen/logrus" + urlpkg "net/url" ) type Config struct { + Url string `json:"url"` Devices map[string]string `json:"devices"` RegisterAuth string `json:"registerAuth"` + url *urlpkg.URL } -func (c *Config) Load(filename string) error { - fileHandle, err := os.Open(filename) +func (c *Config) load() error { + log.SetFormatter(&log.TextFormatter{FullTimestamp: true, 
TimestampFormat: "2006-01-02 15:04:05.000 -0700"}) + + flag.Parse() + if len(*configFile) == 0 { + *configFile = defaultConfigFile + } + log.Infof("loading config: %s", *configFile) + + fileHandle, err := os.Open(filepath.Clean(*configFile)) if err != nil { return err } defer fileHandle.Close() - return json.NewDecoder(fileHandle).Decode(c) + err = json.NewDecoder(fileHandle).Decode(c) + if err != nil { + return fmt.Errorf("decoding config failed: %v", err) + } + + if c.Url == "" { + return fmt.Errorf("missing client base URL (\"url\") in config") + } + + c.url, err = urlpkg.Parse(c.Url) + if err != nil { + return fmt.Errorf("client base URL could not be parsed: %v", err) + } + + return nil } -func (c *Config) GetTestIdentities() map[string]string { - testIdentities := make(map[string]string, numberOfTestIDs) +func (c *Config) initTestIdentities(sender *Sender) (testIdentities map[string]string, err error) { + testIdentities = make(map[string]string, numberOfTestIDs) for uid, auth := range c.Devices { + + err = sender.register(*c.url, uid, auth, c.RegisterAuth) + if err != nil { + log.Fatal(err) + } + testIdentities[uid] = auth + if len(testIdentities) == numberOfTestIDs { break } } - return testIdentities + return testIdentities, nil } diff --git a/load-test/main.go b/load-test/main.go index 766aa8be..d0848aa8 100644 --- a/load-test/main.go +++ b/load-test/main.go @@ -1,6 +1,7 @@ package main import ( + "flag" "sync" "time" @@ -8,35 +9,39 @@ import ( ) const ( - clientBaseURL = "http://localhost:8080" - configFile = "config.json" - numberOfTestIDs = 100 - numberOfRequestsPerID = 100 + numberOfTestIDs = 10 requestsPerSecondPerID = 1 + numberOfRequestsPerID = 100 + + httpConnectionPoolSize = 50 + httpClientTimeoutSec = 2 ) -func main() { - log.SetFormatter(&log.TextFormatter{FullTimestamp: true, TimestampFormat: "2006-01-02 15:04:05.000 -0700"}) +var ( + defaultConfigFile = "config.json" + configFile = flag.String("config", "", "file name of the configuration file. 
if omitted, configuration is read from \"config.json\".") +) +func main() { c := Config{} - err := c.Load(configFile) + err := c.load() if err != nil { - log.Fatalf("ERROR: unable to load configuration: %s", err) + log.Fatalf("could not load configuration: %v", err) } - identities := c.GetTestIdentities() sender := NewSender() - for id, auth := range identities { - err := sender.register(id, auth, c.RegisterAuth) - if err != nil { - log.Fatal(err) - } + identities, err := c.initTestIdentities(sender) + if err != nil { + log.Fatalf("could not initialize identities: %v", err) } - log.Infof("%d identities, %d requests each => sending [ %d ] requests", len(identities), numberOfRequestsPerID, len(identities)*numberOfRequestsPerID) + totalNumberOfRequests := len(identities) * numberOfRequestsPerID + log.Infof("%d identities, %d requests each => sending [ %d ] requests", len(identities), numberOfRequestsPerID, totalNumberOfRequests) log.Infof("%3d requests per second per identity", requestsPerSecondPerID) log.Infof("%3d requests per second overall", len(identities)*requestsPerSecondPerID) + log.Infof("http connection pool size: %3d", httpConnectionPoolSize) + log.Infof(" http client timeout [s]: %3d", httpClientTimeoutSec) wg := &sync.WaitGroup{} start := time.Now() @@ -48,7 +53,7 @@ func main() { i += 1 wg.Add(1) - go sender.sendRequests(uid, auth, offset, wg) + go sender.sendRequests(*c.url, uid, auth, offset, wg) } wg.Wait() @@ -56,15 +61,17 @@ func main() { sender.chainChecker.finish() - log.Infof("[ %6d ] requests done after [ %7.3f ] seconds ", len(identities)*numberOfRequestsPerID, duration.Seconds()) + log.Infof("[ %6d ] requests done after [ %7.3f ] seconds ", totalNumberOfRequests, duration.Seconds()) for status, count := range sender.statusCounter { log.Infof("[ %6d ] x %s", count, status) } - log.Infof("avg response time: %s", sender.getAvgRequestDuration().String()) - avgReqsPerSec := float64(len(identities)*numberOfRequestsPerID) / duration.Seconds() - log.Infof("avg total throughput: %7.3f requests/second", avgReqsPerSec) - avgReqsPerSecSuccess := float64(sender.statusCounter["200 OK"]) / duration.Seconds() - log.Infof("avg successful throughput: %7.3f requests/second", avgReqsPerSecSuccess) + successCount := sender.statusCounter["200 OK"] + successRate := (float32(successCount) / float32(totalNumberOfRequests)) * 100. 
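+	// successRate is the percentage of requests answered with "200 OK" out of all
+	// requests sent; the summary below logs the complementary error rate, the
+	// average response time, and the average total and successful throughput.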
+ + log.Infof(" error rate: %3.2f", 100.-successRate) + log.Infof(" avg response time: %s", sender.getAvgRequestDuration().String()) + log.Infof(" avg total throughput: %7.3f requests/second", float64(totalNumberOfRequests)/duration.Seconds()) + log.Infof("avg successful throughput: %7.3f requests/second", float64(successCount)/duration.Seconds()) } diff --git a/load-test/sender.go b/load-test/sender.go index 407c8cd4..31e58427 100644 --- a/load-test/sender.go +++ b/load-test/sender.go @@ -5,27 +5,19 @@ import ( "crypto/rand" "encoding/json" "fmt" - "io/ioutil" + "io" "net/http" + "path" "sync" "time" log "github.com/sirupsen/logrus" + urlpkg "net/url" ) type SigningResponse struct { - Error string `json:"error,omitempty"` - Operation string `json:"operation,omitempty"` - Hash []byte `json:"hash,omitempty"` - UPP []byte `json:"upp,omitempty"` - Response HTTPResponse `json:"response,omitempty"` - RequestID string `json:"requestID,omitempty"` -} - -type HTTPResponse struct { - StatusCode int `json:"statusCode"` - Header http.Header `json:"header"` - Content []byte `json:"content"` + Hash []byte + UPP []byte } type Sender struct { @@ -39,8 +31,16 @@ type Sender struct { } func NewSender() *Sender { + transport := http.DefaultTransport.(*http.Transport).Clone() + transport.MaxIdleConns = httpConnectionPoolSize + transport.MaxConnsPerHost = httpConnectionPoolSize + transport.MaxIdleConnsPerHost = httpConnectionPoolSize + return &Sender{ - httpClient: &http.Client{Timeout: 30 * time.Second}, + httpClient: &http.Client{ + Timeout: httpClientTimeoutSec * time.Second, + Transport: transport, + }, chainChecker: NewChainChecker(), statusCounter: map[string]int{}, statusCounterMtx: &sync.Mutex{}, @@ -48,29 +48,24 @@ func NewSender() *Sender { } } -func (s *Sender) register(id, auth, registerAuth string) error { - url := clientBaseURL + "/register" - - header := http.Header{} - header.Set("Content-Type", "application/json") - header.Set("X-Auth-Token", registerAuth) +func (s *Sender) register(url urlpkg.URL, id, auth, registerAuth string) error { + url.Path = path.Join(url.Path, "register") - registrationData := map[string]string{ + body, err := json.Marshal(map[string]string{ "uuid": id, "password": auth, - } - - body, err := json.Marshal(registrationData) + }) if err != nil { return err } - req, err := http.NewRequest(http.MethodPut, url, bytes.NewBuffer(body)) + req, err := http.NewRequest(http.MethodPut, url.String(), bytes.NewBuffer(body)) if err != nil { return err } - req.Header = header + req.Header.Set("Content-Type", "application/json") + req.Header.Set("X-Auth-Token", registerAuth) resp, err := s.httpClient.Do(req) if err != nil { @@ -92,10 +87,11 @@ func (s *Sender) register(id, auth, registerAuth string) error { return nil } -func (s *Sender) sendRequests(id, auth string, offset time.Duration, wg *sync.WaitGroup) { +func (s *Sender) sendRequests(url urlpkg.URL, uid, auth string, offset time.Duration, wg *sync.WaitGroup) { defer wg.Done() - clientURL := clientBaseURL + fmt.Sprintf("/%s/hash", id) + url.Path = path.Join(url.Path, uid, "hash") + header := http.Header{} header.Set("Content-Type", "application/octet-stream") header.Set("X-Auth-Token", auth) @@ -104,7 +100,7 @@ func (s *Sender) sendRequests(id, auth string, offset time.Duration, wg *sync.Wa for i := 0; i < numberOfRequestsPerID; i++ { wg.Add(1) - go s.sendAndCheckResponse(clientURL, header, wg) + go s.sendAndCheckResponse(url.String(), header, wg) time.Sleep(time.Second / requestsPerSecondPerID) } @@ -149,7 +145,7 @@ func (s 
*Sender) sendRequest(clientURL string, header http.Header, hash []byte) return SigningResponse{}, err } - duration := time.Now().Sub(start) + duration := time.Since(start) //noinspection GoUnhandledErrorResult defer resp.Body.Close() @@ -159,7 +155,7 @@ func (s *Sender) sendRequest(clientURL string, header http.Header, hash []byte) if resp.StatusCode == http.StatusOK { s.addTime(duration) } else { - respBody, _ := ioutil.ReadAll(resp.Body) + respBody, _ := io.ReadAll(resp.Body) return SigningResponse{}, fmt.Errorf("%d: %s", resp.StatusCode, respBody) } diff --git a/main/adapters/clients/ubirch_authentication_client.go b/main/adapters/clients/ubirch_authentication_client.go index 149cbcc0..6fcdd51b 100644 --- a/main/adapters/clients/ubirch_authentication_client.go +++ b/main/adapters/clients/ubirch_authentication_client.go @@ -16,18 +16,26 @@ package clients import ( "encoding/base64" + "net/http" + "time" "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" + prom "github.com/ubirch/ubirch-client-go/main/prometheus" ) type AuthenticationServiceClient struct { - AuthServiceURL string + AuthServiceURL string + AuthServiceTimeout time.Duration } func (c *AuthenticationServiceClient) SendToAuthService(uid uuid.UUID, auth string, upp []byte) (h.HTTPResponse, error) { - return Post(c.AuthServiceURL, upp, ubirchHeader(uid, auth)) + timer := prometheus.NewTimer(prom.UpstreamResponseDuration) + defer timer.ObserveDuration() + + return sendRequest(http.MethodPost, c.AuthServiceURL, upp, ubirchHeader(uid, auth), c.AuthServiceTimeout) } func ubirchHeader(uid uuid.UUID, auth string) map[string]string { diff --git a/main/adapters/clients/ubirch_identity_client.go b/main/adapters/clients/ubirch_identity_client.go index aea9d662..c9d2c827 100644 --- a/main/adapters/clients/ubirch_identity_client.go +++ b/main/adapters/clients/ubirch_identity_client.go @@ -18,9 +18,10 @@ import ( "encoding/base64" "encoding/json" "fmt" - "io/ioutil" + "io" "net/http" "path" + "time" "github.com/google/uuid" "github.com/ubirch/ubirch-protocol-go/ubirch/v2" @@ -31,8 +32,9 @@ import ( ) type IdentityServiceClient struct { - KeyServiceURL string - IdentityServiceURL string + KeyServiceURL string + IdentityServiceURL string + IdentityServiceTimeout time.Duration } // RequestPublicKeys requests a devices public keys at the identity service @@ -55,11 +57,11 @@ func (c *IdentityServiceClient) RequestPublicKeys(id uuid.UUID) ([]ubirch.Signed } if h.HttpFailed(resp.StatusCode) { - respContent, _ := ioutil.ReadAll(resp.Body) + respContent, _ := io.ReadAll(resp.Body) return nil, fmt.Errorf("retrieving public key info from %s failed: (%s) %s", url, resp.Status, string(respContent)) } - respBodyBytes, err := ioutil.ReadAll(resp.Body) + respBodyBytes, err := io.ReadAll(resp.Body) if err != nil { return nil, fmt.Errorf("unable to read response body: %v", err) } @@ -90,16 +92,16 @@ func (c *IdentityServiceClient) IsKeyRegistered(id uuid.UUID, pubKey []byte) (bo } func (c *IdentityServiceClient) SubmitKeyRegistration(uid uuid.UUID, cert []byte) error { - log.Debugf("%s: registering public key at key service", uid) + log.Debugf("%s: registering public key at key service: %s, cert: %s", uid, c.KeyServiceURL, cert) keyRegHeader := map[string]string{"content-type": "application/json"} - resp, err := Post(c.KeyServiceURL, cert, keyRegHeader) + resp, err := sendRequest(http.MethodPost, c.KeyServiceURL, cert, keyRegHeader, c.IdentityServiceTimeout) if err != 
nil { return fmt.Errorf("error sending key registration: %v", err) } if h.HttpFailed(resp.StatusCode) { - return fmt.Errorf("key registration failed: (%d) %q", resp.StatusCode, resp.Content) + return fmt.Errorf("key registration unsuccessful: request to %s failed: (%d) %q", c.KeyServiceURL, resp.StatusCode, resp.Content) } log.Debugf("%s: key registration successful: (%d) %s", uid, resp.StatusCode, string(resp.Content)) return nil @@ -110,29 +112,29 @@ func (c *IdentityServiceClient) RequestKeyDeletion(uid uuid.UUID, cert []byte) e keyDelHeader := map[string]string{"content-type": "application/json"} - resp, err := Delete(c.KeyServiceURL, cert, keyDelHeader) + resp, err := sendRequest(http.MethodDelete, c.KeyServiceURL, cert, keyDelHeader, c.IdentityServiceTimeout) if err != nil { return fmt.Errorf("error sending key deletion request: %v", err) } if h.HttpFailed(resp.StatusCode) { - return fmt.Errorf("key deletion failed: (%d) %q", resp.StatusCode, resp.Content) + return fmt.Errorf("key deletion unsuccessful: request to %s failed: (%d) %q", c.KeyServiceURL, resp.StatusCode, resp.Content) } log.Debugf("%s: key deletion successful: (%d) %s", uid, resp.StatusCode, string(resp.Content)) return nil } -// SubmitCSR submits a X.509 Certificate Signing Request for the public key to the identity service +// SubmitCSR submits an X.509 Certificate Signing Request for the public key to the identity service func (c *IdentityServiceClient) SubmitCSR(uid uuid.UUID, csr []byte) error { log.Debugf("%s: submitting CSR to identity service", uid) CSRHeader := map[string]string{"content-type": "application/octet-stream"} - resp, err := Post(c.IdentityServiceURL, csr, CSRHeader) + resp, err := sendRequest(http.MethodPost, c.IdentityServiceURL, csr, CSRHeader, c.IdentityServiceTimeout) if err != nil { return fmt.Errorf("error sending CSR: %v", err) } if h.HttpFailed(resp.StatusCode) { - return fmt.Errorf("request to %s failed: (%d) %q", c.IdentityServiceURL, resp.StatusCode, resp.Content) + return fmt.Errorf("CSR submission unsuccessful: request to %s failed: (%d) %q", c.IdentityServiceURL, resp.StatusCode, resp.Content) } log.Debugf("%s: CSR submitted: (%d) %s", uid, resp.StatusCode, string(resp.Content)) return nil diff --git a/main/adapters/clients/ubirch_service_client.go b/main/adapters/clients/ubirch_service_client.go index 8026520f..545508c1 100644 --- a/main/adapters/clients/ubirch_service_client.go +++ b/main/adapters/clients/ubirch_service_client.go @@ -2,9 +2,11 @@ package clients import ( "bytes" + "context" "fmt" - "io/ioutil" + "io" "net/http" + "time" h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" ) @@ -15,20 +17,11 @@ type UbirchServiceClient struct { VerificationServiceClient } -// Post submits a message to a backend service -// returns the response or encountered errors -func Post(serviceURL string, data []byte, header map[string]string) (h.HTTPResponse, error) { - return sendRequest(http.MethodPost, serviceURL, data, header) -} - -func Delete(serviceURL string, data []byte, header map[string]string) (h.HTTPResponse, error) { - return sendRequest(http.MethodDelete, serviceURL, data, header) -} - -func sendRequest(method string, serviceURL string, data []byte, header map[string]string) (h.HTTPResponse, error) { - client := &http.Client{Timeout: h.BackendRequestTimeout} +func sendRequest(method string, serviceURL string, data []byte, header map[string]string, timeout time.Duration) (h.HTTPResponse, error) { + ctx, cancel := context.WithTimeout(context.Background(), timeout) + 
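+	// The request timeout is now enforced per request via the context instead of
+	// a per-call http.Client: each upstream client passes its own configured
+	// timeout (AuthServiceTimeout, IdentityServiceTimeout, VerifyServiceTimeout)
+	// and the request itself is executed with the shared http.DefaultClient.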
defer cancel() - req, err := http.NewRequest(method, serviceURL, bytes.NewBuffer(data)) + req, err := http.NewRequestWithContext(ctx, method, serviceURL, bytes.NewBuffer(data)) if err != nil { return h.HTTPResponse{}, fmt.Errorf("can't make new post request: %v", err) } @@ -37,7 +30,7 @@ func sendRequest(method string, serviceURL string, data []byte, header map[strin req.Header.Set(k, v) } - resp, err := client.Do(req) + resp, err := http.DefaultClient.Do(req) if err != nil { return h.HTTPResponse{}, err } @@ -45,7 +38,7 @@ func sendRequest(method string, serviceURL string, data []byte, header map[strin //noinspection GoUnhandledErrorResult defer resp.Body.Close() - respBodyBytes, err := ioutil.ReadAll(resp.Body) + respBodyBytes, err := io.ReadAll(resp.Body) if err != nil { return h.HTTPResponse{}, err } diff --git a/main/adapters/clients/ubirch_verification_client.go b/main/adapters/clients/ubirch_verification_client.go index c9ec82d9..65a95adf 100644 --- a/main/adapters/clients/ubirch_verification_client.go +++ b/main/adapters/clients/ubirch_verification_client.go @@ -15,13 +15,18 @@ package clients import ( + "net/http" + "time" + h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" ) type VerificationServiceClient struct { - VerifyServiceURL string + VerifyServiceURL string + VerifyServiceTimeout time.Duration } func (c *VerificationServiceClient) RequestHash(hashBase64 string) (h.HTTPResponse, error) { - return Post(c.VerifyServiceURL, []byte(hashBase64), map[string]string{"content-type": "text/plain"}) + return sendRequest(http.MethodPost, c.VerifyServiceURL, []byte(hashBase64), + map[string]string{"content-type": "text/plain"}, c.VerifyServiceTimeout) } diff --git a/main/adapters/handlers/common_test.go b/main/adapters/handlers/common_test.go new file mode 100644 index 00000000..bf31714f --- /dev/null +++ b/main/adapters/handlers/common_test.go @@ -0,0 +1,35 @@ +package handlers + +import ( + "encoding/base64" + "net/http" + + "github.com/google/uuid" + "github.com/ubirch/ubirch-client-go/main/config" + "github.com/ubirch/ubirch-protocol-go/ubirch/v2" + + h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" +) + +var ( + testUuid = uuid.UUID{0xaa, 0x9a, 0xbf, 0xf7, 0xc0, 0x0, 0x45, 0x7a, 0xaa, 0xb6, 0x18, 0xf6, 0x69, 0x0, 0xe6, 0x66} + testAuth = "123456" + testSecret, _ = base64.StdEncoding.DecodeString("ZQJt1OC9+4OZtgZLLT9mX25BbrZdxtOQBjK4GyRF2fQ=") + conf = &config.Config{SecretBytes32: testSecret} + testHash = h.Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e} + testSignature = []byte{0xb6, 0x2b, 0xc0, 0x1a, 0xc9, 0xe5, 0xb1, 0xd8, 0x97, 0x73, 0x6f, 0xf9, 0x87, 0x7b, 0x43, 0x75, 0x3c, 0xb7, 0xbd, 0x57, 0xb1, 0xb0, 0x47, 0x7e, 0x87, 0xdc, 0x47, 0x34, 0x20, 0x25, 0x94, 0xf5, 0x4a, 0xfb, 0x78, 0x28, 0x3e, 0xf8, 0x9, 0xbf, 0x9f, 0x72, 0xbc, 0x5d, 0x55, 0x6f, 0x66, 0x5b, 0xb1, 0xff, 0x11, 0x7e, 0x59, 0x22, 0x1d, 0xe3, 0xea, 0x3a, 0xb3, 0x57, 0x3e, 0x5f, 0xe9, 0xd0} + testPublicKey = []byte{0x05, 0x0b, 0xd7, 0xfb, 0x9d, 0x9f, 0x3d, 0x17, 0x7d, 0x9f, 0x1c, 0x18, 0x0d, 0x1e, 0xe4, 0x7a, 0xe6, 0x53, 0xd6, 0x46, 0x19, 0xb9, 0x98, 0x9f, 0xa2, 0x76, 0x03, 0xfa, 0x18, 0xe3, 0x74, 0xc7, 0x71, 0x4f, 0x96, 0xe2, 0x2c, 0x61, 0xc8, 0x17, 0x9b, 0x1a, 0x10, 0x29, 0x45, 0x1d, 0x5c, 0xc6, 0xfc, 0x3e, 0xa9, 0x0d, 0x9b, 0x30, 0x39, 0xf6, 0x2b, 0x36, 0xe1, 0x3d, 0xa2, 0xc3, 0x7f, 0x3e} + testPublicKeyPEM = []byte{0x2d, 0x2d, 
0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x46, 0x6b, 0x77, 0x45, 0x77, 0x59, 0x48, 0x4b, 0x6f, 0x5a, 0x49, 0x7a, 0x6a, 0x30, 0x43, 0x41, 0x51, 0x59, 0x49, 0x4b, 0x6f, 0x5a, 0x49, 0x7a, 0x6a, 0x30, 0x44, 0x41, 0x51, 0x63, 0x44, 0x51, 0x67, 0x41, 0x45, 0x42, 0x51, 0x76, 0x58, 0x2b, 0x35, 0x32, 0x66, 0x50, 0x52, 0x64, 0x39, 0x6e, 0x78, 0x77, 0x59, 0x44, 0x52, 0x37, 0x6b, 0x65, 0x75, 0x5a, 0x54, 0x31, 0x6b, 0x59, 0x5a, 0x0a, 0x75, 0x5a, 0x69, 0x66, 0x6f, 0x6e, 0x59, 0x44, 0x2b, 0x68, 0x6a, 0x6a, 0x64, 0x4d, 0x64, 0x78, 0x54, 0x35, 0x62, 0x69, 0x4c, 0x47, 0x48, 0x49, 0x46, 0x35, 0x73, 0x61, 0x45, 0x43, 0x6c, 0x46, 0x48, 0x56, 0x7a, 0x47, 0x2f, 0x44, 0x36, 0x70, 0x44, 0x5a, 0x73, 0x77, 0x4f, 0x66, 0x59, 0x72, 0x4e, 0x75, 0x45, 0x39, 0x6f, 0x73, 0x4e, 0x2f, 0x50, 0x67, 0x3d, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a} + testSignedUPP = []byte{0x95, 0x22, 0xc4, 0x10, 0xaa, 0x9a, 0xbf, 0xf7, 0xc0, 0x00, 0x45, 0x7a, 0xaa, 0xb6, 0x18, 0xf6, 0x69, 0x00, 0xe6, 0x66, 0x00, 0xc4, 0x20, 0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5f, 0xc4, 0x40, 0xf7, 0x49, 0x01, 0xb5, 0x35, 0x5b, 0x77, 0x8a, 0x9c, 0xb1, 0x02, 0xa5, 0x53, 0xf1, 0xad, 0xe1, 0x26, 0xa9, 0x30, 0xc6, 0x0f, 0xd0, 0xd9, 0xc7, 0xb9, 0x24, 0x58, 0x0d, 0x45, 0xd3, 0x05, 0x50, 0xb1, 0xb0, 0xc0, 0xe0, 0x38, 0x6d, 0x07, 0x0f, 0x1a, 0x48, 0x7e, 0xb3, 0x56, 0x98, 0x95, 0x71, 0x13, 0xda, 0x67, 0x1a, 0xa8, 0xd3, 0x3c, 0xe6, 0xd4, 0xf1, 0x63, 0xa0, 0xee, 0xa5, 0x51, 0xe5} + testChainedUPP = []byte{0x96, 0x23, 0xc4, 0x10, 0x7e, 0x41, 0xc4, 0x21, 0xac, 0xad, 0x46, 0xe5, 0x95, 0xf3, 0x20, 0x70, 0xcf, 0x78, 0x29, 0x2b, 0xc4, 0x40, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc4, 0x20, 0x95, 0xd5, 0x80, 0x47, 0xc6, 0x72, 0xe, 0xb, 0xaa, 0x1e, 0xbf, 0xc5, 0xcc, 0xf4, 0xe7, 0xa4, 0x66, 0x68, 0xc2, 0x36, 0x32, 0x31, 0x4d, 0x6e, 0x2a, 0x82, 0xfb, 0x47, 0x7d, 0xa2, 0xc4, 0x32, 0xc4, 0x40, 0x1e, 0xa6, 0x34, 0x30, 0x38, 0x64, 0xa2, 0x28, 0xf4, 0x86, 0x5, 0x44, 0x23, 0xb9, 0xc5, 0x61, 0x70, 0x1b, 0x5c, 0x3c, 0x32, 0x96, 0xb2, 0x9a, 0xdc, 0x88, 0xd9, 0xd2, 0xde, 0x9, 0x43, 0xfd, 0xeb, 0xf2, 0xfc, 0x3c, 0xa3, 0x12, 0x94, 0xbd, 0x74, 0xc3, 0x2d, 0xac, 0xfe, 0x1e, 0x36, 0xa2, 0xb0, 0x3e, 0x9b, 0x1, 0xb8, 0x5e, 0xa3, 0x9a, 0x38, 0xfb, 0xf4, 0x2c, 0xd1, 0xa4, 0xf3, 0x3a} + testBckndRespUPP = []byte{0x96, 0x23, 0xc4, 0x10, 0x10, 0xb2, 0xe1, 0xa4, 0x56, 0xb3, 0x4f, 0xff, 0x9a, 0xda, 0xcc, 0x8c, 0x20, 0xf9, 0x30, 0x16, 0xc4, 0x40, 0x1e, 0xa6, 0x34, 0x30, 0x38, 0x64, 0xa2, 0x28, 0xf4, 0x86, 0x5, 0x44, 0x23, 0xb9, 0xc5, 0x61, 0x70, 0x1b, 0x5c, 0x3c, 0x32, 0x96, 0xb2, 0x9a, 0xdc, 0x88, 0xd9, 0xd2, 0xde, 0x9, 0x43, 0xfd, 0xeb, 0xf2, 0xfc, 0x3c, 0xa3, 0x12, 0x94, 0xbd, 0x74, 0xc3, 0x2d, 0xac, 0xfe, 0x1e, 0x36, 0xa2, 0xb0, 0x3e, 0x9b, 0x1, 0xb8, 0x5e, 0xa3, 0x9a, 0x38, 0xfb, 0xf4, 0x2c, 0xd1, 0xa4, 0xf3, 0x3a, 0x0, 0xc4, 0x20, 0x2e, 0x33, 0x60, 0x93, 0x4f, 0xd0, 0x4e, 0x61, 0x8f, 0x49, 0xcb, 0x19, 0x3c, 0xbb, 0x42, 0xf8, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xc4, 0x40, 0x99, 0xc9, 0xd9, 0x2e, 0xd, 0xf1, 0x19, 0xdf, 0x11, 0x3, 0xe6, 0x2c, 0xe4, 0x25, 0x60, 0xd8, 0x2f, 0x3f, 0x5b, 0x3, 0x6a, 0x38, 0x9f, 0xc7, 0x1e, 0x23, 0xf3, 0x54, 0x59, 0x6c, 0x51, 0xb0, 0x3, 0x44, 0x27, 0xad, 0xc1, 0x6a, 0x9c, 0xf9, 0x12, 0x2b, 0x1d, 0x21, 0xfc, 0xe5, 0x2a, 0xf6, 0xaf, 0x63, 0x98, 0xff, 0xd8, 0xdc, 0x4b, 0xe3, 0x10, 0x31, 0x12, 0x4e, 0xc, 0x8e, 0x76, 0x52} + testRequestID = "2e336093-4fd0-4e61-8f49-cb193cbb42f8" + testBckndResp = h.HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{"test": []string{"header"}}, + Content: testBckndRespUPP, + } + testVerificationResp = []byte("{\"upp\":\"lSLEEKqav/fAAEV6qrYY9mkA5mYAxCCAyYPC+mF1Gy94QqOjOTT8vtHEOqJco7Y5XBL1U+LwXsRAGhsRO1aNd17o3ur81fpT1CrU/CwJZavgV0AvkQiJjhgQF5fiamxBvRcuQm/PoOVBYIbWDftRafG99yP76VlHCQ==\",\"prev\":null,\"anchors\":null}") + testVerificationUPP = []byte{0x95, 0x22, 0xc4, 0x10, 0xaa, 0x9a, 0xbf, 0xf7, 0xc0, 0x00, 0x45, 0x7a, 0xaa, 0xb6, 0x18, 0xf6, 0x69, 0x00, 0xe6, 0x66, 0x00, 0xc4, 0x20, 0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e, 0xc4, 0x40, 0x1a, 0x1b, 0x11, 0x3b, 0x56, 0x8d, 0x77, 0x5e, 0xe8, 0xde, 0xea, 0xfc, 0xd5, 0xfa, 0x53, 0xd4, 0x2a, 0xd4, 0xfc, 0x2c, 0x09, 0x65, 0xab, 0xe0, 0x57, 0x40, 0x2f, 0x91, 0x08, 0x89, 0x8e, 0x18, 0x10, 0x17, 0x97, 0xe2, 0x6a, 0x6c, 0x41, 0xbd, 0x17, 0x2e, 0x42, 0x6f, 0xcf, 0xa0, 0xe5, 0x41, 0x60, 0x86, 0xd6, 0x0d, 0xfb, 0x51, 0x69, 0xf1, 0xbd, 0xf7, 0x23, 0xfb, 0xe9, 0x59, 0x47, 0x09} + testKeyRegs = []ubirch.SignedKeyRegistration{{PubKeyInfo: ubirch.KeyRegistration{PubKey: base64.StdEncoding.EncodeToString(testPublicKey)}}} +) diff --git a/main/adapters/handlers/identity_handler.go b/main/adapters/handlers/identity_handler.go index 3b32a56d..3e156439 100644 --- a/main/adapters/handlers/identity_handler.go +++ b/main/adapters/handlers/identity_handler.go @@ -50,6 +50,7 @@ func (i *IdentityHandler) InitIdentities(identities map[string]string) error { _, err = i.InitIdentity(uid, auth) if err == h.ErrAlreadyInitialized { + log.Infof("%s: identity already initialized", uid) continue } if err != nil { @@ -146,6 +147,8 @@ func (i *IdentityHandler) createCSR(uid uuid.UUID) (csrPEM []byte, err error) { csrPEM = pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE REQUEST", Bytes: csr}) + log.Infof("%s: CSR [PEM]: %s", uid, csrPEM) + return csrPEM, nil } diff --git a/main/adapters/handlers/identity_handler_test.go b/main/adapters/handlers/identity_handler_test.go index 894be420..1e995fd1 100644 --- a/main/adapters/handlers/identity_handler_test.go +++ b/main/adapters/handlers/identity_handler_test.go @@ -3,7 +3,6 @@ package handlers import ( "context" "crypto/x509" - "encoding/base64" "encoding/pem" "errors" "math/rand" @@ -12,30 +11,22 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/ubirch/ubirch-client-go/main/config" "github.com/ubirch/ubirch-protocol-go/ubirch/v2" h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" r "github.com/ubirch/ubirch-client-go/main/adapters/repository" ) -var ( - testUuid = uuid.New() - testAuth = "123456" - testSecret, _ = base64.StdEncoding.DecodeString("ZQJt1OC9+4OZtgZLLT9mX25BbrZdxtOQBjK4GyRF2fQ=") - conf = &config.Config{SecretBytes32: testSecret} -) - func TestIdentityHandler_InitIdentity(t *testing.T) { - m := newMock() - p, 
err := r.NewExtendedProtocol(&r.MockCtxMngr{}, conf) require.NoError(t, err) + csrChan := make(chan []byte) + idHandler := &IdentityHandler{ Protocol: p, SubmitKeyRegistration: MockSubmitKeyRegistration, - SubmitCSR: m.MockSubmitCSR, + SubmitCSR: asynchMockSubmitCSR(csrChan), SubjectCountry: "AA", SubjectOrganization: "test GmbH", } @@ -51,7 +42,7 @@ func TestIdentityHandler_InitIdentity(t *testing.T) { t.Errorf("rest: %q", rest) } - submittedCSR := <-m.result + submittedCSR := <-csrChan assert.Equal(t, block.Bytes, submittedCSR) csr, err := x509.ParseCertificateRequest(block.Bytes) @@ -151,22 +142,22 @@ func TestIdentityHandler_InitIdentity_BadSubmitCSR(t *testing.T) { } func TestIdentityHandler_CreateCSR(t *testing.T) { - m := newMock() - p, err := r.NewExtendedProtocol(&r.MockCtxMngr{}, conf) require.NoError(t, err) + csrChan := make(chan []byte) + idHandler := &IdentityHandler{ Protocol: p, SubmitKeyRegistration: MockSubmitKeyRegistration, - SubmitCSR: m.MockSubmitCSR, + SubmitCSR: asynchMockSubmitCSR(csrChan), SubjectCountry: "AA", SubjectOrganization: "test GmbH", } _, err = idHandler.InitIdentity(testUuid, testAuth) require.NoError(t, err) - <-m.result + <-csrChan csrPEM, err := idHandler.CreateCSR(testUuid) require.NoError(t, err) @@ -179,7 +170,7 @@ func TestIdentityHandler_CreateCSR(t *testing.T) { t.Errorf("rest: %q", rest) } - submittedCSR := <-m.result + submittedCSR := <-csrChan assert.Equal(t, block.Bytes, submittedCSR) csr, err := x509.ParseCertificateRequest(block.Bytes) @@ -440,21 +431,13 @@ func MockSubmitKeyDeletionBad(uuid.UUID, []byte) error { return MockSubmitKeyDeletionErr } -type mock struct { - result chan []byte -} - -func newMock() *mock { - return &mock{ - result: make(chan []byte), +func asynchMockSubmitCSR(csrChan chan []byte) func(uid uuid.UUID, csr []byte) error { + return func(uid uuid.UUID, csr []byte) error { + csrChan <- csr + return nil } } -func (m *mock) MockSubmitCSR(uid uuid.UUID, csr []byte) error { - m.result <- csr - return nil -} - func MockSubmitCSR(uuid.UUID, []byte) error { return nil } diff --git a/main/adapters/handlers/signer.go b/main/adapters/handlers/signer.go index eafb73cb..0115d099 100644 --- a/main/adapters/handlers/signer.go +++ b/main/adapters/handlers/signer.go @@ -44,24 +44,33 @@ var hintLookup = map[h.Operation]ubirch.Hint{ } type signingResponse struct { - Error string `json:"error,omitempty"` - Hash []byte `json:"hash"` - UPP []byte `json:"upp"` - PublicKey []byte `json:"publicKey"` - Response h.HTTPResponse `json:"response"` - RequestID string `json:"requestID"` + Hash []byte `json:"hash"` + UPP []byte `json:"upp"` + PublicKey []byte `json:"publicKey"` + Response *h.HTTPResponse `json:"response,omitempty"` + RequestID string `json:"requestID,omitempty"` +} + +type SignerProtocol interface { + LoadActiveFlag(uuid.UUID) (bool, error) + StartTransaction(context.Context) (repository.TransactionCtx, error) + LoadSignatureForUpdate(repository.TransactionCtx, uuid.UUID) ([]byte, error) + StoreSignature(repository.TransactionCtx, uuid.UUID, []byte) error + GetPublicKeyBytes(uuid.UUID) ([]byte, error) + SignatureLength() int + Sign(ubirch.UPP) ([]byte, error) } type Signer struct { - Protocol *repository.ExtendedProtocol + SignerProtocol SendToAuthService func(uid uuid.UUID, auth string, upp []byte) (h.HTTPResponse, error) } -// handle incoming messages, create, sign and send a chained ubirch protocol packet (UPP) to the ubirch backend -func (s *Signer) Chain(msg h.HTTPRequest, ctx context.Context) h.HTTPResponse { - log.Infof("%s: 
anchor hash [chained]: %s", msg.ID, base64.StdEncoding.EncodeToString(msg.Hash[:])) +func (s *Signer) Sign(msg h.HTTPRequest) h.HTTPResponse { + log.Infof("create UPP: uuid: %s, hash: %s, operation: %s, offline: %v", + msg.ID, base64.StdEncoding.EncodeToString(msg.Hash[:]), msg.Operation, msg.Offline) - active, err := s.Protocol.LoadActiveFlag(msg.ID) + active, err := s.SignerProtocol.LoadActiveFlag(msg.ID) if err != nil { log.Errorf("%s: could not load active flag: %v", msg.ID, err) return errorResponse(http.StatusInternalServerError, "") @@ -72,34 +81,54 @@ func (s *Signer) Chain(msg h.HTTPRequest, ctx context.Context) h.HTTPResponse { return errorResponse(http.StatusBadRequest, "key deactivated") } - tx, err := s.Protocol.StartTransaction(ctx) - if err != nil { - log.Errorf("%s: initializing transaction failed: %v", msg.ID, err) - return errorResponse(http.StatusServiceUnavailable, "") - } + var tx repository.TransactionCtx + var prevSignature, uppBytes []byte - prevSignature, err := s.Protocol.LoadSignatureForUpdate(tx, msg.ID) - if err != nil { - log.Errorf("%s: could not load signature: %v", msg.ID, err) - return errorResponse(http.StatusInternalServerError, "") + if msg.Operation == h.ChainHash { + tx, err = s.SignerProtocol.StartTransaction(msg.Ctx) + if err != nil { + log.Errorf("%s: initializing transaction failed: %v", msg.ID, err) + return errorResponse(http.StatusServiceUnavailable, "") + } + + prevSignature, err = s.SignerProtocol.LoadSignatureForUpdate(tx, msg.ID) + if err != nil { + log.Errorf("%s: could not load signature: %v", msg.ID, err) + return errorResponse(http.StatusInternalServerError, "") + } + + uppBytes, err = s.getChainedUPP(msg.ID, msg.Hash, prevSignature) + if err != nil { + log.Errorf("%s: could not create chained UPP: %v", msg.ID, err) + return errorResponse(http.StatusInternalServerError, "") + } + log.Debugf("%s: chained UPP: %x", msg.ID, uppBytes) + } else { + uppBytes, err = s.getSignedUPP(msg.ID, msg.Hash, msg.Operation) + if err != nil { + log.Errorf("%s: could not create signed UPP: %v", msg.ID, err) + return errorResponse(http.StatusInternalServerError, "") + } + log.Debugf("%s: signed UPP: %x", msg.ID, uppBytes) } - timer := prometheus.NewTimer(prom.SignatureCreationDuration) - uppBytes, err := s.getChainedUPP(msg.ID, msg.Hash, prevSignature) - timer.ObserveDuration() + pub, err := s.SignerProtocol.GetPublicKeyBytes(msg.ID) if err != nil { - log.Errorf("%s: could not create chained UPP: %v", msg.ID, err) - return errorResponse(http.StatusInternalServerError, "") + log.Warnf("%s: could not get public key: %v", msg.ID, err) } - log.Debugf("%s: chained UPP: %x", msg.ID, uppBytes) - resp := s.sendUPP(msg, uppBytes) + var resp h.HTTPResponse + if msg.Offline { + resp = getSigningResponse(http.StatusOK, msg.Hash[:], uppBytes, pub, nil, "") + } else { + resp = s.sendUPP(msg, uppBytes, pub) + } // persist last signature only if UPP was successfully received by ubirch backend - if h.HttpSuccess(resp.StatusCode) { - signature := uppBytes[len(uppBytes)-s.Protocol.SignatureLength():] + if msg.Operation == h.ChainHash && h.HttpSuccess(resp.StatusCode) { + signature := uppBytes[len(uppBytes)-s.SignerProtocol.SignatureLength():] - err = s.Protocol.StoreSignature(tx, msg.ID, signature) + err = s.SignerProtocol.StoreSignature(tx, msg.ID, signature) if err != nil { // this usually happens, if the request context was cancelled because the client already left (timeout or cancel) log.Errorf("%s: storing signature failed: %v", msg.ID, err) @@ -114,32 +143,11 @@ func (s 
*Signer) Chain(msg h.HTTPRequest, ctx context.Context) h.HTTPResponse { return resp } -func (s *Signer) Sign(msg h.HTTPRequest, op h.Operation) h.HTTPResponse { - log.Infof("%s: %s hash: %s", msg.ID, op, base64.StdEncoding.EncodeToString(msg.Hash[:])) - - active, err := s.Protocol.LoadActiveFlag(msg.ID) - if err != nil { - log.Errorf("%s: could not load active flag: %v", msg.ID, err) - return errorResponse(http.StatusInternalServerError, "") - } - - if !active { - log.Warnf("%s: key deactivated", msg.ID) - return errorResponse(http.StatusBadRequest, "key deactivated") - } - - uppBytes, err := s.getSignedUPP(msg.ID, msg.Hash, op) - if err != nil { - log.Errorf("%s: could not create signed UPP: %v", msg.ID, err) - return errorResponse(http.StatusInternalServerError, "") - } - log.Debugf("%s: signed UPP: %x", msg.ID, uppBytes) - - return s.sendUPP(msg, uppBytes) -} - func (s *Signer) getChainedUPP(id uuid.UUID, hash [32]byte, prevSignature []byte) ([]byte, error) { - return s.Protocol.Sign( + timer := prometheus.NewTimer(prom.SignatureCreationDuration) + defer timer.ObserveDuration() + + return s.SignerProtocol.Sign( &ubirch.ChainedUPP{ Version: ubirch.Chained, Uuid: id, @@ -155,7 +163,10 @@ func (s *Signer) getSignedUPP(id uuid.UUID, hash [32]byte, op h.Operation) ([]by return nil, fmt.Errorf("%s: invalid operation: \"%s\"", id, op) } - return s.Protocol.Sign( + timer := prometheus.NewTimer(prom.SignatureCreationDuration) + defer timer.ObserveDuration() + + return s.SignerProtocol.Sign( &ubirch.SignedUPP{ Version: ubirch.Signed, Uuid: id, @@ -164,18 +175,16 @@ func (s *Signer) getSignedUPP(id uuid.UUID, hash [32]byte, op h.Operation) ([]by }) } -func (s *Signer) sendUPP(msg h.HTTPRequest, upp []byte) h.HTTPResponse { +func (s *Signer) sendUPP(msg h.HTTPRequest, upp []byte, pub []byte) h.HTTPResponse { // send UPP to ubirch backend - timer := prometheus.NewTimer(prom.UpstreamResponseDuration) backendResp, err := s.SendToAuthService(msg.ID, msg.Auth, upp) - timer.ObserveDuration() if err != nil { if os.IsTimeout(err) { - log.Errorf("%s: request to UBIRCH Authentication Service timed out after %s: %v", msg.ID, h.BackendRequestTimeout.String(), err) + log.Errorf("%s: request to UBIRCH Authentication Service timed out: %v", msg.ID, err) return errorResponse(http.StatusGatewayTimeout, "") } else { log.Errorf("%s: sending request to UBIRCH Authentication Service failed: %v", msg.ID, err) - return errorResponse(http.StatusInternalServerError, "") + return errorResponse(http.StatusBadGateway, "") } } log.Debugf("%s: backend response: (%d) %x", msg.ID, backendResp.StatusCode, backendResp.Content) @@ -195,12 +204,13 @@ func (s *Signer) sendUPP(msg h.HTTPRequest, upp []byte) h.HTTPResponse { } } - pub, err := s.Protocol.GetPublicKeyBytes(msg.ID) - if err != nil { - log.Warnf("%s: could not get public key: %v", msg.ID, err) + resp := getSigningResponse(backendResp.StatusCode, msg.Hash[:], upp, pub, &backendResp, requestID) + + if h.HttpFailed(backendResp.StatusCode) { + log.Errorf("%s: request to ubirch authentication service (niomon) failed: (%d) %s", msg.ID, backendResp.StatusCode, string(resp.Content)) } - return getSigningResponse(msg, upp, pub, backendResp, requestID) + return resp } func getRequestID(respUPP ubirch.UPP) (string, error) { @@ -222,13 +232,13 @@ func errorResponse(code int, message string) h.HTTPResponse { return h.HTTPResponse{ StatusCode: code, Header: http.Header{"Content-Type": {"text/plain; charset=utf-8"}}, - Content: []byte(message), + Content: []byte(message + "\n"), } } -func 
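// Illustrative sketch, not part of this change: with the unified handler above,
// a single Signer.Sign call covers chaining, anchoring and the
// enable/disable/delete operations; the operation and the offline flag travel
// inside the request struct. The helper below is an assumed usage example, not
// code from this repository.
package sketch

import (
	"context"
	"crypto/sha256"

	"github.com/google/uuid"

	"github.com/ubirch/ubirch-client-go/main/adapters/handlers"
	h "github.com/ubirch/ubirch-client-go/main/adapters/http_server"
)

// chainOffline creates a chained UPP for the given data locally, i.e. without
// sending it to the UBIRCH backend, while still persisting the new signature
// so that the chain stays intact.
func chainOffline(s *handlers.Signer, uid uuid.UUID, auth string, data []byte) h.HTTPResponse {
	return s.Sign(h.HTTPRequest{
		Ctx:       context.Background(),
		ID:        uid,
		Auth:      auth,
		Hash:      sha256.Sum256(data),
		Operation: h.ChainHash,
		Offline:   true,
	})
}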
getSigningResponse(msg h.HTTPRequest, upp []byte, pub []byte, backendResp h.HTTPResponse, requestID string) h.HTTPResponse { +func getSigningResponse(statusCode int, hash, upp, pub []byte, backendResp *h.HTTPResponse, requestID string) h.HTTPResponse { signingResp, err := json.Marshal(signingResponse{ - Hash: msg.Hash[:], + Hash: hash, UPP: upp, PublicKey: pub, Response: backendResp, @@ -238,12 +248,8 @@ func getSigningResponse(msg h.HTTPRequest, upp []byte, pub []byte, backendResp h log.Warnf("error serializing signing response: %v", err) } - if h.HttpFailed(backendResp.StatusCode) { - log.Errorf("%s: request to ubirch authentication service (niomon) failed: (%d) %s", msg.ID, backendResp.StatusCode, string(signingResp)) - } - return h.HTTPResponse{ - StatusCode: backendResp.StatusCode, + StatusCode: statusCode, Header: http.Header{"Content-Type": {"application/json"}}, Content: signingResp, } diff --git a/main/adapters/handlers/signer_test.go b/main/adapters/handlers/signer_test.go new file mode 100644 index 00000000..a1e99ba3 --- /dev/null +++ b/main/adapters/handlers/signer_test.go @@ -0,0 +1,281 @@ +package handlers + +import ( + "context" + "net/http" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/ubirch/ubirch-protocol-go/ubirch/v2" + + h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" + r "github.com/ubirch/ubirch-client-go/main/adapters/repository" +) + +type mockProto struct { + mock *mock.Mock + done chan bool +} + +func (m *mockProto) LoadActiveFlag(uid uuid.UUID) (bool, error) { + args := m.mock.MethodCalled("LoadActiveFlag", uid) + return args.Bool(0), args.Error(1) +} + +func (m *mockProto) StartTransaction(ctx context.Context) (r.TransactionCtx, error) { + args := m.mock.MethodCalled("StartTransaction", ctx) + return args.Get(0).(*mockTx), args.Error(1) +} + +func (m *mockProto) LoadSignatureForUpdate(tx r.TransactionCtx, uid uuid.UUID) ([]byte, error) { + args := m.mock.MethodCalled("LoadSignatureForUpdate", tx, uid) + return args.Get(0).([]byte), args.Error(1) +} + +func (m *mockProto) StoreSignature(tx r.TransactionCtx, uid uuid.UUID, sig []byte) error { + args := m.mock.MethodCalled("StoreSignature", tx, uid, sig) + return args.Error(0) +} + +func (m *mockProto) GetPublicKeyBytes(uid uuid.UUID) ([]byte, error) { + args := m.mock.MethodCalled("GetPublicKeyBytes", uid) + return args.Get(0).([]byte), args.Error(1) +} + +func (m *mockProto) SignatureLength() int { + args := m.mock.MethodCalled("SignatureLength") + return args.Int(0) +} + +func (m *mockProto) Sign(upp ubirch.UPP) ([]byte, error) { + args := m.mock.MethodCalled("Sign", upp) + return args.Get(0).([]byte), args.Error(1) +} + +type mockTx struct{} + +func (m *mockTx) Commit() error { return nil } +func (m *mockTx) Rollback() error { return nil } + +func sendToAuthService(m *mock.Mock) func(uid uuid.UUID, auth string, upp []byte) (h.HTTPResponse, error) { + return func(uid uuid.UUID, auth string, upp []byte) (h.HTTPResponse, error) { + args := m.MethodCalled("sendToAuthService", uid, auth, upp) + return args.Get(0).(h.HTTPResponse), args.Error(1) + } +} + +func TestSigner_Sign(t *testing.T) { + + testCases := []struct { + name string + msg h.HTTPRequest + setMockBehavior func(m *mock.Mock) + tcChecks func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) + }{ + { + name: "chain online", + msg: h.HTTPRequest{ + Ctx: context.Background(), + ID: testUuid, + Auth: testAuth, + Hash: testHash, + Operation: h.ChainHash, + 
Offline: false, + }, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadActiveFlag", testUuid).Return(true, nil) + m.On("StartTransaction", mock.AnythingOfType("*context.emptyCtx")).Return(&mockTx{}, nil) + m.On("LoadSignatureForUpdate", &mockTx{}, testUuid).Return(testSignature, nil) + m.On("Sign", &ubirch.ChainedUPP{ + Version: ubirch.Chained, + Uuid: testUuid, + PrevSignature: testSignature, + Hint: ubirch.Binary, + Payload: testHash[:], + }).Return(testChainedUPP, nil) + m.On("GetPublicKeyBytes", testUuid).Return(testPublicKey, nil) + m.On("sendToAuthService", testUuid, testAuth, testChainedUPP).Return(testBckndResp, nil) + m.On("SignatureLength").Return(64) + m.On("StoreSignature", &mockTx{}, testUuid, testChainedUPP[len(testChainedUPP)-64:]).Return(nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getSigningResponse(http.StatusOK, testHash[:], testChainedUPP, testPublicKey, &testBckndResp, testRequestID), resp) + }, + }, + { + name: "chain offline", + msg: h.HTTPRequest{ + Ctx: context.Background(), + ID: testUuid, + Auth: testAuth, + Hash: testHash, + Operation: h.ChainHash, + Offline: true, + }, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadActiveFlag", testUuid).Return(true, nil) + m.On("StartTransaction", mock.AnythingOfType("*context.emptyCtx")).Return(&mockTx{}, nil) + m.On("LoadSignatureForUpdate", &mockTx{}, testUuid).Return(testSignature, nil) + m.On("Sign", &ubirch.ChainedUPP{ + Version: ubirch.Chained, + Uuid: testUuid, + PrevSignature: testSignature, + Hint: ubirch.Binary, + Payload: testHash[:], + }).Return(testChainedUPP, nil) + m.On("GetPublicKeyBytes", testUuid).Return(testPublicKey, nil) + m.On("SignatureLength").Return(64) + m.On("StoreSignature", &mockTx{}, testUuid, testChainedUPP[len(testChainedUPP)-64:]).Return(nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getSigningResponse(http.StatusOK, testHash[:], testChainedUPP, testPublicKey, nil, ""), resp) + }, + }, + { + name: "anchor online", + msg: h.HTTPRequest{ + Ctx: context.Background(), + ID: testUuid, + Auth: testAuth, + Hash: testHash, + Operation: h.AnchorHash, + Offline: false, + }, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadActiveFlag", testUuid).Return(true, nil) + m.On("Sign", &ubirch.SignedUPP{ + Version: ubirch.Signed, + Uuid: testUuid, + Hint: ubirch.Binary, + Payload: testHash[:], + }).Return(testSignedUPP, nil) + m.On("GetPublicKeyBytes", testUuid).Return(testPublicKey, nil) + m.On("sendToAuthService", testUuid, testAuth, testSignedUPP).Return(testBckndResp, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getSigningResponse(http.StatusOK, testHash[:], testSignedUPP, testPublicKey, &testBckndResp, testRequestID), resp) + }, + }, + { + name: "anchor offline", + msg: h.HTTPRequest{ + Ctx: context.Background(), + ID: testUuid, + Auth: testAuth, + Hash: testHash, + Operation: h.AnchorHash, + Offline: true, + }, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadActiveFlag", testUuid).Return(true, nil) + m.On("Sign", &ubirch.SignedUPP{ + Version: ubirch.Signed, + Uuid: testUuid, + Hint: ubirch.Binary, + Payload: testHash[:], + }).Return(testSignedUPP, nil) + m.On("GetPublicKeyBytes", testUuid).Return(testPublicKey, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getSigningResponse(http.StatusOK, 
testHash[:], testSignedUPP, testPublicKey, nil, ""), resp) + }, + }, + { + name: "disable", + msg: h.HTTPRequest{ + Ctx: context.Background(), + ID: testUuid, + Auth: testAuth, + Hash: testHash, + Operation: h.DisableHash, + }, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadActiveFlag", testUuid).Return(true, nil) + m.On("Sign", &ubirch.SignedUPP{ + Version: ubirch.Signed, + Uuid: testUuid, + Hint: ubirch.Disable, + Payload: testHash[:], + }).Return(testSignedUPP, nil) + m.On("GetPublicKeyBytes", testUuid).Return(testPublicKey, nil) + m.On("sendToAuthService", testUuid, testAuth, testSignedUPP).Return(testBckndResp, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getSigningResponse(http.StatusOK, testHash[:], testSignedUPP, testPublicKey, &testBckndResp, testRequestID), resp) + }, + }, + { + name: "enable", + msg: h.HTTPRequest{ + Ctx: context.Background(), + ID: testUuid, + Auth: testAuth, + Hash: testHash, + Operation: h.EnableHash, + }, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadActiveFlag", testUuid).Return(true, nil) + m.On("Sign", &ubirch.SignedUPP{ + Version: ubirch.Signed, + Uuid: testUuid, + Hint: ubirch.Enable, + Payload: testHash[:], + }).Return(testSignedUPP, nil) + m.On("GetPublicKeyBytes", testUuid).Return(testPublicKey, nil) + m.On("sendToAuthService", testUuid, testAuth, testSignedUPP).Return(testBckndResp, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getSigningResponse(http.StatusOK, testHash[:], testSignedUPP, testPublicKey, &testBckndResp, testRequestID), resp) + }, + }, + { + name: "delete", + msg: h.HTTPRequest{ + Ctx: context.Background(), + ID: testUuid, + Auth: testAuth, + Hash: testHash, + Operation: h.DeleteHash, + }, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadActiveFlag", testUuid).Return(true, nil) + m.On("Sign", &ubirch.SignedUPP{ + Version: ubirch.Signed, + Uuid: testUuid, + Hint: ubirch.Delete, + Payload: testHash[:], + }).Return(testSignedUPP, nil) + m.On("GetPublicKeyBytes", testUuid).Return(testPublicKey, nil) + m.On("sendToAuthService", testUuid, testAuth, testSignedUPP).Return(testBckndResp, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getSigningResponse(http.StatusOK, testHash[:], testSignedUPP, testPublicKey, &testBckndResp, testRequestID), resp) + }, + }, + } + for _, c := range testCases { + t.Run(c.name, func(t *testing.T) { + m := &mock.Mock{} + m.Test(t) + c.setMockBehavior(m) + + s := Signer{ + SignerProtocol: &mockProto{mock: m}, + SendToAuthService: sendToAuthService(m), + } + + resp := s.Sign(c.msg) + + c.tcChecks(t, resp, m) + }) + } +} diff --git a/main/adapters/handlers/verifier.go b/main/adapters/handlers/verifier.go index 137eac8f..39c3f373 100644 --- a/main/adapters/handlers/verifier.go +++ b/main/adapters/handlers/verifier.go @@ -18,20 +18,29 @@ package handlers import ( "bytes" + "context" "encoding/base64" "encoding/json" + "errors" "fmt" "net/http" + "os" "time" "github.com/google/uuid" "github.com/ubirch/ubirch-client-go/main/adapters/repository" + "github.com/ubirch/ubirch-client-go/main/ent" "github.com/ubirch/ubirch-protocol-go/ubirch/v2" log "github.com/sirupsen/logrus" h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" ) +var ( + ErrUnknownIdentity = errors.New("UPP from unknown identity") + ErrInvalidUPP = errors.New("invalid UPP") +) + type verification struct { UPP 
[]byte `json:"upp"` Prev []byte `json:"prev"` @@ -46,18 +55,27 @@ type verificationResponse struct { PubKey []byte `json:"pubKey,omitempty"` } +type VerifierProtocol interface { + LoadPublicKey(id uuid.UUID) (pubKeyPEM []byte, err error) + PublicKeyPEMToBytes(pubKeyPEM []byte) (pubKeyBytes []byte, err error) + SetPublicKeyBytes(id uuid.UUID, pubKeyBytes []byte) error + Verify(id uuid.UUID, upp []byte) (bool, error) + StoreExternalIdentity(context.Context, ent.ExternalIdentity) error +} + type Verifier struct { - Protocol *repository.ExtendedProtocol + VerifierProtocol RequestHash func(hashBase64 string) (h.HTTPResponse, error) RequestPublicKeys func(id uuid.UUID) ([]ubirch.SignedKeyRegistration, error) VerifyFromKnownIdentitiesOnly bool + VerificationTimeout time.Duration } -func (v *Verifier) Verify(hash []byte) h.HTTPResponse { +func (v *Verifier) Verify(ctx context.Context, hash []byte) h.HTTPResponse { log.Infof("verifying hash %s", base64.StdEncoding.EncodeToString(hash)) // retrieve certificate for hash from the ubirch backend - code, upp, err := v.loadUPP(hash) + code, upp, err := v.loadUPP(ctx, hash) if err != nil { log.Error(err) return errorResponse(code, err.Error()) @@ -65,33 +83,77 @@ func (v *Verifier) Verify(hash []byte) h.HTTPResponse { log.Debugf("retrieved UPP %x", upp) // verify validity of the retrieved UPP locally - id, pkey, err := v.verifyUPP(upp) + id, pubKey, verified, err := v.verifyUppSignature(upp, v.VerifyFromKnownIdentitiesOnly) if err != nil { - return getVerificationResponse(http.StatusUnprocessableEntity, hash, upp, id, pkey, err.Error()) + if err == ErrUnknownIdentity { + return getVerificationResponse(http.StatusForbidden, hash, upp, id, pubKey, err.Error()) + } + return getVerificationResponse(http.StatusInternalServerError, hash, upp, id, pubKey, err.Error()) + } + + if !verified { + return getVerificationResponse(http.StatusForbidden, hash, upp, id, pubKey, "invalid UPP signature") } - log.Debugf("verified UPP from identity %s using public key %s", id, base64.StdEncoding.EncodeToString(pkey)) + log.Infof("successfully verified UPP: uuid: %s, hash: %s, public key: %s, offline: %v", + id, base64.StdEncoding.EncodeToString(hash), base64.StdEncoding.EncodeToString(pubKey), false) - return getVerificationResponse(http.StatusOK, hash, upp, id, pkey, "") + return getVerificationResponse(http.StatusOK, hash, upp, id, pubKey, "") +} + +func (v *Verifier) VerifyOffline(upp, hash []byte) h.HTTPResponse { + log.Infof("performing offline verification for UPP %s and hash %s", base64.StdEncoding.EncodeToString(upp), base64.StdEncoding.EncodeToString(hash)) + + // verify validity of the UPP locally + id, pubKey, verified, err := v.verifyUppSignature(upp, true) + if err != nil { + if err == ErrUnknownIdentity { + return getVerificationResponse(http.StatusNotFound, hash, upp, id, pubKey, err.Error()) + } + if err == ErrInvalidUPP { + return getVerificationResponse(http.StatusBadRequest, hash, upp, id, pubKey, err.Error()) + } + return getVerificationResponse(http.StatusInternalServerError, hash, upp, id, pubKey, err.Error()) + } + + if !verified { + return getVerificationResponse(http.StatusForbidden, hash, upp, id, pubKey, "invalid UPP signature") + } + + // verify data hash matches UPP payload + err = v.verifyDataMatch(upp, hash) + if err != nil { + return getVerificationResponse(http.StatusBadRequest, hash, upp, id, pubKey, err.Error()) + } + log.Infof("successfully verified UPP: uuid: %s, hash: %s, public key: %s, offline: %v", + id, 
base64.StdEncoding.EncodeToString(hash), base64.StdEncoding.EncodeToString(pubKey), true) + + return getVerificationResponse(http.StatusOK, hash, upp, id, pubKey, "") } // loadUPP retrieves the UPP which contains a given hash from the ubirch backend -func (v *Verifier) loadUPP(hash []byte) (int, []byte, error) { +func (v *Verifier) loadUPP(ctx context.Context, hash []byte) (int, []byte, error) { var resp h.HTTPResponse var err error hashBase64 := base64.StdEncoding.EncodeToString(hash) n := 0 - for stay, timeout := true, time.After(time.Second); stay; { + for stay, timeout := true, time.After(v.VerificationTimeout); stay; { n++ select { + case <-ctx.Done(): + stay = false case <-timeout: stay = false default: resp, err = v.RequestHash(hashBase64) if err != nil { - return http.StatusInternalServerError, nil, fmt.Errorf("error sending verification request: %v", err) + if os.IsTimeout(err) { + return http.StatusGatewayTimeout, nil, fmt.Errorf("request to UBIRCH Verification Service timed out: %v", err) + } else { + return http.StatusBadGateway, nil, fmt.Errorf("sending request to UBIRCH Verification Service failed: %v", err) + } } - stay = h.HttpFailed(resp.StatusCode) + stay = resp.StatusCode == http.StatusNotFound if stay { log.Debugf("Couldn't verify hash yet (%d). Retry... %d", resp.StatusCode, n) time.Sleep(200 * time.Millisecond) @@ -108,71 +170,119 @@ func (v *Verifier) loadUPP(hash []byte) (int, []byte, error) { if err != nil { return http.StatusBadGateway, nil, fmt.Errorf("unable to decode verification response: %v", err) } - return resp.StatusCode, vf.UPP, nil + return http.StatusOK, vf.UPP, nil } -// verifyUPP verifies the signature of UPPs from known identities using their public keys from the local keystore -func (v *Verifier) verifyUPP(upp []byte) (uuid.UUID, []byte, error) { +// verifyUppSignature verifies the signature of UPPs from known identities using their public key from the local +// keystore. +// If the public key can not be found in the local keystore, i.e. the identity is unknown, the public key will be +// requested from the UBIRCH identity service only if verifyFromKnownIdentitiesOnly is `false`. 
+func (v *Verifier) verifyUppSignature(upp []byte, verifyFromKnownIdentitiesOnly bool) (id uuid.UUID, pubKeyBytes []byte, verified bool, err error) { uppStruct, err := ubirch.Decode(upp) if err != nil { - return uuid.Nil, nil, fmt.Errorf("retrieved invalid UPP: %v", err) + log.Errorf("unable to decode UPP: %v", err) + return uuid.Nil, nil, false, ErrInvalidUPP } - id := uppStruct.GetUuid() + id = uppStruct.GetUuid() - pubKeyPEM, err := v.Protocol.LoadPublicKey(id) - if err == repository.ErrNotExist { - if v.VerifyFromKnownIdentitiesOnly { - return id, nil, fmt.Errorf("retrieved certificate for requested hash is from unknown identity") - } else { - log.Warnf("couldn't get public key for identity %s from local context", id) - err := v.loadPublicKey(id) - if err != nil { - return id, nil, err - } - pubKeyPEM, err = v.Protocol.LoadPublicKey(id) + pubKeyPEM, err := v.VerifierProtocol.LoadPublicKey(id) + if err != nil { + if err == repository.ErrNotExist { + pubKeyPEM, err = v.loadExternalIdentityPublicKey(verifyFromKnownIdentitiesOnly, id) if err != nil { - return id, nil, err + return id, nil, false, err } + + } else { + return id, nil, false, err } - } else if err != nil { - return id, nil, err } - verified, err := v.Protocol.Verify(id, upp) - if !verified { + pubKeyBytes, err = v.VerifierProtocol.PublicKeyPEMToBytes(pubKeyPEM) + if err != nil { + log.Error(err) + } + + verified, err = v.VerifierProtocol.Verify(id, upp) + if err != nil { + return id, pubKeyBytes, false, fmt.Errorf("unable to verify UPP: %v", err) + } + + return id, pubKeyBytes, verified, nil +} + +func (v *Verifier) loadExternalIdentityPublicKey(verifyFromKnownIdentitiesOnly bool, id uuid.UUID) (pubKeyPEM []byte, err error) { + if verifyFromKnownIdentitiesOnly { + return nil, ErrUnknownIdentity + } + + log.Warnf("UPP from unknown identity %s", id) + err = v.loadPublicKey(id) + if err != nil { + return nil, err + } + + pubKeyPEM, err = v.VerifierProtocol.LoadPublicKey(id) + if err != nil { + return nil, err + } + + go func() { + // store public key persistently + err = v.VerifierProtocol.StoreExternalIdentity(context.TODO(), ent.ExternalIdentity{ + Uid: id, + PublicKey: pubKeyPEM, + }) if err != nil { - log.Error(err) + log.Errorf("external identity %s could not be stored: %v", id, err) } - return id, pubKeyPEM, fmt.Errorf("signature of retrieved certificate for requested hash could not be verified") - } + }() - return id, pubKeyPEM, nil // todo return bytes + return pubKeyPEM, nil } -// loadPublicKey retrieves the first valid public key associated with an identity from the key service +// loadPublicKey retrieves the first valid public key associated with an identity from the UBIRCH identity service +// and loads it into the public key cache func (v *Verifier) loadPublicKey(id uuid.UUID) error { - log.Debugf("requesting public key for identity %s from key service", id.String()) + log.Infof("requesting public key for identity %s from identity service", id) keys, err := v.RequestPublicKeys(id) if err != nil { return err } - if len(keys) < 1 { - return fmt.Errorf("no public key for identity %s registered at key service", id.String()) - } else if len(keys) > 1 { + if len(keys) == 0 { + return fmt.Errorf("no public key for identity %s registered at UBIRCH identity service", id.String()) + } + + if len(keys) > 1 { log.Warnf("several public keys registered for identity %s", id.String()) } - log.Printf("retrieved public key for identity %s: %s", keys[0].PubKeyInfo.HwDeviceId, keys[0].PubKeyInfo.PubKey) + log.Infof("retrieved public 
key for identity %s: %s", keys[0].PubKeyInfo.HwDeviceId, keys[0].PubKeyInfo.PubKey) pubKeyBytes, err := base64.StdEncoding.DecodeString(keys[0].PubKeyInfo.PubKey) if err != nil { return err } - return v.Protocol.SetPublicKeyBytes(id, pubKeyBytes) + return v.VerifierProtocol.SetPublicKeyBytes(id, pubKeyBytes) +} + +func (v *Verifier) verifyDataMatch(upp, hash []byte) error { + uppStruct, err := ubirch.Decode(upp) + if err != nil { + return fmt.Errorf("invalid UPP: %v", err) + } + + if !bytes.Equal(uppStruct.GetPayload(), hash) { + return fmt.Errorf("data does not match UPP payload, data hash: %s, UPP payload: %s", + base64.StdEncoding.EncodeToString(hash), + base64.StdEncoding.EncodeToString(uppStruct.GetPayload())) + } + + return nil } func getVerificationResponse(respCode int, hash []byte, upp []byte, id uuid.UUID, pkey []byte, errMsg string) h.HTTPResponse { diff --git a/main/adapters/handlers/verifier_test.go b/main/adapters/handlers/verifier_test.go new file mode 100644 index 00000000..2fb4c756 --- /dev/null +++ b/main/adapters/handlers/verifier_test.go @@ -0,0 +1,293 @@ +package handlers + +import ( + "context" + "encoding/base64" + "fmt" + "net/http" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/ubirch/ubirch-client-go/main/ent" + "github.com/ubirch/ubirch-protocol-go/ubirch/v2" + + h "github.com/ubirch/ubirch-client-go/main/adapters/http_server" + r "github.com/ubirch/ubirch-client-go/main/adapters/repository" +) + +func (m *mockProto) LoadPublicKey(id uuid.UUID) (pubKeyPEM []byte, err error) { + args := m.mock.MethodCalled("LoadPublicKey", id) + return args.Get(0).([]byte), args.Error(1) +} + +func (m *mockProto) PublicKeyPEMToBytes(pubKeyPEM []byte) (pubKeyBytes []byte, err error) { + args := m.mock.MethodCalled("PublicKeyPEMToBytes", pubKeyPEM) + return args.Get(0).([]byte), args.Error(1) +} + +func (m *mockProto) SetPublicKeyBytes(id uuid.UUID, pubKeyBytes []byte) error { + args := m.mock.MethodCalled("SetPublicKeyBytes", id, pubKeyBytes) + return args.Error(0) +} + +func (m *mockProto) Verify(id uuid.UUID, upp []byte) (bool, error) { + args := m.mock.MethodCalled("Verify", id, upp) + return args.Bool(0), args.Error(1) +} + +func (m *mockProto) StoreExternalIdentity(ctx context.Context, extId ent.ExternalIdentity) error { + defer func() { m.done <- true }() + args := m.mock.MethodCalled("StoreExternalIdentity", ctx, extId) + return args.Error(0) +} + +func RequestHash(m *mock.Mock) func(hashBase64 string) (h.HTTPResponse, error) { + return func(hashBase64 string) (h.HTTPResponse, error) { + args := m.MethodCalled("RequestHash", hashBase64) + return args.Get(0).(h.HTTPResponse), args.Error(1) + } +} + +func RequestPublicKeys(m *mock.Mock) func(id uuid.UUID) ([]ubirch.SignedKeyRegistration, error) { + return func(id uuid.UUID) ([]ubirch.SignedKeyRegistration, error) { + args := m.MethodCalled("RequestPublicKeys", id) + return args.Get(0).([]ubirch.SignedKeyRegistration), args.Error(1) + } +} + +func TestVerifier_Verify(t *testing.T) { + + testCases := []struct { + name string + VerifyFromKnownIdentitiesOnly bool + setMockBehavior func(m *mock.Mock) + tcChecks func(t *testing.T, resp h.HTTPResponse, m *mockProto) + }{ + { + name: "verification success", + setMockBehavior: func(m *mock.Mock) { + m.On("RequestHash", base64.StdEncoding.EncodeToString(testHash[:])). 
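// Illustrative sketch, not part of this change: the new VerifyOffline path above
// checks a UPP the caller already has, so no request to the UBIRCH backend is
// made. The helper below is an assumed usage example; in this repository the
// HTTP layer does the equivalent wiring and appears to read the UPP from the
// new X-Ubirch-UPP header (see the getUPP helper added later in this patch).
package sketch

import (
	"crypto/sha256"
	"encoding/base64"
	"net/http"

	"github.com/ubirch/ubirch-client-go/main/adapters/handlers"
	h "github.com/ubirch/ubirch-client-go/main/adapters/http_server"
)

// verifyLocally verifies the UPP from the X-Ubirch-UPP request header against
// the SHA256 hash of the original data, using only locally known public keys.
func verifyLocally(v *handlers.Verifier, r *http.Request, originalData []byte) h.HTTPResponse {
	upp, err := base64.StdEncoding.DecodeString(r.Header.Get(h.XUPPHeader))
	if err != nil || len(upp) == 0 {
		return h.HTTPResponse{StatusCode: http.StatusBadRequest}
	}

	hash := sha256.Sum256(originalData)

	// 200 on success, 400 on an invalid UPP or hash mismatch, 403 on a bad
	// signature, 404 if the identity is not known locally
	return v.VerifyOffline(upp, hash[:])
}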
+ Return(h.HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{"content-type": []string{h.JSONType}}, + Content: testVerificationResp, + }, nil) + m.On("LoadPublicKey", testUuid).Return(testPublicKeyPEM, nil) + m.On("PublicKeyPEMToBytes", testPublicKeyPEM).Return(testPublicKey, nil) + m.On("Verify", testUuid, testVerificationUPP).Return(true, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mockProto) { + m.mock.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusOK, testHash[:], testVerificationUPP, testUuid, testPublicKey, ""), resp) + }, + }, + { + name: "load public key", + setMockBehavior: func(m *mock.Mock) { + m.On("RequestHash", base64.StdEncoding.EncodeToString(testHash[:])). + Return(h.HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{"content-type": []string{h.JSONType}}, + Content: testVerificationResp, + }, nil) + m.On("LoadPublicKey", testUuid).Return([]byte{}, r.ErrNotExist).Once() + m.On("RequestPublicKeys", testUuid).Return(testKeyRegs, nil) + m.On("SetPublicKeyBytes", testUuid, testPublicKey).Return(nil) + m.On("LoadPublicKey", testUuid).Return(testPublicKeyPEM, nil) + m.On("StoreExternalIdentity", context.TODO(), ent.ExternalIdentity{ + Uid: testUuid, + PublicKey: testPublicKeyPEM, + }).Return(nil) + m.On("PublicKeyPEMToBytes", testPublicKeyPEM).Return(testPublicKey, nil) + m.On("Verify", testUuid, testVerificationUPP).Return(true, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mockProto) { + <-m.done + m.mock.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusOK, testHash[:], testVerificationUPP, testUuid, testPublicKey, ""), resp) + }, + }, + { + name: "not found", + setMockBehavior: func(m *mock.Mock) { + m.On("RequestHash", base64.StdEncoding.EncodeToString(testHash[:])). + Return(h.HTTPResponse{StatusCode: http.StatusNotFound}, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mockProto) { + m.mock.AssertExpectations(t) + assert.Equal(t, http.StatusNotFound, resp.StatusCode) + }, + }, + { + name: "UPP from unknown identity", + VerifyFromKnownIdentitiesOnly: true, + setMockBehavior: func(m *mock.Mock) { + m.On("RequestHash", base64.StdEncoding.EncodeToString(testHash[:])). + Return(h.HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{"content-type": []string{h.JSONType}}, + Content: testVerificationResp, + }, nil) + m.On("LoadPublicKey", testUuid).Return([]byte{}, r.ErrNotExist) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mockProto) { + m.mock.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusForbidden, testHash[:], testVerificationUPP, testUuid, nil, ErrUnknownIdentity.Error()), resp) + }, + }, + { + name: "internal server error", + setMockBehavior: func(m *mock.Mock) { + m.On("RequestHash", base64.StdEncoding.EncodeToString(testHash[:])). + Return(h.HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{"content-type": []string{h.JSONType}}, + Content: testVerificationResp, + }, nil) + m.On("LoadPublicKey", testUuid).Return([]byte{}, fmt.Errorf("some error")) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mockProto) { + m.mock.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusInternalServerError, testHash[:], testVerificationUPP, testUuid, nil, "some error"), resp) + }, + }, + { + name: "invalid signature", + setMockBehavior: func(m *mock.Mock) { + m.On("RequestHash", base64.StdEncoding.EncodeToString(testHash[:])). 
+ Return(h.HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{"content-type": []string{h.JSONType}}, + Content: testVerificationResp, + }, nil) + m.On("LoadPublicKey", testUuid).Return(testPublicKeyPEM, nil) + m.On("PublicKeyPEMToBytes", testPublicKeyPEM).Return(testPublicKey, nil) + m.On("Verify", testUuid, testVerificationUPP).Return(false, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mockProto) { + m.mock.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusForbidden, testHash[:], testVerificationUPP, testUuid, testPublicKey, "invalid UPP signature"), resp) + }, + }, + } + for _, c := range testCases { + t.Run(c.name, func(t *testing.T) { + m := &mockProto{mock: &mock.Mock{}, done: make(chan bool)} + m.mock.Test(t) + c.setMockBehavior(m.mock) + + v := Verifier{ + VerifierProtocol: m, + RequestHash: RequestHash(m.mock), + RequestPublicKeys: RequestPublicKeys(m.mock), + VerifyFromKnownIdentitiesOnly: c.VerifyFromKnownIdentitiesOnly, + VerificationTimeout: time.Second, + } + + resp := v.Verify(context.Background(), testHash[:]) + + c.tcChecks(t, resp, m) + }) + } +} + +func TestVerifier_VerifyOffline(t *testing.T) { + + testCases := []struct { + name string + upp []byte + setMockBehavior func(m *mock.Mock) + tcChecks func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) + }{ + { + name: "verification success", + upp: testVerificationUPP, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadPublicKey", testUuid).Return(testPublicKeyPEM, nil) + m.On("PublicKeyPEMToBytes", testPublicKeyPEM).Return(testPublicKey, nil) + m.On("Verify", testUuid, testVerificationUPP).Return(true, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusOK, testHash[:], testVerificationUPP, testUuid, testPublicKey, ""), resp) + }, + }, + { + name: "UPP from unknown identity", + upp: testVerificationUPP, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadPublicKey", testUuid).Return([]byte{}, r.ErrNotExist) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusNotFound, testHash[:], testVerificationUPP, testUuid, nil, ErrUnknownIdentity.Error()), resp) + }, + }, + { + name: "invalid UPP", + upp: testVerificationUPP[1:], + setMockBehavior: func(m *mock.Mock) {}, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusBadRequest, testHash[:], testVerificationUPP[1:], uuid.Nil, nil, ErrInvalidUPP.Error()), resp) + }, + }, + { + name: "internal server error", + upp: testVerificationUPP, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadPublicKey", testUuid).Return(testPublicKeyPEM, nil) + m.On("PublicKeyPEMToBytes", testPublicKeyPEM).Return(testPublicKey, nil) + m.On("Verify", testUuid, testVerificationUPP).Return(false, fmt.Errorf("some error")) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusInternalServerError, testHash[:], testVerificationUPP, testUuid, testPublicKey, "unable to verify UPP: some error"), resp) + }, + }, + { + name: "invalid signature", + upp: testVerificationUPP, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadPublicKey", testUuid).Return(testPublicKeyPEM, nil) + m.On("PublicKeyPEMToBytes", testPublicKeyPEM).Return(testPublicKey, nil) + m.On("Verify", 
testUuid, testVerificationUPP).Return(false, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, getVerificationResponse(http.StatusForbidden, testHash[:], testVerificationUPP, testUuid, testPublicKey, "invalid UPP signature"), resp) + }, + }, + { + name: "hash mismatch", + upp: testSignedUPP, + setMockBehavior: func(m *mock.Mock) { + m.On("LoadPublicKey", testUuid).Return(testPublicKeyPEM, nil) + m.On("PublicKeyPEMToBytes", testPublicKeyPEM).Return(testPublicKey, nil) + m.On("Verify", testUuid, testSignedUPP).Return(true, nil) + }, + tcChecks: func(t *testing.T, resp h.HTTPResponse, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusBadRequest, resp.StatusCode) + }, + }, + } + for _, c := range testCases { + t.Run(c.name, func(t *testing.T) { + m := &mock.Mock{} + m.Test(t) + c.setMockBehavior(m) + + v := Verifier{ + VerifierProtocol: &mockProto{mock: m}, + } + + resp := v.VerifyOffline(c.upp, testHash[:]) + + c.tcChecks(t, resp, m) + }) + } +} diff --git a/main/adapters/http_server/active_updater.go b/main/adapters/http_server/active_updater.go index 27ad103c..6f82ac04 100644 --- a/main/adapters/http_server/active_updater.go +++ b/main/adapters/http_server/active_updater.go @@ -4,7 +4,7 @@ import ( "bytes" "encoding/json" "fmt" - "io/ioutil" + "io" "net/http" "github.com/google/uuid" @@ -12,14 +12,24 @@ import ( log "github.com/sirupsen/logrus" ) +const ( + reactivation = "key reactivation" + deactivation = "key deactivation" +) + type ActiveUpdatePayload struct { Uid uuid.UUID `json:"id"` Active bool `json:"active"` } +type UpdateActivateStatus func(uid uuid.UUID) error + func UpdateActive(auth string, - deactivate func(uid uuid.UUID) error, - reactivate func(uid uuid.UUID) error) http.HandlerFunc { + deactivate UpdateActivateStatus, + reactivate UpdateActivateStatus) http.HandlerFunc { + if len(auth) == 0 { + panic("missing auth token for key deactivation endpoint") + } return func(w http.ResponseWriter, r *http.Request) { if AuthToken(r.Header) != auth { log.Warnf("unauthorized key de-/re-activation attempt") @@ -39,10 +49,10 @@ func UpdateActive(auth string, uid := activeUpdatePayload.Uid if activeUpdatePayload.Active { - action = "key reactivation" + action = reactivation err = reactivate(uid) } else { - action = "key deactivation" + action = deactivation err = deactivate(uid) } if err != nil { @@ -60,7 +70,7 @@ func UpdateActive(auth string, SendResponse(w, HTTPResponse{ StatusCode: http.StatusOK, Header: http.Header{"Content-Type": {"text/plain; charset=utf-8"}}, - Content: []byte(action + " successful"), + Content: []byte(action + " successful\n"), }) } } @@ -71,7 +81,7 @@ func GetActiveUpdatePayload(r *http.Request) (*ActiveUpdatePayload, error) { return nil, fmt.Errorf("invalid content-type: expected %s, got %s", JSONType, contentType) } - reqBodyBytes, err := ioutil.ReadAll(r.Body) + reqBodyBytes, err := io.ReadAll(r.Body) if err != nil { return nil, err } diff --git a/main/adapters/http_server/active_updater_test.go b/main/adapters/http_server/active_updater_test.go index 98fed4f2..71adc42f 100644 --- a/main/adapters/http_server/active_updater_test.go +++ b/main/adapters/http_server/active_updater_test.go @@ -15,110 +15,194 @@ import ( const testAuth = "password123" -func TestUpdateActive_Deactivate(t *testing.T) { - payload := ActiveUpdatePayload{ - Uid: uuid.New(), - Active: false, - } - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := 
httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", JSONType) - r.Header.Set(XAuthHeader, testAuth) - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - assert.Equal(t, payload.Uid, uid) - return nil +var testUUID = uuid.MustParse("392110c4-5c4e-482c-80ef-e288ede02462") + +func TestUpdateActive(t *testing.T) { + + testCases := []struct { + name string + payload ActiveUpdatePayload + deactivate UpdateActivateStatus + reactivate UpdateActivateStatus + auth string + contentType string + tcChecks func(t *testing.T, recorder *httptest.ResponseRecorder) + }{ + { + name: "Deactivate", + payload: ActiveUpdatePayload{ + Uid: testUUID, + Active: false, + }, + deactivate: func(uid uuid.UUID) error { + assert.Equal(t, testUUID, uid) + return nil + }, + reactivate: func(uid uuid.UUID) error { + t.Error("reactivate function was called for deactivation") + return nil + }, + auth: testAuth, + contentType: JSONType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, http.StatusOK, recorder.Code) + assert.Contains(t, recorder.Body.String(), deactivation) + }, }, - func(uid uuid.UUID) error { - t.Error("reactivate function was called for deactivation") - return nil - })(w, r) - - require.Equal(t, http.StatusOK, w.Code) -} - -func TestUpdateActive_Reactivate(t *testing.T) { - payload := ActiveUpdatePayload{ - Uid: uuid.New(), - Active: true, - } - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", JSONType) - r.Header.Set(XAuthHeader, testAuth) - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - t.Error("deactivate function was called for reactivation") - return nil + { + name: "Reactivate", + payload: ActiveUpdatePayload{ + Uid: testUUID, + Active: true, + }, + deactivate: func(uid uuid.UUID) error { + t.Error("deactivate function was called for reactivation") + return nil + }, + reactivate: func(uid uuid.UUID) error { + assert.Equal(t, testUUID, uid) + return nil + }, + auth: testAuth, + contentType: JSONType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, http.StatusOK, recorder.Code) + assert.Contains(t, recorder.Body.String(), reactivation) + }, }, - func(uid uuid.UUID) error { - assert.Equal(t, payload.Uid, uid) - return nil - })(w, r) - - require.Equal(t, http.StatusOK, w.Code) -} - -func TestUpdateActive_Unauthorized(t *testing.T) { - payload := ActiveUpdatePayload{ - Uid: uuid.New(), - } - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", JSONType) - r.Header.Set(XAuthHeader, "invalid") - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - t.Error("deactivate function was called with invalid auth") - return nil + { + name: "Unauthorized", + payload: ActiveUpdatePayload{ + Uid: testUUID, + }, + deactivate: func(uid uuid.UUID) error { + t.Error("deactivate function was called with invalid auth") + return nil + }, + reactivate: func(uid uuid.UUID) error { + t.Error("reactivate function was called with invalid auth") + return nil + }, + auth: "invalid", + contentType: JSONType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, http.StatusUnauthorized, recorder.Code) + 
assert.Contains(t, recorder.Body.String(), http.StatusText(http.StatusUnauthorized)) + }, }, - func(uid uuid.UUID) error { - t.Error("reactivate function was called with invalid auth") - return nil - })(w, r) - - require.Equal(t, http.StatusUnauthorized, w.Code) -} - -func TestUpdateActive_InvalidContentType(t *testing.T) { - payload := ActiveUpdatePayload{ - Uid: uuid.New(), - } - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", BinType) - r.Header.Set(XAuthHeader, testAuth) - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - t.Error("deactivate function was called with invalid content type") - return nil + { + name: "InvalidContentType", + payload: ActiveUpdatePayload{ + Uid: testUUID, + }, + deactivate: func(uid uuid.UUID) error { + t.Error("deactivate function was called with invalid content type") + return nil + }, + reactivate: func(uid uuid.UUID) error { + t.Error("reactivate function was called with invalid content type") + return nil + }, + auth: testAuth, + contentType: BinType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, http.StatusBadRequest, recorder.Code) + assert.Contains(t, recorder.Body.String(), "invalid content-type") + }, }, - func(uid uuid.UUID) error { - t.Error("reactivate function was called with invalid content type") - return nil - })(w, r) - - require.Equal(t, http.StatusBadRequest, w.Code) + { + name: "InvalidUUID", + payload: ActiveUpdatePayload{}, + deactivate: func(uid uuid.UUID) error { + t.Error("deactivate function was called with invalid UUID") + return nil + }, + reactivate: func(uid uuid.UUID) error { + t.Error("reactivate function was called with invalid UUID") + return nil + }, + auth: testAuth, + contentType: JSONType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, http.StatusBadRequest, recorder.Code) + assert.Contains(t, recorder.Body.String(), "empty uuid") + }, + }, + { + name: "Unknown", + payload: ActiveUpdatePayload{ + Uid: testUUID, + }, + deactivate: func(uid uuid.UUID) error { + return ErrUnknown + }, + reactivate: func(uid uuid.UUID) error { + t.Error("reactivate function was called for deactivation") + return nil + }, + auth: testAuth, + contentType: JSONType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, http.StatusNotFound, recorder.Code) + assert.Contains(t, recorder.Body.String(), ErrUnknown.Error()) + }, + }, + { + name: "Conflict", + payload: ActiveUpdatePayload{ + Uid: testUUID, + }, + deactivate: func(uid uuid.UUID) error { + return ErrAlreadyDeactivated + }, + reactivate: func(uid uuid.UUID) error { + t.Error("reactivate function was called for deactivation") + return nil + }, + auth: testAuth, + contentType: JSONType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, http.StatusConflict, recorder.Code) + assert.Contains(t, recorder.Body.String(), ErrAlreadyDeactivated.Error()) + }, + }, + { + name: "ServerError", + payload: ActiveUpdatePayload{ + Uid: testUUID, + }, + deactivate: func(uid uuid.UUID) error { + return fmt.Errorf("some error") + }, + reactivate: func(uid uuid.UUID) error { + t.Error("reactivate function was called for deactivation") + return nil + }, + auth: testAuth, + contentType: JSONType, + tcChecks: func(t *testing.T, recorder *httptest.ResponseRecorder) { + assert.Equal(t, 
http.StatusInternalServerError, recorder.Code) + assert.Contains(t, recorder.Body.String(), http.StatusText(http.StatusInternalServerError)) + }, + }, + } + for _, c := range testCases { + t.Run(c.name, func(t *testing.T) { + payloadBytes, err := json.Marshal(c.payload) + require.NoError(t, err) + + w := httptest.NewRecorder() + r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) + r.Header.Set("Content-Type", c.contentType) + r.Header.Set(XAuthHeader, c.auth) + + UpdateActive(testAuth, + c.deactivate, + c.reactivate, + )(w, r) + c.tcChecks(t, w) + }) + } } func TestUpdateActive_InvalidJSON(t *testing.T) { @@ -146,107 +230,3 @@ func TestUpdateActive_InvalidJSON(t *testing.T) { require.Equal(t, http.StatusBadRequest, w.Code) } - -func TestUpdateActive_InvalidUUID(t *testing.T) { - payload := ActiveUpdatePayload{} - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", JSONType) - r.Header.Set(XAuthHeader, testAuth) - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - t.Error("deactivate function was called with invalid request content") - return nil - }, - func(uid uuid.UUID) error { - t.Error("reactivate function was called with invalid request content") - return nil - })(w, r) - - require.Equal(t, http.StatusBadRequest, w.Code) -} - -func TestUpdateActive_Unknown(t *testing.T) { - payload := ActiveUpdatePayload{ - Uid: uuid.New(), - Active: false, - } - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", JSONType) - r.Header.Set(XAuthHeader, testAuth) - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - return ErrUnknown - }, - func(uid uuid.UUID) error { - t.Error("reactivate function was called for deactivation") - return nil - })(w, r) - - require.Equal(t, http.StatusNotFound, w.Code) - require.Equal(t, ErrUnknown.Error()+"\n", w.Body.String()) -} - -func TestUpdateActive_Conflict(t *testing.T) { - payload := ActiveUpdatePayload{ - Uid: uuid.New(), - Active: false, - } - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", JSONType) - r.Header.Set(XAuthHeader, testAuth) - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - return ErrAlreadyDeactivated - }, - func(uid uuid.UUID) error { - t.Error("reactivate function was called for deactivation") - return nil - })(w, r) - - require.Equal(t, http.StatusConflict, w.Code) - require.Equal(t, ErrAlreadyDeactivated.Error()+"\n", w.Body.String()) -} - -func TestUpdateActive_ServerError(t *testing.T) { - payload := ActiveUpdatePayload{ - Uid: uuid.New(), - Active: false, - } - - payloadBytes, err := json.Marshal(payload) - require.NoError(t, err) - - w := httptest.NewRecorder() - r := httptest.NewRequest(http.MethodPut, "/", bytes.NewReader(payloadBytes)) - r.Header.Set("Content-Type", JSONType) - r.Header.Set(XAuthHeader, testAuth) - - UpdateActive(testAuth, - func(uid uuid.UUID) error { - return fmt.Errorf("some error") - }, - func(uid uuid.UUID) error { - t.Error("reactivate function was called for deactivation") - return nil - })(w, r) - - require.Equal(t, http.StatusInternalServerError, w.Code) -} diff --git 
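// Illustrative sketch, not part of this change: how a caller could use the key
// de-/re-activation endpoint served by UpdateActive above. clientURL and
// staticAuth are placeholders; the route is the ActiveUpdateEndpoint constant
// that InitHTTPServer registers further down in this patch.
package sketch

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"

	"github.com/google/uuid"

	h "github.com/ubirch/ubirch-client-go/main/adapters/http_server"
)

// setKeyActive deactivates (active=false) or reactivates (active=true) the key
// of the given identity via an authenticated PUT request.
func setKeyActive(clientURL, staticAuth string, uid uuid.UUID, active bool) error {
	body, err := json.Marshal(h.ActiveUpdatePayload{Uid: uid, Active: active})
	if err != nil {
		return err
	}

	req, err := http.NewRequest(http.MethodPut, clientURL+h.ActiveUpdateEndpoint, bytes.NewReader(body))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", h.JSONType)
	req.Header.Set(h.XAuthHeader, staticAuth)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("key status update failed: %s", resp.Status)
	}
	return nil
}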
a/main/adapters/http_server/chaining_service.go b/main/adapters/http_server/chaining_service.go deleted file mode 100644 index 2a13c8ae..00000000 --- a/main/adapters/http_server/chaining_service.go +++ /dev/null @@ -1,63 +0,0 @@ -package http_server - -import ( - "context" - "net/http" - - "github.com/google/uuid" - - log "github.com/sirupsen/logrus" -) - -type ChainingService struct { - CheckAuth func(context.Context, uuid.UUID, string) (bool, bool, error) - Chain func(HTTPRequest, context.Context) HTTPResponse -} - -// Ensure ChainingService implements the Service interface -var _ Service = (*ChainingService)(nil) - -func (s *ChainingService) HandleRequest(w http.ResponseWriter, r *http.Request) { - var msg HTTPRequest - var err error - - msg.ID, err = GetUUID(r) - if err != nil { - ClientError(msg.ID, r, w, err.Error(), http.StatusNotFound) - return - } - - ctx := r.Context() - msg.Auth = AuthToken(r.Header) - - ok, found, err := s.CheckAuth(ctx, msg.ID, msg.Auth) - if err != nil { - ServerError(msg.ID, r, w, err.Error(), http.StatusInternalServerError) - return - } - - if !found { - ClientError(msg.ID, r, w, "unknown UUID", http.StatusNotFound) - return - } - - if !ok { - ClientError(msg.ID, r, w, "invalid auth token", http.StatusUnauthorized) - return - } - - msg.Hash, err = GetHash(r) - if err != nil { - ClientError(msg.ID, r, w, err.Error(), http.StatusBadRequest) - return - } - - resp := s.Chain(msg, ctx) - - select { - case <-ctx.Done(): - log.Warnf("%s: chaining response could not be sent: http request %s", msg.ID, ctx.Err()) - default: - SendResponse(w, resp) - } -} diff --git a/main/adapters/http_server/common.go b/main/adapters/http_server/common.go index 0e725697..c9b2e56a 100644 --- a/main/adapters/http_server/common.go +++ b/main/adapters/http_server/common.go @@ -2,6 +2,7 @@ package http_server import ( "bytes" + "context" "crypto/sha256" "encoding/base64" "encoding/hex" @@ -9,8 +10,6 @@ import ( "errors" "fmt" "net/http" - "strings" - "time" "github.com/go-chi/chi" "github.com/google/uuid" @@ -19,16 +18,9 @@ import ( ) const ( - BackendRequestTimeout = 15 * time.Second // time after which requests to the ubirch backend will be canceled - GatewayTimeout = 45 * time.Second // time after which a 504 response will be sent if no timely response could be produced - ShutdownTimeout = 25 * time.Second // time after which the server will be shut down forcefully if graceful shutdown did not happen before - ReadTimeout = 1 * time.Second // maximum duration for reading the entire request -> low since we only expect requests with small content - WriteTimeout = 60 * time.Second // time after which the connection will be closed if response was not written -> this should never happen - IdleTimeout = 60 * time.Second // time to wait for the next request when keep-alives are enabled - UUIDKey = "uuid" - OperationKey = "operation" VerifyPath = "/verify" + OfflinePath = "/offline" HashEndpoint = "/hash" RegisterEndpoint = "/register" CSREndpoint = "/csr" @@ -41,7 +33,8 @@ const ( TextType = "text/plain" JSONType = "application/json" - XAuthHeader = "x-auth-token" + XUPPHeader = "X-Ubirch-UPP" + XAuthHeader = "X-Auth-Token" HexEncoding = "hex" @@ -49,8 +42,7 @@ const ( ) var ( - UUIDPath = fmt.Sprintf("/{%s}", UUIDKey) - OperationPath = fmt.Sprintf("/{%s}", OperationKey) + UUIDPath = fmt.Sprintf("/{%s}", UUIDKey) ErrUnknown = errors.New("identity unknown") ErrAlreadyInitialized = errors.New("identity already registered") @@ -59,9 +51,12 @@ var ( ) type HTTPRequest struct { - ID uuid.UUID - 
Auth string - Hash Sha256Sum + Ctx context.Context + ID uuid.UUID + Auth string + Hash Sha256Sum + Operation Operation + Offline bool } type Sha256Sum [HashLen]byte @@ -76,18 +71,14 @@ func GetUUID(r *http.Request) (uuid.UUID, error) { return id, nil } -func IsHashRequest(r *http.Request) bool { - return strings.HasSuffix(r.URL.Path, HashEndpoint) -} - // GetHash returns the hash from the request body -func GetHash(r *http.Request) (Sha256Sum, error) { +func GetHash(r *http.Request, isHashRequest bool) (Sha256Sum, error) { rBody, err := ReadBody(r) if err != nil { return Sha256Sum{}, err } - if IsHashRequest(r) { // request contains hash + if isHashRequest { // request contains hash return getHashFromHashRequest(r.Header, rBody) } else { // request contains original data return getHashFromDataRequest(r.Header, rBody) diff --git a/main/adapters/http_server/csr_fetcher.go b/main/adapters/http_server/csr_fetcher.go index 518fd445..b69ebdd7 100644 --- a/main/adapters/http_server/csr_fetcher.go +++ b/main/adapters/http_server/csr_fetcher.go @@ -11,6 +11,9 @@ import ( type GetCSR func(uid uuid.UUID) (csr []byte, err error) func FetchCSR(auth string, get GetCSR) http.HandlerFunc { + if len(auth) == 0 { + panic("missing auth token for CSR creation endpoint") + } return func(w http.ResponseWriter, r *http.Request) { if AuthToken(r.Header) != auth { log.Warnf("unauthorized CSR request") diff --git a/main/adapters/http_server/helper.go b/main/adapters/http_server/helper.go index 30864026..54325c06 100644 --- a/main/adapters/http_server/helper.go +++ b/main/adapters/http_server/helper.go @@ -1,8 +1,9 @@ package http_server import ( + "encoding/base64" "fmt" - "io/ioutil" + "io" "net/http" "strings" ) @@ -22,8 +23,22 @@ func AuthToken(header http.Header) string { return header.Get(XAuthHeader) } +// helper function to get "X-Ubirch-UPP" from request header +func getUPP(header http.Header) ([]byte, error) { + upp, err := base64.StdEncoding.DecodeString(header.Get(XUPPHeader)) + if err != nil { + return nil, fmt.Errorf("invalid UPP: %v", err) + } + + if len(upp) == 0 { + return nil, fmt.Errorf("missing UPP in header %s", XUPPHeader) + } + + return upp, nil +} + func ReadBody(r *http.Request) ([]byte, error) { - rBody, err := ioutil.ReadAll(r.Body) + rBody, err := io.ReadAll(r.Body) if err != nil { return nil, fmt.Errorf("unable to read request body: %v", err) } diff --git a/main/adapters/http_server/http_server.go b/main/adapters/http_server/http_server.go index 8fcc2ee8..350cfe4e 100644 --- a/main/adapters/http_server/http_server.go +++ b/main/adapters/http_server/http_server.go @@ -8,25 +8,24 @@ import ( "os/signal" "path" "syscall" + "time" "github.com/go-chi/chi" "github.com/go-chi/chi/middleware" "github.com/go-chi/cors" + "github.com/ubirch/ubirch-client-go/main/config" log "github.com/sirupsen/logrus" prom "github.com/ubirch/ubirch-client-go/main/prometheus" ) -type Service interface { - HandleRequest(w http.ResponseWriter, r *http.Request) -} - -type ServerEndpoint struct { - Path string - Service -} - -func (*ServerEndpoint) HandleOptions(http.ResponseWriter, *http.Request) {} +const ( + GatewayTimeout = 20 * time.Second // time after which a 504 response will be sent if no timely response could be produced + ShutdownTimeout = 10 * time.Second // time after which the server will be shut down forcefully if graceful shutdown did not happen before + ReadTimeout = 1 * time.Second // maximum duration for reading the entire request -> low since we only expect requests with small content + WriteTimeout 
= 60 * time.Second // time after which the connection will be closed if response was not written -> this should never happen + IdleTimeout = 60 * time.Second // time to wait for the next request when keep-alives are enabled +) type HTTPServer struct { Router *chi.Mux @@ -43,6 +42,117 @@ func NewRouter() *chi.Mux { return router } +func InitHTTPServer(conf *config.Config, + initialize InitializeIdentity, getCSR GetCSR, + checkAuth CheckAuth, sign Sign, + verify Verify, verifyOffline VerifyOffline, + deactivate UpdateActivateStatus, reactivate UpdateActivateStatus, + serverID string, readinessChecks []func() error) *HTTPServer { + + httpServer := &HTTPServer{ + Router: NewRouter(), + Addr: conf.TCP_addr, + TLS: conf.TLS, + CertFile: conf.TLS_CertFile, + KeyFile: conf.TLS_KeyFile, + } + + if conf.CORS && config.IsDevelopment { // never enable CORS on production stage + httpServer.SetUpCORS(conf.CORS_Origins, conf.Debug) + } + + // set up endpoints for liveness and readiness checks + httpServer.Router.Get(LivenessCheckEndpoint, Health(serverID)) + httpServer.Router.Get(ReadinessCheckEndpoint, Ready(serverID, readinessChecks)) + + // set up metrics + httpServer.Router.Method(http.MethodGet, MetricsEndpoint, prom.Handler()) + + // set up endpoint for identity registration + if conf.EnableRegistrationEndpoint { + httpServer.Router.Put(RegisterEndpoint, Register(conf.StaticAuth, initialize)) + } else { + httpServer.Router.Put(RegisterEndpoint, http.NotFound) + } + + // set up endpoint for CSR creation + fetchCSREndpoint := path.Join(UUIDPath, CSREndpoint) // //csr + if conf.EnableCSRCreationEndpoint { + httpServer.Router.Get(fetchCSREndpoint, FetchCSR(conf.StaticAuth, getCSR)) + } else { + httpServer.Router.Get(fetchCSREndpoint, http.NotFound) + } + + // set up endpoint for key status updates (de-/re-activation) + if conf.EnableDeactivationEndpoint { + httpServer.Router.Put(ActiveUpdateEndpoint, UpdateActive(conf.StaticAuth, deactivate, reactivate)) + } else { + httpServer.Router.Put(ActiveUpdateEndpoint, http.NotFound) + } + + // set up endpoints for signing + signingService := &SigningService{ + CheckAuth: checkAuth, + Sign: sign, + } + + // chain: / + // chain hash: //hash + // chain offline: //offline + // chain offline hash: //offline/hash + httpServer.AddServiceEndpoint(UUIDPath, + signingService.HandleSigningRequest(ChainHash), + true, + ) + + // sign: //anchor + // sign hash: //anchor/hash + // sign offline: //anchor/offline + // sign offline hash: //anchor/offline/hash + httpServer.AddServiceEndpoint(path.Join(UUIDPath, string(AnchorHash)), + signingService.HandleSigningRequest(AnchorHash), + true, + ) + + // disable: //disable + // disable hash: //disable/hash + httpServer.AddServiceEndpoint(path.Join(UUIDPath, string(DisableHash)), + signingService.HandleSigningRequest(DisableHash), + false, + ) + + // enable: //enable + // enable hash: //enable/hash + httpServer.AddServiceEndpoint(path.Join(UUIDPath, string(EnableHash)), + signingService.HandleSigningRequest(EnableHash), + false, + ) + + // delete: //delete + // delete hash: //delete/hash + httpServer.AddServiceEndpoint(path.Join(UUIDPath, string(DeleteHash)), + signingService.HandleSigningRequest(DeleteHash), + false, + ) + + // set up endpoints for verification + verificationService := &VerificationService{ + Verify: verify, + VerifyOffline: verifyOffline, + } + + // verify: /verify + // verify hash: /verify/hash + // verify offline: /verify/offline + // verify offline hash: /verify/offline/hash + 
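// Illustrative sketch, not part of this change: calling one of the offline
// signing routes registered above from an external HTTP client. It assumes the
// /hash endpoints accept a base64-encoded SHA256 hash with content type
// text/plain; clientURL and deviceAuth are placeholders.
package sketch

import (
	"bytes"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"io"
	"net/http"

	"github.com/google/uuid"

	h "github.com/ubirch/ubirch-client-go/main/adapters/http_server"
)

// anchorOffline posts a hash to <uuid>/anchor/offline/hash and returns the JSON
// signing response (hash, UPP, public key) without the client forwarding the
// UPP to the UBIRCH backend.
func anchorOffline(clientURL, deviceAuth string, uid uuid.UUID, data []byte) ([]byte, error) {
	hash := sha256.Sum256(data)
	hashBase64 := base64.StdEncoding.EncodeToString(hash[:])

	url := fmt.Sprintf("%s/%s/anchor/offline/hash", clientURL, uid)
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewBufferString(hashBase64))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", h.TextType)
	req.Header.Set(h.XAuthHeader, deviceAuth)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("signing request failed: %s", resp.Status)
	}
	return io.ReadAll(resp.Body)
}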
httpServer.AddServiceEndpoint(VerifyPath, + verificationService.HandleVerificationRequest, + true, + ) + + return httpServer +} + func (srv *HTTPServer) SetUpCORS(allowedOrigins []string, debug bool) { srv.Router.Use(cors.Handler(cors.Options{ AllowedOrigins: allowedOrigins, @@ -55,14 +165,24 @@ func (srv *HTTPServer) SetUpCORS(allowedOrigins []string, debug bool) { })) } -func (srv *HTTPServer) AddServiceEndpoint(endpoint ServerEndpoint) { - hashEndpointPath := path.Join(endpoint.Path, HashEndpoint) +func HandleOptions(http.ResponseWriter, *http.Request) {} + +func (srv *HTTPServer) AddServiceEndpoint(endpointPath string, handle func(offline, isHash bool) http.HandlerFunc, supportOffline bool) { + hashEndpointPath := path.Join(endpointPath, HashEndpoint) - srv.Router.Post(endpoint.Path, endpoint.HandleRequest) - srv.Router.Post(hashEndpointPath, endpoint.HandleRequest) + srv.Router.Post(endpointPath, handle(false, false)) + srv.Router.Post(hashEndpointPath, handle(false, true)) + + if supportOffline { + offlineEndpointPath := path.Join(endpointPath, OfflinePath) + offlineHashEndpointPath := path.Join(offlineEndpointPath, HashEndpoint) + + srv.Router.Post(offlineEndpointPath, handle(true, false)) + srv.Router.Post(offlineHashEndpointPath, handle(true, true)) + } - srv.Router.Options(endpoint.Path, endpoint.HandleOptions) - srv.Router.Options(hashEndpointPath, endpoint.HandleOptions) + srv.Router.Options(endpointPath, HandleOptions) + srv.Router.Options(hashEndpointPath, HandleOptions) } func (srv *HTTPServer) Serve() error { diff --git a/main/adapters/http_server/http_server_test.go b/main/adapters/http_server/http_server_test.go index af50c7f2..701b3712 100644 --- a/main/adapters/http_server/http_server_test.go +++ b/main/adapters/http_server/http_server_test.go @@ -2,11 +2,910 @@ package http_server import ( "bytes" + "context" "crypto/sha256" "encoding/base64" + "net/http" + "net/http/httptest" "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/ubirch/ubirch-client-go/main/config" +) + +func initialize(m *mock.Mock) InitializeIdentity { + return func(uid uuid.UUID, auth string) (csr []byte, err error) { + args := m.MethodCalled("initialize", uid, auth) + return args.Get(0).([]byte), args.Error(1) + } +} + +func getCSR(m *mock.Mock) GetCSR { + return func(uid uuid.UUID) (csr []byte, err error) { + args := m.MethodCalled("getCSR", uid) + return args.Get(0).([]byte), args.Error(1) + } +} + +func checkAuth(m *mock.Mock) CheckAuth { + return func(ctx context.Context, uid uuid.UUID, auth string) (ok, found bool, err error) { + args := m.MethodCalled("checkAuth", ctx, uid, auth) + return args.Bool(0), args.Bool(1), args.Error(2) + } +} + +func sign(m *mock.Mock) Sign { + return func(msg HTTPRequest) (resp HTTPResponse) { + args := m.MethodCalled("sign", msg.Ctx, msg.ID, msg.Auth, msg.Hash, msg.Operation, msg.Offline) + return args.Get(0).(HTTPResponse) + } +} + +func verify(m *mock.Mock) Verify { + return func(ctx context.Context, hash []byte) HTTPResponse { + args := m.MethodCalled("verify", ctx, hash) + return args.Get(0).(HTTPResponse) + } +} + +func verifyOffline(m *mock.Mock) VerifyOffline { + return func(upp []byte, hash []byte) HTTPResponse { + args := m.MethodCalled("verifyOffline", upp, hash) + return args.Get(0).(HTTPResponse) + } +} + +func deactivate(m *mock.Mock) UpdateActivateStatus { + return func(uid uuid.UUID) error { + args := m.MethodCalled("deactivate", uid) + return args.Error(0) + } +} + +func 
reactivate(m *mock.Mock) UpdateActivateStatus { + return func(uid uuid.UUID) error { + args := m.MethodCalled("reactivate", uid) + return args.Error(0) + } +} + +const ( + serverID = "test server" + testHeader = "X-Test-Header" + testHeaderValue = "test header value" +) + +var ( + readinessChecks []func() error ) +func TestInitHTTPServer(t *testing.T) { + + testCases := []struct { + name string + enableRegistration bool + enableCSRCreation bool + enableDeactivation bool + request *http.Request + setMockBehavior func(m *mock.Mock) + tcChecks func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) + }{ + { + name: "health check", + request: httptest.NewRequest(http.MethodGet, "/healthz", nil), + setMockBehavior: func(m *mock.Mock) {}, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, serverID, w.Header().Get("Server")) + assert.Contains(t, w.Body.String(), http.StatusText(http.StatusOK)) + }, + }, + { + name: "readiness check", + request: httptest.NewRequest(http.MethodGet, "/readyz", nil), + setMockBehavior: func(m *mock.Mock) {}, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, serverID, w.Header().Get("Server")) + assert.Contains(t, w.Body.String(), http.StatusText(http.StatusOK)) + }, + }, + { + name: "metrics", + request: httptest.NewRequest(http.MethodGet, "/metrics", nil), + setMockBehavior: func(m *mock.Mock) {}, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + assert.Equal(t, http.StatusOK, w.Code) + // fixme + // assert.Contains(t, w.Body.String(), "http_requests_total") + // assert.Contains(t, w.Body.String(), "response_status") + // assert.Contains(t, w.Body.String(), "http_response_time_seconds") + assert.Contains(t, w.Body.String(), "identity_creation_success") + }, + }, + { + name: "identity registration disabled", + request: func() *http.Request { + payload := []byte("{\"uuid\": \"5133fbdd-978d-4f95-9af9-41abdef2f2b4\", \"password\": \"1234\"}") + request := httptest.NewRequest(http.MethodPut, "/register", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("initialize", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), "1234").Return([]byte("csr"), nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertNotCalled(t, "initialize", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), "1234") + assert.Equal(t, http.StatusNotFound, w.Code) + assert.Contains(t, w.Body.String(), "not found") + }, + }, + { + name: "identity registration - bad auth", + enableRegistration: true, + request: func() *http.Request { + payload := []byte("{\"uuid\": \"5133fbdd-978d-4f95-9af9-41abdef2f2b4\", \"password\": \"1234\"}") + request := httptest.NewRequest(http.MethodPut, "/register", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, "wrong_pw") + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) {}, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + assert.Equal(t, http.StatusUnauthorized, w.Code) + assert.Contains(t, w.Body.String(), "Unauthorized") + }, + }, + { + name: "identity registration", + enableRegistration: true, + request: func() *http.Request { + payload := []byte("{\"uuid\": 
\"5133fbdd-978d-4f95-9af9-41abdef2f2b4\", \"password\": \"1234\"}") + request := httptest.NewRequest(http.MethodPut, "/register", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("initialize", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), "1234").Return([]byte("csr"), nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Contains(t, w.Body.String(), "csr") + }, + }, + { + name: "CSR creation disabled", + request: func() *http.Request { + request := httptest.NewRequest(http.MethodGet, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/csr", nil) + request.Header.Add(XAuthHeader, testAuth) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("getCSR", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4")).Return([]byte("csr"), nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertNotCalled(t, "getCSR", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4")) + assert.Equal(t, http.StatusNotFound, w.Code) + assert.Contains(t, w.Body.String(), "not found") + }, + }, + { + name: "CSR creation - bad auth", + enableCSRCreation: true, + request: func() *http.Request { + request := httptest.NewRequest(http.MethodGet, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/csr", nil) + request.Header.Add(XAuthHeader, "wrong_pw") + return request + }(), + setMockBehavior: func(m *mock.Mock) {}, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + assert.Equal(t, http.StatusUnauthorized, w.Code) + assert.Contains(t, w.Body.String(), "Unauthorized") + }, + }, + { + name: "CSR creation", + enableCSRCreation: true, + request: func() *http.Request { + request := httptest.NewRequest(http.MethodGet, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/csr", nil) + request.Header.Add(XAuthHeader, testAuth) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("getCSR", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4")).Return([]byte("csr"), nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Contains(t, w.Body.String(), "csr") + }, + }, + { + name: "deactivation disabled", + request: func() *http.Request { + payload := []byte("{\"id\": \"5133fbdd-978d-4f95-9af9-41abdef2f2b4\", \"active\": false}") + request := httptest.NewRequest(http.MethodPut, "/device/updateActive", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("deactivate", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4")).Return(nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertNotCalled(t, "deactivate", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4")) + assert.Equal(t, http.StatusNotFound, w.Code) + assert.Contains(t, w.Body.String(), "not found") + }, + }, + { + name: "deactivation - bad auth", + enableDeactivation: true, + request: func() *http.Request { + payload := []byte("{\"id\": \"5133fbdd-978d-4f95-9af9-41abdef2f2b4\", \"active\": false}") + request := httptest.NewRequest(http.MethodPut, "/device/updateActive", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, "wrong_pw") + request.Header.Add("Content-Type", 
JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) {}, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + assert.Equal(t, http.StatusUnauthorized, w.Code) + assert.Contains(t, w.Body.String(), "Unauthorized") + }, + }, + { + name: "deactivation", + enableDeactivation: true, + request: func() *http.Request { + payload := []byte("{\"id\": \"5133fbdd-978d-4f95-9af9-41abdef2f2b4\", \"active\": false}") + request := httptest.NewRequest(http.MethodPut, "/device/updateActive", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("deactivate", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4")).Return(nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Contains(t, w.Body.String(), "key deactivation successful") + }, + }, + { + name: "reactivation", + enableDeactivation: true, + request: func() *http.Request { + payload := []byte("{\"id\": \"5133fbdd-978d-4f95-9af9-41abdef2f2b4\", \"active\": true}") + request := httptest.NewRequest(http.MethodPut, "/device/updateActive", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("reactivate", uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4")).Return(nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Contains(t, w.Body.String(), "key reactivation successful") + }, + }, + { + name: "chain - unknown UUID", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + // checkAuth(ctx context.Context, uid uuid.UUID, auth string) (ok, found bool, err error) + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth). + Return(true, false, nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusNotFound, w.Code) + assert.Equal(t, w.Body.String(), "unknown UUID\n") + }, + }, + { + name: "chain - bad auth", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + // checkAuth(ctx context.Context, uid uuid.UUID, auth string) (ok, found bool, err error) + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth). 
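+				// ok=false together with found=true simulates a known identity whose auth token does not match,
+				// so the handler is expected to respond with 401 "invalid auth token"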
+ Return(false, true, nil) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusUnauthorized, w.Code) + assert.Equal(t, w.Body.String(), "invalid auth token\n") + }, + }, + { + name: "chain", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + // checkAuth(ctx context.Context, uid uuid.UUID, auth string) (ok, found bool, err error) + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ChainHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + //Header: http.Header{"test": []string{"1", "2", "3"}}, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("chained"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + //assert.Equal(t, []string{"1", "2", "3"}, w.Header().Get("test")) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "chained") + }, + }, + { + name: "chain hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/hash", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ChainHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("chained hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "chained hash") + }, + }, + { + name: "chain offline", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/offline", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { 
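+				// the offline chaining endpoint authenticates the request the same way as online chaining,
+				// but passes Offline=true to the sign handler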
+ m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ChainHash, + true, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("chained offline"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "chained offline") + }, + }, + { + name: "chain offline hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/offline/hash", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ChainHash, + true, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("chained offline hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "chained offline hash") + }, + }, + { + name: "anchor", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/anchor", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + AnchorHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("anchored"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, 
testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "anchored") + }, + }, + { + name: "anchor hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/anchor/hash", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + AnchorHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("anchored hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "anchored hash") + }, + }, + { + name: "anchor offline", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/anchor/offline", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + AnchorHash, + true, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("anchored offline"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "anchored offline") + }, + }, + { + name: "anchor offline hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/anchor/offline/hash", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + 
Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + AnchorHash, + true, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("anchored offline hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "anchored offline hash") + }, + }, + { + name: "disable", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/disable", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + DisableHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("disabled"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "disabled") + }, + }, + { + name: "disable hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/disable/hash", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + DisableHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("disabled hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "disabled hash") + }, + }, + { + name: "enable", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, 
"/5133fbdd-978d-4f95-9af9-41abdef2f2b4/enable", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + EnableHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("enabled"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "enabled") + }, + }, + { + name: "enable hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/enable/hash", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + EnableHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("enabled hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "enabled hash") + }, + }, + { + name: "delete", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/delete", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + DeleteHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: 
[]byte("deleted"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "deleted") + }, + }, + { + name: "delete hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/5133fbdd-978d-4f95-9af9-41abdef2f2b4/delete/hash", bytes.NewReader(payload)) + request.Header.Add(XAuthHeader, testAuth) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("checkAuth", mock.AnythingOfType("*context.timerCtx"), uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), testAuth).Return(true, true, nil) + m.On("sign", + mock.AnythingOfType("*context.timerCtx"), + uuid.MustParse("5133fbdd-978d-4f95-9af9-41abdef2f2b4"), + testAuth, + Sha256Sum{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + DeleteHash, + false, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("deleted hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "deleted hash") + }, + }, + { + name: "verify", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/verify", bytes.NewReader(payload)) + request.Header.Add("Content-Type", JSONType) + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("verify", + mock.AnythingOfType("*context.timerCtx"), + []byte{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("verified"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "verified") + }, + }, + { + name: "verify hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/verify/hash", bytes.NewReader(payload)) + request.Header.Add("Content-Type", "text/plain") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("verify", + mock.AnythingOfType("*context.timerCtx"), + []byte{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("verified hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, 
http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "verified hash") + }, + }, + { + name: "verify offline", + request: func() *http.Request { + payload := []byte("{\"c\": \"d\", \"a\": \"b\", \"e\": \"f\"}") + request := httptest.NewRequest(http.MethodPost, "/verify/offline", bytes.NewReader(payload)) + request.Header.Add("Content-Type", JSONType) + request.Header.Add(XUPPHeader, "lSLEEO+ddLjS0Ujco3KcgNmjOnMAxEBoSogWBWsuKckx353EPQNVuZfbpr/pzcThZgawbVxuD0ljiKjtkHr2eo00rrndBhMTdz2kezf9e0OvxwGzh9K2xEBTra3qLFPlhEvG1Uj2yHZoyDsn4zlLRqtYkq54/NhizZpzobb+4NJds1Kxw++4BASfsXuGJsr8RrRhokICg+Um") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("verifyOffline", + []byte{0x95, 0x22, 0xc4, 0x10, 0xef, 0x9d, 0x74, 0xb8, 0xd2, 0xd1, 0x48, 0xdc, 0xa3, 0x72, 0x9c, 0x80, 0xd9, 0xa3, 0x3a, 0x73, 0x0, 0xc4, 0x40, 0x68, 0x4a, 0x88, 0x16, 0x5, 0x6b, 0x2e, 0x29, 0xc9, 0x31, 0xdf, 0x9d, 0xc4, 0x3d, 0x3, 0x55, 0xb9, 0x97, 0xdb, 0xa6, 0xbf, 0xe9, 0xcd, 0xc4, 0xe1, 0x66, 0x6, 0xb0, 0x6d, 0x5c, 0x6e, 0xf, 0x49, 0x63, 0x88, 0xa8, 0xed, 0x90, 0x7a, 0xf6, 0x7a, 0x8d, 0x34, 0xae, 0xb9, 0xdd, 0x6, 0x13, 0x13, 0x77, 0x3d, 0xa4, 0x7b, 0x37, 0xfd, 0x7b, 0x43, 0xaf, 0xc7, 0x1, 0xb3, 0x87, 0xd2, 0xb6, 0xc4, 0x40, 0x53, 0xad, 0xad, 0xea, 0x2c, 0x53, 0xe5, 0x84, 0x4b, 0xc6, 0xd5, 0x48, 0xf6, 0xc8, 0x76, 0x68, 0xc8, 0x3b, 0x27, 0xe3, 0x39, 0x4b, 0x46, 0xab, 0x58, 0x92, 0xae, 0x78, 0xfc, 0xd8, 0x62, 0xcd, 0x9a, 0x73, 0xa1, 0xb6, 0xfe, 0xe0, 0xd2, 0x5d, 0xb3, 0x52, 0xb1, 0xc3, 0xef, 0xb8, 0x4, 0x4, 0x9f, 0xb1, 0x7b, 0x86, 0x26, 0xca, 0xfc, 0x46, 0xb4, 0x61, 0xa2, 0x42, 0x2, 0x83, 0xe5, 0x26}, + []byte{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("verified offline"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "verified offline") + }, + }, + { + name: "verify offline hash", + request: func() *http.Request { + payload := []byte("gMmDwvphdRsveEKjozk0/L7RxDqiXKO2OVwS9VPi8F4=") + request := httptest.NewRequest(http.MethodPost, "/verify/offline/hash", bytes.NewReader(payload)) + request.Header.Add("Content-Type", "text/plain") + request.Header.Add(XUPPHeader, "lSLEEO+ddLjS0Ujco3KcgNmjOnMAxEBoSogWBWsuKckx353EPQNVuZfbpr/pzcThZgawbVxuD0ljiKjtkHr2eo00rrndBhMTdz2kezf9e0OvxwGzh9K2xEBTra3qLFPlhEvG1Uj2yHZoyDsn4zlLRqtYkq54/NhizZpzobb+4NJds1Kxw++4BASfsXuGJsr8RrRhokICg+Um") + return request + }(), + setMockBehavior: func(m *mock.Mock) { + m.On("verifyOffline", + []byte{0x95, 0x22, 0xc4, 0x10, 0xef, 0x9d, 0x74, 0xb8, 0xd2, 0xd1, 0x48, 0xdc, 0xa3, 0x72, 0x9c, 0x80, 0xd9, 0xa3, 0x3a, 0x73, 0x0, 0xc4, 0x40, 0x68, 0x4a, 0x88, 0x16, 0x5, 0x6b, 0x2e, 0x29, 0xc9, 0x31, 0xdf, 0x9d, 0xc4, 0x3d, 0x3, 0x55, 0xb9, 0x97, 0xdb, 0xa6, 0xbf, 0xe9, 0xcd, 0xc4, 0xe1, 0x66, 0x6, 0xb0, 0x6d, 0x5c, 0x6e, 0xf, 0x49, 0x63, 0x88, 0xa8, 0xed, 0x90, 0x7a, 0xf6, 0x7a, 0x8d, 0x34, 0xae, 0xb9, 0xdd, 0x6, 0x13, 0x13, 0x77, 0x3d, 0xa4, 0x7b, 0x37, 0xfd, 0x7b, 0x43, 0xaf, 0xc7, 0x1, 0xb3, 0x87, 0xd2, 0xb6, 0xc4, 0x40, 0x53, 0xad, 0xad, 0xea, 0x2c, 0x53, 0xe5, 0x84, 0x4b, 0xc6, 0xd5, 0x48, 0xf6, 0xc8, 0x76, 
0x68, 0xc8, 0x3b, 0x27, 0xe3, 0x39, 0x4b, 0x46, 0xab, 0x58, 0x92, 0xae, 0x78, 0xfc, 0xd8, 0x62, 0xcd, 0x9a, 0x73, 0xa1, 0xb6, 0xfe, 0xe0, 0xd2, 0x5d, 0xb3, 0x52, 0xb1, 0xc3, 0xef, 0xb8, 0x4, 0x4, 0x9f, 0xb1, 0x7b, 0x86, 0x26, 0xca, 0xfc, 0x46, 0xb4, 0x61, 0xa2, 0x42, 0x2, 0x83, 0xe5, 0x26}, + []byte{0x80, 0xc9, 0x83, 0xc2, 0xfa, 0x61, 0x75, 0x1b, 0x2f, 0x78, 0x42, 0xa3, 0xa3, 0x39, 0x34, 0xfc, 0xbe, 0xd1, 0xc4, 0x3a, 0xa2, 0x5c, 0xa3, 0xb6, 0x39, 0x5c, 0x12, 0xf5, 0x53, 0xe2, 0xf0, 0x5e}, + ).Return(HTTPResponse{ + StatusCode: http.StatusOK, + Header: http.Header{testHeader: []string{testHeaderValue}}, + Content: []byte("verified offline hash"), + }) + }, + tcChecks: func(t *testing.T, w *httptest.ResponseRecorder, m *mock.Mock) { + m.AssertExpectations(t) + assert.Equal(t, http.StatusOK, w.Code) + assert.Equal(t, testHeaderValue, w.Header().Get(testHeader)) + assert.Contains(t, w.Body.String(), "verified offline hash") + }, + }, + } + for _, c := range testCases { + t.Run(c.name, func(t *testing.T) { + m := &mock.Mock{} + m.Test(t) + c.setMockBehavior(m) + + conf := &config.Config{ + StaticAuth: testAuth, + EnableRegistrationEndpoint: c.enableRegistration, + EnableCSRCreationEndpoint: c.enableCSRCreation, + EnableDeactivationEndpoint: c.enableDeactivation, + } + + httpServer := InitHTTPServer(conf, + initialize(m), getCSR(m), + checkAuth(m), sign(m), + verify(m), verifyOffline(m), + deactivate(m), reactivate(m), + serverID, readinessChecks) + + w := httptest.NewRecorder() + + httpServer.Router.ServeHTTP(w, c.request) + + c.tcChecks(t, w, m) + }) + } +} + const ( testInputStr string = "{\n \"id\": \"ba70ad8b-a564-4e58-9a3b-224ac0f0153f\",\n \"ts\": 1613733623,\n \"big\": 5102163654257655,\n \"tpl\": [\n 801874468,\n \" dm.db.Stats().Idle { + // t.Errorf("%d open connections, %d idle", dm.db.Stats().OpenConnections, dm.db.Stats().Idle) + //} +} + +func TestDatabaseManager_Retry_sqlite(t *testing.T) { + dm, err := initSQLiteDB(t, 0) + require.NoError(t, err) + defer cleanUpDB(t, dm) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + tx, err := dm.StartTransaction(ctx) + require.NoError(t, err) + require.NotNil(t, tx) + + tx2, err := dm.StartTransaction(ctx) + require.Error(t, err) + require.Nil(t, tx2) + + liteErr, ok := err.(*sqlite.Error) + require.True(t, ok) + require.Equal(t, 5, liteErr.Code()) +} + +func TestDatabaseManager_StoreExternalIdentity_sqlite(t *testing.T) { + dm, err := initSQLiteDB(t, 0) + require.NoError(t, err) + defer cleanUpDB(t, dm) + + testExtId := ent.ExternalIdentity{ + Uid: uuid.New(), + PublicKey: make([]byte, 64), + } + rand.Read(testExtId.PublicKey) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + _, err = dm.LoadExternalIdentity(ctx, testExtId.Uid) + assert.Equal(t, ErrNotExist, err) + + err = dm.StoreExternalIdentity(ctx, testExtId) + require.NoError(t, err) + + err = dm.StoreExternalIdentity(ctx, testExtId) + assert.Error(t, err) + + storedExtId, err := dm.LoadExternalIdentity(ctx, testExtId.Uid) + require.NoError(t, err) + assert.Equal(t, storedExtId.Uid, testExtId.Uid) + assert.Equal(t, storedExtId.PublicKey, testExtId.PublicKey) + + cancel() + + err = dm.StoreExternalIdentity(ctx, testExtId) + assert.EqualError(t, err, "context canceled") + + _, err = dm.LoadExternalIdentity(ctx, testExtId.Uid) + assert.EqualError(t, err, "context canceled") +} + +func TestDatabaseManager_GetIdentityUUIDs_sqlite(t *testing.T) { + dm, err := initSQLiteDB(t, 0) + require.NoError(t, err) + defer cleanUpDB(t, dm) + 
+ // generate and store identities for testing + var testUUIDs []uuid.UUID + for i := 0; i < 10; i++ { + testId := getTestIdentity() + testId.Uid = uuid.New() + + err = storeIdentity(dm, testId) + require.NoError(t, err) + + testUUIDs = append(testUUIDs, testId.Uid) + } + + ids, err := dm.GetIdentityUUIDs() + require.NoError(t, err) + + assert.Equal(t, len(ids), len(testUUIDs)) + + for _, id := range testUUIDs { + assert.Contains(t, ids, id) + } +} + +func TestDatabaseManager_GetExternalIdentityUUIDs_sqlite(t *testing.T) { + dm, err := initSQLiteDB(t, 0) + require.NoError(t, err) + defer cleanUpDB(t, dm) + + // generate and store external identities for testing + var testExtUUIDs []uuid.UUID + for i := 0; i < 10; i++ { + testExtId := ent.ExternalIdentity{ + Uid: uuid.New(), + PublicKey: make([]byte, 64), + } + + err = dm.StoreExternalIdentity(context.TODO(), testExtId) + require.NoError(t, err) + + testExtUUIDs = append(testExtUUIDs, testExtId.Uid) + } + + ids, err := dm.GetExternalIdentityUUIDs() + require.NoError(t, err) + + assert.Equal(t, len(ids), len(testExtUUIDs)) + + for _, id := range testExtUUIDs { + assert.Contains(t, ids, id) + } +} + +func initSQLiteDB(t *testing.T, maxConns int) (*DatabaseManager, error) { + return NewDatabaseManager(SQLite, filepath.Join(t.TempDir(), testSQLiteDSN), maxConns) +} diff --git a/main/adapters/repository/database_test.go b/main/adapters/repository/database_test.go index 5ae9d68e..b9288f0d 100644 --- a/main/adapters/repository/database_test.go +++ b/main/adapters/repository/database_test.go @@ -20,19 +20,16 @@ import ( "github.com/stretchr/testify/require" "github.com/ubirch/ubirch-client-go/main/config" "github.com/ubirch/ubirch-client-go/main/ent" - "github.com/ubirch/ubirch-protocol-go/ubirch/v2" ) -const ( - testLoad = 100 -) +const testLoad = 100 func TestDatabaseManager(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -46,6 +43,7 @@ func TestDatabaseManager(t *testing.T) { tx, err := dm.StartTransaction(ctx) require.NoError(t, err) + require.NotNil(t, tx) _, err = dm.LoadActiveFlagForUpdate(tx, testIdentity.Uid) assert.Equal(t, ErrNotExist, err) @@ -62,6 +60,7 @@ func TestDatabaseManager(t *testing.T) { // store identity tx, err = dm.StartTransaction(ctx) require.NoError(t, err) + require.NotNil(t, tx) err = dm.StoreIdentity(tx, testIdentity) require.NoError(t, err) @@ -101,11 +100,11 @@ func TestDatabaseManager(t *testing.T) { } func TestDatabaseManager_StoreActiveFlag(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -141,11 +140,11 @@ func TestDatabaseManager_StoreActiveFlag(t *testing.T) { } func TestDatabaseManager_SetSignature(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -188,11 +187,11 @@ func TestDatabaseManager_SetSignature(t *testing.T) { } func TestDatabaseManager_LoadSignatureForUpdate(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - 
testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -200,6 +199,7 @@ func TestDatabaseManager_LoadSignatureForUpdate(t *testing.T) { // store identity tx, err := dm.StartTransaction(ctx) require.NoError(t, err) + require.NotNil(t, tx) err = dm.StoreIdentity(tx, testIdentity) require.NoError(t, err) @@ -210,6 +210,7 @@ func TestDatabaseManager_LoadSignatureForUpdate(t *testing.T) { // get lock on signature tx, err = dm.StartTransaction(ctx) require.NoError(t, err) + require.NotNil(t, tx) _, err = dm.LoadSignatureForUpdate(tx, testIdentity.Uid) require.NoError(t, err) @@ -226,11 +227,11 @@ func TestDatabaseManager_LoadSignatureForUpdate(t *testing.T) { } func TestDatabaseManager_StoreAuth(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -273,7 +274,7 @@ func TestDatabaseManager_StoreAuth(t *testing.T) { } func TestNewSqlDatabaseInfo_Ready(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) @@ -286,7 +287,7 @@ func TestNewSqlDatabaseInfo_NotReady(t *testing.T) { unreachableDSN := "postgres://nousr:nopwd@localhost:0000/nodatabase" // we expect no error here - dm, err := NewSqlDatabaseInfo(unreachableDSN, 0) + dm, err := NewDatabaseManager(PostgreSQL, unreachableDSN, 0) require.NoError(t, err) defer func(dm *DatabaseManager) { err := dm.Close() @@ -300,11 +301,11 @@ func TestNewSqlDatabaseInfo_NotReady(t *testing.T) { } func TestStoreExisting(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() // store identity ctx, cancel := context.WithCancel(context.Background()) @@ -330,11 +331,11 @@ func TestStoreExisting(t *testing.T) { } func TestDatabaseManager_CancelTransaction(t *testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() // store identity, but cancel context, so transaction will be rolled back ctx, cancel := context.WithCancel(context.Background()) @@ -349,22 +350,30 @@ func TestDatabaseManager_CancelTransaction(t *testing.T) { cancel() - // check not exists + // check transaction was rolled back _, err = dm.LoadIdentity(testIdentity.Uid) assert.Equal(t, ErrNotExist, err) -} -func TestDatabaseManager_StartTransaction(t *testing.T) { - ctx, cancel := context.WithTimeout(context.Background(), time.Second) + // make sure identity can be stored now + ctx, cancel = context.WithCancel(context.Background()) defer cancel() - c, err := getConfig() + tx, err = dm.StartTransaction(ctx) require.NoError(t, err) + require.NotNil(t, tx) - dm, err := NewSqlDatabaseInfo(c.PostgresDSN, 1) + err = dm.StoreIdentity(tx, testIdentity) + require.NoError(t, err) +} + +func TestDatabaseManager_StartTransaction(t *testing.T) { + dm, err := initDB(1) require.NoError(t, err) defer cleanUpDB(t, dm) + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + tx, err := dm.StartTransaction(ctx) require.NoError(t, err) assert.NotNil(t, tx) @@ -375,7 +384,7 @@ func TestDatabaseManager_StartTransaction(t *testing.T) { } func TestDatabaseManager_InvalidTransactionCtx(t 
*testing.T) { - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) @@ -407,21 +416,25 @@ func TestDatabaseManager_InvalidTransactionCtx(t *testing.T) { func TestDatabaseLoad(t *testing.T) { wg := &sync.WaitGroup{} - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) // generate identities var testIdentities []ent.Identity for i := 0; i < testLoad; i++ { - testIdentities = append(testIdentities, generateRandomIdentity()) + id := getTestIdentity() + id.Uid = uuid.New() + testIdentities = append(testIdentities, id) } // store identities for i, testId := range testIdentities { wg.Add(1) go func(idx int, id ent.Identity) { - err := storeIdentity(dm, id, wg) + defer wg.Done() + + err := storeIdentity(dm, id) if err != nil { t.Errorf("%s: identity could not be stored: %v", id.Uid, err) } @@ -451,12 +464,13 @@ func TestDatabaseManager_RecoverUndefinedTable(t *testing.T) { c, err := getConfig() require.NoError(t, err) - pg, err := sql.Open(PostgreSql, c.PostgresDSN) + pg, err := sql.Open(PostgreSQL, c.DbDSN) require.NoError(t, err) dm := &DatabaseManager{ - options: &sql.TxOptions{}, - db: pg, + options: &sql.TxOptions{}, + db: pg, + driverName: PostgreSQL, } _, err = dm.LoadIdentity(uuid.New()) @@ -467,7 +481,7 @@ func TestDatabaseManager_Retry(t *testing.T) { c, err := getConfig() require.NoError(t, err) - dm, err := NewSqlDatabaseInfo(c.PostgresDSN, 101) + dm, err := NewDatabaseManager(PostgreSQL, c.DbDSN, 101) require.NoError(t, err) defer cleanUpDB(t, dm) @@ -495,6 +509,99 @@ func TestDatabaseManager_Retry(t *testing.T) { wg.Wait() } +func TestDatabaseManager_StoreExternalIdentity(t *testing.T) { + dm, err := initDB(0) + require.NoError(t, err) + defer cleanUpDB(t, dm) + + testExtId := ent.ExternalIdentity{ + Uid: uuid.New(), + PublicKey: make([]byte, 64), + } + rand.Read(testExtId.PublicKey) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + _, err = dm.LoadExternalIdentity(ctx, testExtId.Uid) + assert.Equal(t, ErrNotExist, err) + + err = dm.StoreExternalIdentity(ctx, testExtId) + require.NoError(t, err) + + err = dm.StoreExternalIdentity(ctx, testExtId) + assert.Error(t, err) + + storedExtId, err := dm.LoadExternalIdentity(ctx, testExtId.Uid) + require.NoError(t, err) + assert.Equal(t, storedExtId.Uid, testExtId.Uid) + assert.Equal(t, storedExtId.PublicKey, testExtId.PublicKey) + + cancel() + + err = dm.StoreExternalIdentity(ctx, testExtId) + assert.EqualError(t, err, "context canceled") + + _, err = dm.LoadExternalIdentity(ctx, testExtId.Uid) + assert.EqualError(t, err, "context canceled") +} + +func TestDatabaseManager_GetIdentityUUIDs(t *testing.T) { + dm, err := initDB(0) + require.NoError(t, err) + defer cleanUpDB(t, dm) + + // generate and store identities for testing + var testUUIDs []uuid.UUID + for i := 0; i < 10; i++ { + testId := getTestIdentity() + testId.Uid = uuid.New() + + err = storeIdentity(dm, testId) + require.NoError(t, err) + + testUUIDs = append(testUUIDs, testId.Uid) + } + + ids, err := dm.GetIdentityUUIDs() + require.NoError(t, err) + + assert.Equal(t, len(ids), len(testUUIDs)) + + for _, id := range testUUIDs { + assert.Contains(t, ids, id) + } +} + +func TestDatabaseManager_GetExternalIdentityUUIDs(t *testing.T) { + dm, err := initDB(0) + require.NoError(t, err) + defer cleanUpDB(t, dm) + + // generate and store external identities for testing + var testExtUUIDs []uuid.UUID + for i := 0; i < 10; i++ { + testExtId := ent.ExternalIdentity{ + Uid: uuid.New(), + 
PublicKey: make([]byte, 64), + } + + err = dm.StoreExternalIdentity(context.TODO(), testExtId) + require.NoError(t, err) + + testExtUUIDs = append(testExtUUIDs, testExtId.Uid) + } + + ids, err := dm.GetExternalIdentityUUIDs() + require.NoError(t, err) + + assert.Equal(t, len(ids), len(testExtUUIDs)) + + for _, id := range testExtUUIDs { + assert.Contains(t, ids, id) + } +} + func getConfig() (*config.Config, error) { configFileName := "config_test.json" fileHandle, err := os.Open(filepath.Join("../../", configFileName)) @@ -504,7 +611,7 @@ func getConfig() (*config.Config, error) { "Please provide a configuration file \"%s\" in the main directory which contains\n"+ "a DSN for a postgres database in order to test the database context management.\n\n"+ "!!! THIS MUST BE DIFFERENT FROM THE DSN USED FOR THE ACTUAL CONTEXT !!!\n\n"+ - "{\n\t\"postgresDSN\": \"postgres://:@:5432/\"\n}\n"+ + "{\n\t\"dbDSN\": \"postgres://:@:5432/\"\n}\n"+ "--------------------------------------------------------------------------------", err, configFileName) } @@ -519,72 +626,42 @@ func getConfig() (*config.Config, error) { return nil, err } + if len(c.DbDSN) == 0 { + return nil, fmt.Errorf("missing DSN for test postgres database ('dbDSN') in configuration %s", configFileName) + } + return c, nil } -func initDB() (*DatabaseManager, error) { +func initDB(maxConns int) (*DatabaseManager, error) { c, err := getConfig() if err != nil { return nil, err } - dm, err := NewSqlDatabaseInfo(c.PostgresDSN, c.DbMaxConns) - if err != nil { - return nil, err - } - - return dm, nil + return NewDatabaseManager(PostgreSQL, c.DbDSN, maxConns) } -func cleanUpDB(t *testing.T, dm *DatabaseManager) { - dropTableQuery := fmt.Sprintf("DROP TABLE %s;", PostgresIdentityTableName) - _, err := dm.db.Exec(dropTableQuery) +func cleanUpDB(t assert.TestingT, dm *DatabaseManager) { + dropTableQuery := "DROP TABLE identity;" + err := dm.retry(func() error { + _, err := dm.db.Exec(dropTableQuery) + return err + }) assert.NoError(t, err) - err = dm.Close() + dropTableQuery = "DROP TABLE external_identity;" + err = dm.retry(func() error { + _, err := dm.db.Exec(dropTableQuery) + return err + }) assert.NoError(t, err) -} - -func generateRandomIdentity() ent.Identity { - uid := uuid.New() - - keystore := &MockKeystorer{} - - c := ubirch.ECDSACryptoContext{Keystore: keystore} - - err := c.GenerateKey(uid) - if err != nil { - panic(err) - } - - priv, err := keystore.GetPrivateKey(uid) - if err != nil { - panic(err) - } - - pub, err := keystore.GetPublicKey(uid) - if err != nil { - panic(err) - } - - sig := make([]byte, 64) - rand.Read(sig) - auth := make([]byte, 16) - rand.Read(auth) - - return ent.Identity{ - Uid: uuid.New(), - PrivateKey: priv, - PublicKey: pub, - Signature: sig, - AuthToken: base64.StdEncoding.EncodeToString(auth), - } + err = dm.Close() + assert.NoError(t, err) } -func storeIdentity(ctxManager ContextManager, id ent.Identity, wg *sync.WaitGroup) error { - defer wg.Done() - +func storeIdentity(ctxManager ContextManager, id ent.Identity) error { ctx, cancel := context.WithCancel(context.Background()) defer cancel() diff --git a/main/adapters/repository/file_manager.go b/main/adapters/repository/legacy_file_manager.go similarity index 59% rename from main/adapters/repository/file_manager.go rename to main/adapters/repository/legacy_file_manager.go index e5bf7fd1..3910bcb9 100644 --- a/main/adapters/repository/file_manager.go +++ b/main/adapters/repository/legacy_file_manager.go @@ -3,11 +3,9 @@ package repository import ( 
"encoding/json" "fmt" - "io/ioutil" "os" "path/filepath" "strings" - "sync" "github.com/google/uuid" "github.com/ubirch/ubirch-protocol-go/ubirch/v2" @@ -18,7 +16,6 @@ import ( const ( keyFileName = "keys.json" signatureDirName = "signatures" - authTokenDirName = "tokens" filePerm = 0644 dirPerm = 0755 @@ -26,35 +23,22 @@ const ( ) type FileManager struct { - keyFile string - signatureDir string - authTokenDir string + KeyFile string + SignatureDir string EncryptedKeystore *ubirch.EncryptedKeystore - keystoreMutex *sync.RWMutex } -// TODO // Ensure FileManager implements the ContextManager interface -//var _ ContextManager = (*FileManager)(nil) - func NewFileManager(configDir string, secret []byte) (*FileManager, error) { f := &FileManager{ - keyFile: filepath.Join(configDir, keyFileName), - signatureDir: filepath.Join(configDir, signatureDirName), - authTokenDir: filepath.Join(configDir, authTokenDirName), + KeyFile: filepath.Join(configDir, keyFileName), + SignatureDir: filepath.Join(configDir, signatureDirName), EncryptedKeystore: ubirch.NewEncryptedKeystore(secret), - keystoreMutex: &sync.RWMutex{}, - } - - err := initDirectories([]string{f.signatureDir, f.authTokenDir}) - if err != nil { - return nil, err } - log.Debugf(" - keystore file: %s", f.keyFile) - log.Debugf(" - signature dir: %s", f.signatureDir) - log.Debugf(" - token dir: %s", f.authTokenDir) + log.Debugf(" - keystore file: %s", f.KeyFile) + log.Debugf(" - signature dir: %s", f.SignatureDir) - err = f.portLegacyProtocolCtxFile(configDir) + err := f.portLegacyProtocolCtxFile(configDir) if err != nil { return nil, err } @@ -73,113 +57,44 @@ func NewFileManager(configDir string, secret []byte) (*FileManager, error) { if err != nil { return nil, err } - log.Debugf("loaded %d existing keys from local file system", len(ids)) + log.Infof("loaded %d keys from file system", len(ids)) return f, nil } -func (f *FileManager) Exists(uid uuid.UUID) (bool, error) { - f.keystoreMutex.RLock() - defer f.keystoreMutex.RUnlock() - - _, err := f.EncryptedKeystore.GetPrivateKey(uid) - if err != nil { - return false, nil - } - return true, nil -} - func (f *FileManager) GetPrivateKey(uid uuid.UUID) ([]byte, error) { - f.keystoreMutex.RLock() - defer f.keystoreMutex.RUnlock() - return f.EncryptedKeystore.GetPrivateKey(uid) } -func (f *FileManager) SetPrivateKey(uid uuid.UUID, key []byte) error { - f.keystoreMutex.Lock() - defer f.keystoreMutex.Unlock() - - return f.EncryptedKeystore.SetPrivateKey(uid, key) -} - func (f *FileManager) GetPublicKey(uid uuid.UUID) ([]byte, error) { - f.keystoreMutex.RLock() - defer f.keystoreMutex.RUnlock() - return f.EncryptedKeystore.GetPublicKey(uid) } -func (f *FileManager) SetPublicKey(uid uuid.UUID, key []byte) error { - f.keystoreMutex.Lock() - defer f.keystoreMutex.Unlock() - - return f.EncryptedKeystore.SetPublicKey(uid, key) -} - func (f *FileManager) GetSignature(uid uuid.UUID) ([]byte, error) { - return ioutil.ReadFile(f.signatureFile(uid)) + return os.ReadFile(f.signatureFile(uid)) } func (f *FileManager) SetSignature(uid uuid.UUID, signature []byte) error { - return ioutil.WriteFile(f.signatureFile(uid), signature, filePerm) -} - -func (f *FileManager) GetAuthToken(uid uuid.UUID) (string, error) { - tokenBytes, err := ioutil.ReadFile(f.authTokenFile(uid)) - if err != nil { - return "", err - } - - return string(tokenBytes), nil -} - -func (f *FileManager) SetAuthToken(uid uuid.UUID, authToken string) error { - return ioutil.WriteFile(f.authTokenFile(uid), []byte(authToken), filePerm) -} - -func (f 
*FileManager) Close() error { - return nil + return os.WriteFile(f.signatureFile(uid), signature, filePerm) } func (f *FileManager) signatureFile(uid uuid.UUID) string { signatureFileName := uid.String() + ".bin" - return filepath.Join(f.signatureDir, signatureFileName) -} - -func (f *FileManager) authTokenFile(uid uuid.UUID) string { - authTokenFileName := uid.String() + ".bin" - return filepath.Join(f.authTokenDir, authTokenFileName) + return filepath.Join(f.SignatureDir, signatureFileName) } func (f *FileManager) loadKeys() error { - return loadFile(f.keyFile, f.EncryptedKeystore.Keystore) -} - -func (f *FileManager) persistKeys() error { - return persistFile(f.keyFile, f.EncryptedKeystore.Keystore) -} - -func initDirectories(directories []string) error { - for _, dir := range directories { - if _, err := os.Stat(dir); os.IsNotExist(err) { - err = os.Mkdir(dir, dirPerm) - if err != nil { - return err - } - } - } - return nil + return loadFile(f.KeyFile, f.EncryptedKeystore.Keystore) } func loadFile(file string, dest interface{}) error { if _, err := os.Stat(file); os.IsNotExist(err) { // if file does not exist yet, return right away return nil } - contextBytes, err := ioutil.ReadFile(filepath.Clean(file)) + contextBytes, err := os.ReadFile(filepath.Clean(file)) if err != nil { file = file + ".bck" - contextBytes, err = ioutil.ReadFile(filepath.Clean(file)) + contextBytes, err = os.ReadFile(filepath.Clean(file)) if err != nil { return err } @@ -203,7 +118,7 @@ func persistFile(file string, source interface{}) error { } } contextBytes, _ := json.MarshalIndent(source, "", " ") - return ioutil.WriteFile(file, contextBytes, filePerm) + return os.WriteFile(file, contextBytes, filePerm) } // this is here only for the purpose of backwards compatibility TODO: DEPRECATE @@ -235,7 +150,7 @@ func (f *FileManager) portLegacyProtocolCtxFile(configDir string) error { } // persist loaded keys to new key storage - err = persistFile(f.keyFile, p.Crypto.Keystore) + err = persistFile(f.KeyFile, p.Crypto.Keystore) if err != nil { return fmt.Errorf("unable to persist keys: %v", err) } @@ -252,7 +167,7 @@ func (f *FileManager) portLegacyProtocolCtxFile(configDir string) error { log.Warnf("unable to delete legacy protocol context file: %v", err) } err = os.Remove(contextFileLegacy + ".bck") - if err != nil { + if err != nil && !os.IsNotExist(err) { log.Warnf("unable to delete legacy protocol context backup file: %v", err) } @@ -260,6 +175,13 @@ func (f *FileManager) portLegacyProtocolCtxFile(configDir string) error { } func (f *FileManager) persistSignatures(signatures map[uuid.UUID][]byte) error { + if _, err := os.Stat(f.SignatureDir); os.IsNotExist(err) { + err = os.Mkdir(f.SignatureDir, dirPerm) + if err != nil { + return err + } + } + for uid, signature := range signatures { if len(signature) != 64 { @@ -278,7 +200,7 @@ func (f *FileManager) portLegacyKeystoreFile() error { legacyKeystoreFile := &legacyCryptoCtx{Keystore: map[string]string{}} // read legacy protocol context from persistent storage - err := loadFile(f.keyFile, legacyKeystoreFile) + err := loadFile(f.KeyFile, legacyKeystoreFile) if err != nil { return fmt.Errorf("unable to load legacy protocol context: %v", err) } @@ -288,7 +210,7 @@ func (f *FileManager) portLegacyKeystoreFile() error { } // persist loaded keys to new key storage - err = persistFile(f.keyFile, legacyKeystoreFile.Keystore) + err = persistFile(f.KeyFile, legacyKeystoreFile.Keystore) if err != nil { return fmt.Errorf("unable to persist keys: %v", err) } diff --git 
a/main/adapters/repository/migrator.go b/main/adapters/repository/migrator.go index e3c4f28d..8507875d 100644 --- a/main/adapters/repository/migrator.go +++ b/main/adapters/repository/migrator.go @@ -2,26 +2,34 @@ package repository import ( "context" - "database/sql" "encoding/base64" "fmt" "os" - "strings" "time" - "github.com/google/uuid" "github.com/ubirch/ubirch-client-go/main/config" "github.com/ubirch/ubirch-client-go/main/ent" - "golang.org/x/crypto/argon2" log "github.com/sirupsen/logrus" ) func Migrate(c *config.Config, configDir string) error { - dm, err := NewSqlDatabaseInfo(c.PostgresDSN, c.DbMaxConns) + ctxManager, err := GetContextManager(c) if err != nil { return err } + defer func() { + err := ctxManager.Close() + if err != nil { + log.Error(err) + } + }() + + dm, ok := ctxManager.(*DatabaseManager) + if !ok { + return fmt.Errorf("context migration is only supported from file-based context to database. " + + "Please set a DSN for a PostgreSQL or SQLite database in the configuration") + } for i := 0; i < 10; i++ { err = dm.IsReady() @@ -36,70 +44,42 @@ func Migrate(c *config.Config, configDir string) error { return err } - txCtx, cancel := context.WithCancel(context.Background()) - defer cancel() - - migration := &Migration{ - id: MigrationID, - } - - err = migration.getVersion(txCtx, dm) - if err != nil { - return err - } - - if migration.version == MigrationVersionLatest { - log.Infof("database migration version already up to date") - return nil + // fixme: this is here for legacy reasons and is hacky + // we used to support db schema migration by tracking migration versions in a table "version" + // if this table exists, we can assume that the database has been migrated + // and return without error + if dm.driverName == PostgreSQL { + _, err = dm.db.Exec("SELECT * FROM version") + if err == nil { + log.Warnf("database schema migration is no longer supported in this version") + return nil + } } - log.Debugf("database migration version: %s / application migration version: %s", migration.version, MigrationVersionLatest) p, err := NewExtendedProtocol(dm, c) if err != nil { return err } - if migration.version == MigrationVersionNoDB { - err = migrateIdentities(c, configDir, p) - if err != nil { - return fmt.Errorf("could not migrate file-based context to database: %v", err) - } - - log.Infof("successfully migrated file-based context to database") + err = migrateIdentities(c, configDir, p) + if err != nil { + return fmt.Errorf("could not migrate file-based context to database: %v", err) } - if migration.version == MigrationVersionInit { - err = hashAuthTokens(dm, p) - if err != nil { - return fmt.Errorf("could not hash auth tokens in database: %v", err) - } - - log.Infof("successfully hashed auth tokens in database") - migration.version = MigrationVersionHashedAuth - } + log.Infof("successfully migrated file-based context to database") - if migration.version == MigrationVersionHashedAuth { - err = addColumnActiveBoolean(dm) - if err != nil { - return fmt.Errorf("could not add column \"active\" to database table: %v", err) - } - - log.Infof("successfully added column \"active\" to database table") - } - - migration.version = MigrationVersionLatest - return migration.updateVersion() + return nil } -func getIdentitiesFromLegacyCtx(c *config.Config, configDir string) ([]ent.Identity, error) { - log.Infof("getting existing identities from file system") +func getIdentitiesFromLegacyCtx(c *config.Config, configDir string, p *ExtendedProtocol) (identities []*ent.Identity, err error) { +
log.Infof("loading existing identities from file system") secret16Bytes, err := base64.StdEncoding.DecodeString(c.Secret16Base64) if err != nil { - return nil, fmt.Errorf("unable to decode base64 encoded secret for legacy key store decoding (%s): %v", c.Secret16Base64, err) + return nil, fmt.Errorf("unable to decode secret for legacy key store (%s): %v", c.Secret16Base64, err) } if len(secret16Bytes) != 16 { - return nil, fmt.Errorf("invalid secret for legacy key store decoding: secret length must be 16 bytes (is %d)", len(secret16Bytes)) + return nil, fmt.Errorf("invalid secret for legacy key store: secret length must be 16 bytes (is %d)", len(secret16Bytes)) } fileManager, err := NewFileManager(configDir, secret16Bytes) @@ -112,11 +92,13 @@ func getIdentitiesFromLegacyCtx(c *config.Config, configDir string) ([]ent.Ident return nil, err } - var allIdentities []ent.Identity + if len(uids) == 0 { + return nil, fmt.Errorf("%s not found or empty", fileManager.KeyFile) + } for _, uid := range uids { - i := ent.Identity{ + i := &ent.Identity{ Uid: uid, } @@ -132,36 +114,30 @@ func getIdentitiesFromLegacyCtx(c *config.Config, configDir string) ([]ent.Ident i.Signature, err = fileManager.GetSignature(uid) if err != nil { - if os.IsNotExist(err) { // if file does not exist -> create genesis signature - i.Signature = make([]byte, 64) + if os.IsNotExist(err) { // if file does not exist, create genesis signature + i.Signature = make([]byte, p.SignatureLength()) } else { // file exists but something went wrong return nil, fmt.Errorf("%s: %v", uid, err) } } - i.AuthToken, err = fileManager.GetAuthToken(uid) - if err != nil { - if os.IsNotExist(err) { // if file does not exist -> get auth token from config - i.AuthToken = c.Devices[uid.String()] - } else { // file exists but something went wrong - return nil, fmt.Errorf("%s: %v", uid, err) - } - } + // get auth token from config + i.AuthToken = c.Devices[uid.String()] - allIdentities = append(allIdentities, i) + identities = append(identities, i) } - return allIdentities, nil + return identities, nil } func migrateIdentities(c *config.Config, configDir string, p *ExtendedProtocol) error { // migrate from file based context - identitiesToPort, err := getIdentitiesFromLegacyCtx(c, configDir) + identities, err := getIdentitiesFromLegacyCtx(c, configDir, p) if err != nil { return err } - log.Infof("starting migration from files to DB...") + log.Infof("starting migration from legacy context files to DB...") ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -171,7 +147,7 @@ func migrateIdentities(c *config.Config, configDir string, p *ExtendedProtocol) return err } - for i, id := range identitiesToPort { + for i, id := range identities { log.Infof("%4d: %s", i+1, id.Uid) initialized, err := p.IsInitialized(id.Uid) @@ -184,7 +160,7 @@ func migrateIdentities(c *config.Config, configDir string, p *ExtendedProtocol) continue } - err = p.StoreIdentity(tx, id) + err = p.StoreIdentity(tx, *id) if err != nil { return err } @@ -192,111 +168,3 @@ func migrateIdentities(c *config.Config, configDir string, p *ExtendedProtocol) return tx.Commit() } - -func hashAuthTokens(dm *DatabaseManager, p *ExtendedProtocol) error { - query := fmt.Sprintf("SELECT uid, auth_token FROM %s FOR UPDATE", PostgresIdentityTableName) - - rows, err := dm.db.Query(query) - if err != nil { - return err - } - defer func(rows *sql.Rows) { - err := rows.Close() - if err != nil { - log.Error(err) - } - }(rows) - - var ( - uid uuid.UUID - auth string - ) - - ctx, cancel := 
context.WithCancel(context.Background()) - defer cancel() - - for rows.Next() { - err = rows.Scan(&uid, &auth) - if err != nil { - return err - } - - // make sure that the password is not already hashed before hashing - isHashed, err := isArgon2idPasswordHash(auth) - if err != nil { - return err - } - - if isHashed { - continue - } - - pwHash, err := p.pwHasher.GeneratePasswordHash(ctx, auth) - if err != nil { - return err - } - - err = storeAuth(dm, uid, pwHash) - if err != nil { - return err - } - } - - return rows.Err() -} - -func isArgon2idPasswordHash(pw string) (bool, error) { - vals := strings.Split(pw, "$") - if len(vals) != 6 { - return false, nil - } - - var ( - v int - m, t uint32 - p uint8 - ) - - _, err := fmt.Sscanf(vals[2], "v=%d", &v) - if err != nil { - return false, err - } - - if v != argon2.Version { - return false, fmt.Errorf("unsupported argon2id version: %d", v) - } - - _, err = fmt.Sscanf(vals[3], "m=%d,t=%d,p=%d", &m, &t, &p) - if err != nil { - return false, err - } - - _, err = base64.RawStdEncoding.Strict().DecodeString(vals[4]) - if err != nil { - return false, err - } - - _, err = base64.RawStdEncoding.Strict().DecodeString(vals[5]) - if err != nil { - return false, err - } - - return true, nil -} - -func storeAuth(dm *DatabaseManager, uid uuid.UUID, auth string) error { - query := fmt.Sprintf("UPDATE %s SET auth_token = $1 WHERE uid = $2;", PostgresIdentityTableName) - - _, err := dm.db.Exec(query, &auth, uid) - - return err -} - -func addColumnActiveBoolean(dm *DatabaseManager) error { - query := fmt.Sprintf( - "ALTER TABLE %s ADD active boolean NOT NULL DEFAULT(TRUE)", PostgresIdentityTableName) - - _, err := dm.db.Exec(query) - - return err -} diff --git a/main/adapters/repository/migrator_test.go b/main/adapters/repository/migrator_test.go index 3f584e56..e3a310a7 100644 --- a/main/adapters/repository/migrator_test.go +++ b/main/adapters/repository/migrator_test.go @@ -1,35 +1,72 @@ package repository import ( - "database/sql" + "context" "encoding/base64" - "fmt" + "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/ubirch/ubirch-client-go/main/config" - "io/ioutil" "os" + "path/filepath" "testing" ) const ( testSecret16Base64 = "Z+08XlrEAkTf3Ss7eyMrCg==" testSecret32Base64 = "CXbgnOK9QdAB44UaeMCKQIE33iCX4xCPDzbh+sQplRY=" + + testUUID = "21c033cf-38af-466e-b5da-32f0a3ab6020" ) func TestMigrate(t *testing.T) { - conf := setupMigrationTest(t) - defer cleanUpMigrationTest(t, conf.PostgresDSN) + testCases := []struct { + name string + setDSN func(*config.Config, string) error + }{ + { + name: "postgres migration", + setDSN: func(c *config.Config, _ string) error { + dbConf, err := getConfig() + if err != nil { + return err + } + c.DbDriver = PostgreSQL + c.DbDSN = dbConf.DbDSN + return nil + }, + }, + { + name: "sqlite migration", + setDSN: func(c *config.Config, configDir string) error { + c.DbDriver = SQLite + c.DbDSN = filepath.Join(configDir, testSQLiteDSN) + return nil + }, + }, + } + for _, c := range testCases { + t.Run(c.name, func(t *testing.T) { + tmp := t.TempDir() - err := Migrate(conf, "") - assert.NoError(t, err) + conf := setupMigrationTest(t, tmp) + defer cleanUpMigrationTest(t, conf, tmp) + + err := c.setDSN(conf, tmp) + require.NoError(t, err) + + err = Migrate(conf, tmp) + require.NoError(t, err) + + verifyMigration(t, conf) + }) + } } -func setupMigrationTest(t *testing.T) *config.Config { - err := ioutil.WriteFile(contextFileName_Legacy, []byte(legacyProtocolCtxJson), filePerm) 
- require.NoError(t, err) +func setupMigrationTest(t *testing.T, configDir string) *config.Config { + legacyCtxFile := filepath.Join(configDir, contextFileName_Legacy) - dbConf, err := getConfig() + err := os.WriteFile(legacyCtxFile, []byte(legacyProtocolCtxJson), filePerm) require.NoError(t, err) secretBytes32, _ := base64.StdEncoding.DecodeString(testSecret32Base64) @@ -38,8 +75,6 @@ func setupMigrationTest(t *testing.T) *config.Config { Devices: devices, Secret16Base64: testSecret16Base64, SecretBytes32: secretBytes32, - PostgresDSN: dbConf.PostgresDSN, - DbMaxConns: dbConf.DbMaxConns, KdMaxTotalMemMiB: 4, KdParamMemMiB: 2, KdParamTime: 1, @@ -47,70 +82,89 @@ func setupMigrationTest(t *testing.T) *config.Config { } } -func cleanUpMigrationTest(t *testing.T, dsn string) { - err := os.Remove(contextFileName_Legacy) - if !os.IsNotExist(err) { - assert.NoError(t, err) - } +func cleanUpMigrationTest(t *testing.T, c *config.Config, configDir string) { + // assert legacy files were cleaned up after migration + _, err := os.Stat(filepath.Join(configDir, contextFileName_Legacy)) + assert.Truef(t, os.IsNotExist(err), "%s has not been cleaned up after migration", contextFileName_Legacy) - err = os.Remove(keyFileName) - if !os.IsNotExist(err) { - assert.NoError(t, err) - } + _, err = os.Stat(filepath.Join(configDir, contextFileName_Legacy+".bck")) + assert.Truef(t, os.IsNotExist(err), "%s has not been cleaned up after migration", contextFileName_Legacy+".bck") - err = os.RemoveAll(signatureDirName) - if !os.IsNotExist(err) { - assert.NoError(t, err) - } + //_, err = os.Stat(filepath.Join(configDir, keyFileName)) + //assert.Truef(t, os.IsNotExist(err), "%s has not been cleaned up after migration", keyFileName) + // + //_, err = os.Stat(filepath.Join(configDir, keyFileName+".bck")) + //assert.Truef(t, os.IsNotExist(err), "%s has not been cleaned up after migration", keyFileName+".bck") + // + //_, err = os.Stat(filepath.Join(configDir, signatureDirName)) + //assert.Truef(t, os.IsNotExist(err), "%s has not been cleaned up after migration", signatureDirName) - err = os.RemoveAll(authTokenDirName) - if !os.IsNotExist(err) { - assert.NoError(t, err) - } + ctxManager, err := GetContextManager(c) + require.NoError(t, err) + + dm, ok := ctxManager.(*DatabaseManager) + require.True(t, ok) + + cleanUpDB(t, dm) +} - db, err := sql.Open(PostgreSql, dsn) +func verifyMigration(t *testing.T, c *config.Config) { + ctxManager, err := GetContextManager(c) require.NoError(t, err) - _, err = db.Exec(fmt.Sprintf("DROP TABLE %s;", PostgresIdentityTableName)) - assert.NoError(t, err) + p, err := NewExtendedProtocol(ctxManager, c) + require.NoError(t, err) - _, err = db.Exec(fmt.Sprintf("DROP TABLE %s;", PostgresVersionTableName)) + i, err := p.LoadIdentity(uuid.MustParse(testUUID)) + require.NoError(t, err) + + assert.Equal(t, + "4m52sCx5uW3XY7oNnZul3DZRRcmhPFmo0HoQeIrw8AVca6CWQKtn3+NkbrdOeWd/LVor7WQXREubUsbo9BgpqQ==", + base64.StdEncoding.EncodeToString(i.Signature)) + + ok, found, err := p.CheckAuth(context.Background(), uuid.MustParse(testUUID), "Pv3cAWvHnde/sxcM7fA02g==") assert.NoError(t, err) + assert.True(t, found) + assert.True(t, ok) + + signature, err := p.Crypto.Sign(uuid.MustParse(testUUID), []byte("message")) + require.NoError(t, err) - err = db.Close() + ok, err = p.Crypto.Verify(uuid.MustParse(testUUID), []byte("message"), signature) assert.NoError(t, err) + assert.True(t, ok) } const legacyProtocolCtxJson = `{ "Crypto": { "Keystore": { - "58999fc8-1c9b-4fc9-9d67-ce6223037dea": 
"7dl0sHbJnXCwPWiVDAx5UqwKiept2I5Lr15uFR1E0M18bemSph2h58ccfVSKSfj8htrYvLJHoVtLmq63ZZ9qwOKki/+ug8sqknLt3TjfAEnz3v5vYRmd5AxRVAFEdiYt6V4r9+R8Y4oZiJGEzGr9UFLN/V0waibWK5tHZ69eMYiOFJUKd4L+G7HWUJ9BJvsOStNOgIkJhk9eOE+7Ix6zpzEvsOf0xed+ZJzP6hfWkSwQhZPawZsShk9H4gr1Sju3", - "68999fc8-1c9b-4fc9-9d67-ce6223037dea": "/5bOrMeqtZJAH7JBYzX2QJzSM5nhoOh7lmO+uG7C/opfV2kiVp56nbjWFmXhSqjrUHBrVgjhSIlziD5Ow1Dx+8b/+79hus/D/lNA5iEJhab4o7B0J3Zt7PLiMkUo5eWvsbXfCUd/bOGjV+N4u1B2mmSPmUqLeu/N7mJMynA1xRoSrEXnBWgBm5nBOABkkFDYBwg09hrPn5fOrn1cg87osl/cJBcZvjZB0I7MYZ7OmBf4sf+mO62zYvzMzoW455VG", - "78999fc8-1c9b-4fc9-9d67-ce6223037dea": "xy9FRVPOn20YJV3cK0FB31TnQN4eKInzGiQbCdZmvaya5x095n0jVBPokQvOg0sCouyCwzHexYiTaKF7Ezfe6H5b7IoxknICCQxb0k3iFVGBMwHA99ZoBn4aUENgsgV3UsLGzX2rBjenPuz8iq0hU3eaI/sZMPVSsN4qs83doXMfMDGc9GQdNfxPASKboyi2p3cFmw2axcOextY3YYWZJHVQa+zze10PrT2TFjezpyMih0weqYCRTTg9BWyi0d7X", - "88999fc8-1c9b-4fc9-9d67-ce6223037dea": "tBKjsCdP8opUns/TGVyIdY79OUutKUlhSBnOzlAABGw7EbukjPCIRc17SynRvWF01svj0Ykb+vtd2xvxXFek9hof3KQsKYr0XiykNqV5pE68mq0BytyRp47ch1aH5jHmskp9G7N8wZX+OqYWY3T5Y5nQuH101OwShohwxAOczR3vQgctbVuVUQcx0XsBGI/jbZPQplAIO9S6L9OcYSiGs7JW8INlNMWUD/EyxqG35rcHQm8Ge4wkKJViQB8+pRak", - "_58999fc8-1c9b-4fc9-9d67-ce6223037dea": "olHwjZjmaQ+DF/BRjXZ1DGOeC1e9UPUHbNxqwj7nCkfFmIi1o3HlijIauBcnrGrkvTXIkxKFYEGiFAlXogiYxFqblqOTR5dWRcCEM2PhWww96p6+vs3P/lBCYb83eHuXKg8pbWU9zWvvrvy47mFnMxj/R2HQi4r2UyWad3hrY7z/EY5FNYOz6OnPGZmEZVpn8cIQk/KBIMKJ+UhbVHEYz1SjCCe8wi1H5lO6mNtZgv7JQqMyEtwqh69bCpeVO0v0kbCl82as9kTNynfQibQDoTv4vc57sHjD/NcDAHgrm9eAg2vkGED/VA==", - "_68999fc8-1c9b-4fc9-9d67-ce6223037dea": "JtctXKoBi7Q1/eswAZN+bSvlddZewg1r+WgGuxqCTB4+C/kVG/E46BJCgJwXjkbgRhmFxy9wjyiHBAf3GfLkk/p0fzhlh71jRVyn8f9ucWQJOlV0QckpEc5+u1zeUY9uNnO5VfJCMOsxrFarWljQtYXK1K1EVjL+DTjbf6TaSRDpZa0b6x7hWqBjHe22XuVKASJA5XOAa0GW6FFZ7eAbZGOz6xLZsbR7aFYODwJBO/mc5VY+Ljb012r512os5f1y+z1ouSHbpb4H5s0vrRayn6XiHx6eHEfPRhU6tjL+Ha4ndpdRPOWPPQ==", - "_78999fc8-1c9b-4fc9-9d67-ce6223037dea": "efmz7GoJHGrmQ3cL3tnXazAVW0XKfsW6COosNN0PFquXjXs68rqZDVnkQlTdeam83BD1Xc92ZWShVLHzT9o/JENsxE/pSQj3SM0QNC1kwNfYCleYPCc4bnu1yVMgVpuLfGYGRsyk5vNTU7ogVBKpsGrAFoMixyDnlIi2fbcG7jZT/rpgUWNAMGdJeruIdMiX8HVTyYwZHwOcJqxNMGfM4ohBiA+T6mYh4nlBmLZ1nIOqf/Vl2zpOsvpWLDWcikQSMsJq+ychHB1vRJwCIb9APLZ9f/z+T1hQANpoIVunlAN4Y0uXlFo2UQ==", - "_88999fc8-1c9b-4fc9-9d67-ce6223037dea": "tYTNIBtYgIgfFEb1t2UV5D0IS/rYsxWbYAPT1PDRm/poqhi8GJx/6pDRSvoUS2pwhHqHF2XViH3WdUh8wCJM4XN9VNveaa7gK7RoK1E/fkV3CLhr/3nszULIchO8xEGVNomoOi5r+wLNE6Fb4Q2BUGcZEA8cYFcbPEI3tttCDhGq8nzvIqdiI0cdyKxZ11A4Xnr6K6Who3GdbtszkSGGxO3JW6JjVQkDorwq70TwQXc7nBOKP9UuizO582zaa0rxzC89I1ybiZ5ICtcx2n7WbCanIaZlWB01J0lcO9jb29hqgs04FomPcg==" + "21c033cf-38af-466e-b5da-32f0a3ab6020": "7dl0sHbJnXCwPWiVDAx5UqwKiept2I5Lr15uFR1E0M18bemSph2h58ccfVSKSfj8htrYvLJHoVtLmq63ZZ9qwOKki/+ug8sqknLt3TjfAEnz3v5vYRmd5AxRVAFEdiYt6V4r9+R8Y4oZiJGEzGr9UFLN/V0waibWK5tHZ69eMYiOFJUKd4L+G7HWUJ9BJvsOStNOgIkJhk9eOE+7Ix6zpzEvsOf0xed+ZJzP6hfWkSwQhZPawZsShk9H4gr1Sju3", + "b8eaf649-3eaa-472e-9b69-0c6793254c76": "/5bOrMeqtZJAH7JBYzX2QJzSM5nhoOh7lmO+uG7C/opfV2kiVp56nbjWFmXhSqjrUHBrVgjhSIlziD5Ow1Dx+8b/+79hus/D/lNA5iEJhab4o7B0J3Zt7PLiMkUo5eWvsbXfCUd/bOGjV+N4u1B2mmSPmUqLeu/N7mJMynA1xRoSrEXnBWgBm5nBOABkkFDYBwg09hrPn5fOrn1cg87osl/cJBcZvjZB0I7MYZ7OmBf4sf+mO62zYvzMzoW455VG", + "80ea9750-ba39-446b-83f4-15724ebb95dd": "xy9FRVPOn20YJV3cK0FB31TnQN4eKInzGiQbCdZmvaya5x095n0jVBPokQvOg0sCouyCwzHexYiTaKF7Ezfe6H5b7IoxknICCQxb0k3iFVGBMwHA99ZoBn4aUENgsgV3UsLGzX2rBjenPuz8iq0hU3eaI/sZMPVSsN4qs83doXMfMDGc9GQdNfxPASKboyi2p3cFmw2axcOextY3YYWZJHVQa+zze10PrT2TFjezpyMih0weqYCRTTg9BWyi0d7X", + "cecc55fb-5f74-4a33-87ec-340158f96260": 
"tBKjsCdP8opUns/TGVyIdY79OUutKUlhSBnOzlAABGw7EbukjPCIRc17SynRvWF01svj0Ykb+vtd2xvxXFek9hof3KQsKYr0XiykNqV5pE68mq0BytyRp47ch1aH5jHmskp9G7N8wZX+OqYWY3T5Y5nQuH101OwShohwxAOczR3vQgctbVuVUQcx0XsBGI/jbZPQplAIO9S6L9OcYSiGs7JW8INlNMWUD/EyxqG35rcHQm8Ge4wkKJViQB8+pRak", + "_21c033cf-38af-466e-b5da-32f0a3ab6020": "olHwjZjmaQ+DF/BRjXZ1DGOeC1e9UPUHbNxqwj7nCkfFmIi1o3HlijIauBcnrGrkvTXIkxKFYEGiFAlXogiYxFqblqOTR5dWRcCEM2PhWww96p6+vs3P/lBCYb83eHuXKg8pbWU9zWvvrvy47mFnMxj/R2HQi4r2UyWad3hrY7z/EY5FNYOz6OnPGZmEZVpn8cIQk/KBIMKJ+UhbVHEYz1SjCCe8wi1H5lO6mNtZgv7JQqMyEtwqh69bCpeVO0v0kbCl82as9kTNynfQibQDoTv4vc57sHjD/NcDAHgrm9eAg2vkGED/VA==", + "_b8eaf649-3eaa-472e-9b69-0c6793254c76": "JtctXKoBi7Q1/eswAZN+bSvlddZewg1r+WgGuxqCTB4+C/kVG/E46BJCgJwXjkbgRhmFxy9wjyiHBAf3GfLkk/p0fzhlh71jRVyn8f9ucWQJOlV0QckpEc5+u1zeUY9uNnO5VfJCMOsxrFarWljQtYXK1K1EVjL+DTjbf6TaSRDpZa0b6x7hWqBjHe22XuVKASJA5XOAa0GW6FFZ7eAbZGOz6xLZsbR7aFYODwJBO/mc5VY+Ljb012r512os5f1y+z1ouSHbpb4H5s0vrRayn6XiHx6eHEfPRhU6tjL+Ha4ndpdRPOWPPQ==", + "_80ea9750-ba39-446b-83f4-15724ebb95dd": "efmz7GoJHGrmQ3cL3tnXazAVW0XKfsW6COosNN0PFquXjXs68rqZDVnkQlTdeam83BD1Xc92ZWShVLHzT9o/JENsxE/pSQj3SM0QNC1kwNfYCleYPCc4bnu1yVMgVpuLfGYGRsyk5vNTU7ogVBKpsGrAFoMixyDnlIi2fbcG7jZT/rpgUWNAMGdJeruIdMiX8HVTyYwZHwOcJqxNMGfM4ohBiA+T6mYh4nlBmLZ1nIOqf/Vl2zpOsvpWLDWcikQSMsJq+ychHB1vRJwCIb9APLZ9f/z+T1hQANpoIVunlAN4Y0uXlFo2UQ==", + "_cecc55fb-5f74-4a33-87ec-340158f96260": "tYTNIBtYgIgfFEb1t2UV5D0IS/rYsxWbYAPT1PDRm/poqhi8GJx/6pDRSvoUS2pwhHqHF2XViH3WdUh8wCJM4XN9VNveaa7gK7RoK1E/fkV3CLhr/3nszULIchO8xEGVNomoOi5r+wLNE6Fb4Q2BUGcZEA8cYFcbPEI3tttCDhGq8nzvIqdiI0cdyKxZ11A4Xnr6K6Who3GdbtszkSGGxO3JW6JjVQkDorwq70TwQXc7nBOKP9UuizO582zaa0rxzC89I1ybiZ5ICtcx2n7WbCanIaZlWB01J0lcO9jb29hqgs04FomPcg==" }, "Names": { - "58999fc8-1c9b-4fc9-9d67-ce6223037dea": "58999fc8-1c9b-4fc9-9d67-ce6223037dea", - "68999fc8-1c9b-4fc9-9d67-ce6223037dea": "68999fc8-1c9b-4fc9-9d67-ce6223037dea", - "78999fc8-1c9b-4fc9-9d67-ce6223037dea": "78999fc8-1c9b-4fc9-9d67-ce6223037dea", - "88999fc8-1c9b-4fc9-9d67-ce6223037dea": "88999fc8-1c9b-4fc9-9d67-ce6223037dea" + "21c033cf-38af-466e-b5da-32f0a3ab6020": "21c033cf-38af-466e-b5da-32f0a3ab6020", + "b8eaf649-3eaa-472e-9b69-0c6793254c76": "b8eaf649-3eaa-472e-9b69-0c6793254c76", + "80ea9750-ba39-446b-83f4-15724ebb95dd": "80ea9750-ba39-446b-83f4-15724ebb95dd", + "cecc55fb-5f74-4a33-87ec-340158f96260": "cecc55fb-5f74-4a33-87ec-340158f96260" } }, "Signatures": { - "58999fc8-1c9b-4fc9-9d67-ce6223037dea": "4m52sCx5uW3XY7oNnZul3DZRRcmhPFmo0HoQeIrw8AVca6CWQKtn3+NkbrdOeWd/LVor7WQXREubUsbo9BgpqQ==", - "68999fc8-1c9b-4fc9-9d67-ce6223037dea": "Cw+wZR1/6d4H+5tC8fvmjRaoYDX3HJnuacPQob0uxRvwDm6bYF8HqOVMMrdKZLnqXv+NvTexsd0lrVxjJ4KMTA==", - "78999fc8-1c9b-4fc9-9d67-ce6223037dea": "0KTGDy4KfNch8qj6z5LgcwszOY26EiJddnhxy2znMMqDxHUQajdwaz4WqQ1ZC3JOmwtCuMlm3OE8A5I2nzTaXA==", - "88999fc8-1c9b-4fc9-9d67-ce6223037dea": "RMr/tuHxSIzQE0Lx7JLZwAAjcVB4okaWOl6ptsKXYE5JbB1L5yqX6exBvcbVZ+BeINkhSn4o7cVp3cIHh6QiSQ==" + "21c033cf-38af-466e-b5da-32f0a3ab6020": "4m52sCx5uW3XY7oNnZul3DZRRcmhPFmo0HoQeIrw8AVca6CWQKtn3+NkbrdOeWd/LVor7WQXREubUsbo9BgpqQ==", + "b8eaf649-3eaa-472e-9b69-0c6793254c76": "Cw+wZR1/6d4H+5tC8fvmjRaoYDX3HJnuacPQob0uxRvwDm6bYF8HqOVMMrdKZLnqXv+NvTexsd0lrVxjJ4KMTA==", + "80ea9750-ba39-446b-83f4-15724ebb95dd": "0KTGDy4KfNch8qj6z5LgcwszOY26EiJddnhxy2znMMqDxHUQajdwaz4WqQ1ZC3JOmwtCuMlm3OE8A5I2nzTaXA==", + "cecc55fb-5f74-4a33-87ec-340158f96260": "RMr/tuHxSIzQE0Lx7JLZwAAjcVB4okaWOl6ptsKXYE5JbB1L5yqX6exBvcbVZ+BeINkhSn4o7cVp3cIHh6QiSQ==" } }` var devices = map[string]string{ - "58999fc8-1c9b-4fc9-9d67-ce6223037dea": 
"Pv3cAWvHnde/sxcM7fA02g==", - "68999fc8-1c9b-4fc9-9d67-ce6223037dea": "2PG3un+XgXt4TyCtSZNO2A==", - "78999fc8-1c9b-4fc9-9d67-ce6223037dea": "mlkwF1XW4xEAM2TjUBQCPw==", - "88999fc8-1c9b-4fc9-9d67-ce6223037dea": "PQHlke6G4wFagizLCMwI1w==", + "21c033cf-38af-466e-b5da-32f0a3ab6020": "Pv3cAWvHnde/sxcM7fA02g==", + "b8eaf649-3eaa-472e-9b69-0c6793254c76": "2PG3un+XgXt4TyCtSZNO2A==", + "80ea9750-ba39-446b-83f4-15724ebb95dd": "mlkwF1XW4xEAM2TjUBQCPw==", + "cecc55fb-5f74-4a33-87ec-340158f96260": "PQHlke6G4wFagizLCMwI1w==", } diff --git a/main/adapters/repository/mocks.go b/main/adapters/repository/mocks.go index 5003f5b1..18989813 100644 --- a/main/adapters/repository/mocks.go +++ b/main/adapters/repository/mocks.go @@ -15,7 +15,8 @@ type extendedId struct { } type MockCtxMngr struct { - id extendedId + id extendedId + extId ent.ExternalIdentity } var _ ContextManager = (*MockCtxMngr)(nil) @@ -136,6 +137,26 @@ func (m *MockCtxMngr) LoadAuthForUpdate(t TransactionCtx, u uuid.UUID) (string, return m.id.AuthToken, nil } +func (m *MockCtxMngr) StoreExternalIdentity(ctx context.Context, externalId ent.ExternalIdentity) error { + m.extId = externalId + return nil +} + +func (m *MockCtxMngr) LoadExternalIdentity(ctx context.Context, u uuid.UUID) (*ent.ExternalIdentity, error) { + if m.extId.Uid == uuid.Nil || m.extId.Uid != u { + return nil, ErrNotExist + } + return &m.extId, nil +} + +func (m *MockCtxMngr) GetIdentityUUIDs() ([]uuid.UUID, error) { + return []uuid.UUID{m.id.Uid}, nil +} + +func (m *MockCtxMngr) GetExternalIdentityUUIDs() ([]uuid.UUID, error) { + return []uuid.UUID{m.extId.Uid}, nil +} + func (m *MockCtxMngr) IsReady() error { return nil } diff --git a/main/adapters/repository/protocol.go b/main/adapters/repository/protocol.go index 48c6465a..3e527f29 100644 --- a/main/adapters/repository/protocol.go +++ b/main/adapters/repository/protocol.go @@ -41,6 +41,11 @@ type ExtendedProtocol struct { } func NewExtendedProtocol(ctxManager ContextManager, conf *config.Config) (*ExtendedProtocol, error) { + err := logKnownIdentities(ctxManager, conf.LogKnownIdentities) + if err != nil { + return nil, err + } + keyCache := NewKeyCache() crypto := &ubirch.ECDSACryptoContext{ @@ -160,6 +165,36 @@ func (p *ExtendedProtocol) StoreSignature(tx TransactionCtx, uid uuid.UUID, sign return tx.Commit() } +func (p *ExtendedProtocol) StoreExternalIdentity(ctx context.Context, extId ent.ExternalIdentity) (err error) { + // store public key raw bytes + extId.PublicKey, err = p.PublicKeyPEMToBytes(extId.PublicKey) + if err != nil { + return err + } + + return p.ContextManager.StoreExternalIdentity(ctx, extId) +} + +func (p *ExtendedProtocol) LoadExternalIdentity(ctx context.Context, uid uuid.UUID) (*ent.ExternalIdentity, error) { + extId, err := p.ContextManager.LoadExternalIdentity(ctx, uid) + if err != nil { + return nil, err + } + + extId.PublicKey, err = p.PublicKeyBytesToPEM(extId.PublicKey) + if err != nil { + return nil, err + } + + // load public key to cache + err = p.keyCache.SetPublicKey(uid, extId.PublicKey) + if err != nil { + return nil, err + } + + return extId, nil +} + func (p *ExtendedProtocol) LoadPrivateKey(uid uuid.UUID) (privKeyPEM []byte, err error) { privKeyPEM, err = p.keyCache.GetPrivateKey(uid) if err != nil { @@ -179,15 +214,27 @@ func (p *ExtendedProtocol) LoadPublicKey(uid uuid.UUID) (pubKeyPEM []byte, err e if err != nil { i, err := p.LoadIdentity(uid) if err != nil { - return nil, err + if err == ErrNotExist { // if the public key is not one of the internal identities, try external 
identities + return p.loadPublicKeyFromExternalIdentity(uid) + } else { + return nil, err + } } - pubKeyPEM = i.PublicKey + return i.PublicKey, nil } return pubKeyPEM, nil } +func (p *ExtendedProtocol) loadPublicKeyFromExternalIdentity(uid uuid.UUID) (pubKeyPEM []byte, err error) { + extId, err := p.LoadExternalIdentity(context.TODO(), uid) + if err != nil { + return nil, err + } + return extId.PublicKey, nil +} + func (p *ExtendedProtocol) IsInitialized(uid uuid.UUID) (initialized bool, err error) { _, err = p.LoadPrivateKey(uid) if err == ErrNotExist { @@ -293,3 +340,31 @@ func (p *ExtendedProtocol) checkIdentityAttributes(i *ent.Identity) error { return nil } + +func logKnownIdentities(ctxManager ContextManager, logKnownIdentities bool) error { + ids, err := ctxManager.GetIdentityUUIDs() + if err != nil { + return err + } + + log.Infof("%d known internal identities (signing and verification)", len(ids)) + if logKnownIdentities { + for i, id := range ids { + log.Infof("%6d: %s", i, id) + } + } + + extIds, err := ctxManager.GetExternalIdentityUUIDs() + if err != nil { + return err + } + + log.Infof("%d known external identities (verification only)", len(extIds)) + if logKnownIdentities { + for i, id := range extIds { + log.Infof("%6d: %s", i, id) + } + } + + return nil +} diff --git a/main/adapters/repository/protocol_test.go b/main/adapters/repository/protocol_test.go index 25a877f1..5c70ca00 100644 --- a/main/adapters/repository/protocol_test.go +++ b/main/adapters/repository/protocol_test.go @@ -19,14 +19,20 @@ import ( var ( testSecret, _ = base64.StdEncoding.DecodeString("ZQJt1OC9+4OZtgZLLT9mX25BbrZdxtOQBjK4GyRF2fQ=") - conf = &config.Config{SecretBytes32: testSecret} + + testUid = uuid.MustParse("b8869002-9d19-418a-94b0-83664843396f") + testPrivKey = []byte("-----BEGIN PRIVATE KEY-----\nMHcCAQEEILagfFV70hVPpY1L5pIkWu3mTZisQ1yCmfhKL5vrGQfOoAoGCCqGSM49\nAwEHoUQDQgAEoEOfFKZ2U+r7L3CqCArZ63IyB83zqByp8chT07MeXLBx9WMYsaqn\nb38qXThsEnH7WwSwA/eRKjm9SbR6cve4Mg==\n-----END PRIVATE KEY-----\n") + testPubKey = []byte("-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEoEOfFKZ2U+r7L3CqCArZ63IyB83z\nqByp8chT07MeXLBx9WMYsaqnb38qXThsEnH7WwSwA/eRKjm9SbR6cve4Mg==\n-----END PUBLIC KEY-----\n") + testPubKeyBytes = []byte{0xa0, 0x43, 0x9f, 0x14, 0xa6, 0x76, 0x53, 0xea, 0xfb, 0x2f, 0x70, 0xaa, 0x08, 0x0a, 0xd9, 0xeb, 0x72, 0x32, 0x07, 0xcd, 0xf3, 0xa8, 0x1c, 0xa9, 0xf1, 0xc8, 0x53, 0xd3, 0xb3, 0x1e, 0x5c, 0xb0, 0x71, 0xf5, 0x63, 0x18, 0xb1, 0xaa, 0xa7, 0x6f, 0x7f, 0x2a, 0x5d, 0x38, 0x6c, 0x12, 0x71, 0xfb, 0x5b, 0x04, 0xb0, 0x03, 0xf7, 0x91, 0x2a, 0x39, 0xbd, 0x49, 0xb4, 0x7a, 0x72, 0xf7, 0xb8, 0x32} + testSignature, _ = base64.StdEncoding.DecodeString("Uv38ByGCZU8WP18PmmIdcpVmx00QA3xNe7sEB9HixkmBhVrYaB0NhtHpHgAWeTnLZpTSxCKs0gigByk5SH9pmQ==") + testAuth = "650YpEeEBF2H88Z88idG6Q==" ) func TestProtocol(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -72,10 +78,10 @@ func TestProtocol(t *testing.T) { } func TestExtendedProtocol_LoadPrivateKey(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() ctx, cancel := 
context.WithCancel(context.Background()) defer cancel() @@ -95,10 +101,10 @@ func TestExtendedProtocol_LoadPrivateKey(t *testing.T) { } func TestExtendedProtocol_LoadPublicKey(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -117,6 +123,19 @@ func TestExtendedProtocol_LoadPublicKey(t *testing.T) { assert.Equal(t, i.PublicKey, pub) } +func TestExtendedProtocol_LoadPublicKeyFromExternalIdentity(t *testing.T) { + extId := ent.ExternalIdentity{Uid: testUid, PublicKey: testPubKeyBytes} + + p, err := NewExtendedProtocol(&MockCtxMngr{ + extId: extId, + }, &config.Config{SecretBytes32: testSecret}) + require.NoError(t, err) + + pub, err := p.LoadPublicKey(extId.Uid) + require.NoError(t, err) + assert.Equal(t, testPubKey, pub) +} + func TestNewExtendedProtocol_BadSecret(t *testing.T) { badSecret := make([]byte, 31) rand.Read(badSecret) @@ -130,10 +149,10 @@ func TestNewExtendedProtocol_BadSecret(t *testing.T) { } func TestExtendedProtocol_StoreSignature(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -171,10 +190,10 @@ func TestExtendedProtocol_StoreSignature(t *testing.T) { } func TestExtendedProtocol_BadStoreSignature(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -200,10 +219,10 @@ func TestExtendedProtocol_BadStoreSignature(t *testing.T) { } func Test_StoreNewIdentity_BadUUID(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() i.Uid = uuid.Nil ctx, cancel := context.WithCancel(context.Background()) @@ -217,10 +236,10 @@ func Test_StoreNewIdentity_BadUUID(t *testing.T) { } func Test_StoreNewIdentity_NilPrivateKey(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() i.PrivateKey = nil ctx, cancel := context.WithCancel(context.Background()) @@ -234,11 +253,11 @@ func Test_StoreNewIdentity_NilPrivateKey(t *testing.T) { } func Test_StoreNewIdentity_BadPrivateKey(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() - rand.Read(i.PrivateKey) + i := getTestIdentity() + i.PrivateKey = []byte("bad private key") ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -251,10 +270,10 @@ func Test_StoreNewIdentity_BadPrivateKey(t *testing.T) { } func Test_StoreNewIdentity_NilPublicKey(t *testing.T) { - p, err := 
NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() i.PublicKey = nil ctx, cancel := context.WithCancel(context.Background()) @@ -268,11 +287,11 @@ func Test_StoreNewIdentity_NilPublicKey(t *testing.T) { } func Test_StoreNewIdentity_BadPublicKey(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() - rand.Read(i.PublicKey) + i := getTestIdentity() + i.PublicKey = []byte("bad public key") ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -285,10 +304,10 @@ func Test_StoreNewIdentity_BadPublicKey(t *testing.T) { } func Test_StoreNewIdentity_BadSignature(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() i.Signature = make([]byte, p.SignatureLength()+1) ctx, cancel := context.WithCancel(context.Background()) @@ -302,10 +321,10 @@ func Test_StoreNewIdentity_BadSignature(t *testing.T) { } func Test_StoreNewIdentity_BadAuth(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() i.AuthToken = "" ctx, cancel := context.WithCancel(context.Background()) @@ -318,11 +337,37 @@ func Test_StoreNewIdentity_BadAuth(t *testing.T) { assert.Error(t, err) } +func TestExtendedProtocol_StoreExternalIdentity(t *testing.T) { + ctxMngr := &MockCtxMngr{} + p, err := NewExtendedProtocol(ctxMngr, &config.Config{SecretBytes32: testSecret}) + require.NoError(t, err) + + err = p.StoreExternalIdentity(context.Background(), ent.ExternalIdentity{Uid: testUid, PublicKey: testPubKey}) + require.NoError(t, err) + assert.Equal(t, testUid, ctxMngr.extId.Uid) + assert.Equal(t, testPubKeyBytes, ctxMngr.extId.PublicKey) +} + +func TestExtendedProtocol_LoadExternalIdentity(t *testing.T) { + ctxMngr := &MockCtxMngr{extId: ent.ExternalIdentity{Uid: testUid, PublicKey: testPubKeyBytes}} + p, err := NewExtendedProtocol(ctxMngr, &config.Config{SecretBytes32: testSecret}) + require.NoError(t, err) + + extId, err := p.LoadExternalIdentity(context.Background(), testUid) + require.NoError(t, err) + assert.Equal(t, testUid, extId.Uid) + assert.Equal(t, testPubKey, extId.PublicKey) + + cachedKey, err := p.keyCache.GetPublicKey(testUid) + require.NoError(t, err) + assert.Equal(t, testPubKey, cachedKey) +} + func TestExtendedProtocol_CheckAuth(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -344,10 +389,10 @@ func TestExtendedProtocol_CheckAuth(t *testing.T) { } func TestExtendedProtocol_CheckAuth_Invalid(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() ctx, cancel := 
context.WithCancel(context.Background()) defer cancel() @@ -370,10 +415,10 @@ func TestExtendedProtocol_CheckAuth_Invalid(t *testing.T) { func TestExtendedProtocol_CheckAuth_Invalid_Cached(t *testing.T) { ctxMngr := &MockCtxMngr{} - p, err := NewExtendedProtocol(ctxMngr, conf) + p, err := NewExtendedProtocol(ctxMngr, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -397,7 +442,7 @@ func TestExtendedProtocol_CheckAuth_Invalid_Cached(t *testing.T) { } func TestExtendedProtocol_CheckAuth_NotFound(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) ok, found, err := p.CheckAuth(context.Background(), uuid.New(), "auth") @@ -408,11 +453,15 @@ func TestExtendedProtocol_CheckAuth_NotFound(t *testing.T) { func TestExtendedProtocol_CheckAuth_Update(t *testing.T) { ctxMngr := &MockCtxMngr{} - conf.KdUpdateParams = true - p, err := NewExtendedProtocol(ctxMngr, conf) + testConf := &config.Config{ + SecretBytes32: testSecret, + KdUpdateParams: true, + } + + p, err := NewExtendedProtocol(ctxMngr, testConf) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -445,10 +494,10 @@ func TestExtendedProtocol_CheckAuth_Update(t *testing.T) { } func TestExtendedProtocol_CheckAuth_AuthCache(t *testing.T) { - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - i := generateRandomIdentity() + i := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -476,10 +525,10 @@ func TestExtendedProtocol_CheckAuth_AuthCache(t *testing.T) { func TestProtocol_Cache(t *testing.T) { wg := &sync.WaitGroup{} - p, err := NewExtendedProtocol(&MockCtxMngr{}, conf) + p, err := NewExtendedProtocol(&MockCtxMngr{}, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) - testIdentity := generateRandomIdentity() + testIdentity := getTestIdentity() ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -507,17 +556,18 @@ func TestProtocol_Cache(t *testing.T) { func TestProtocolLoad(t *testing.T) { wg := &sync.WaitGroup{} - dm, err := initDB() + dm, err := initDB(0) require.NoError(t, err) defer cleanUpDB(t, dm) - p, err := NewExtendedProtocol(dm, conf) + p, err := NewExtendedProtocol(dm, &config.Config{SecretBytes32: testSecret}) require.NoError(t, err) // generate identities var testIdentities []ent.Identity for i := 0; i < testLoad/10; i++ { - testId := generateRandomIdentity() + testId := getTestIdentity() + testId.Uid = uuid.New() testIdentities = append(testIdentities, testId) } @@ -526,7 +576,9 @@ func TestProtocolLoad(t *testing.T) { for _, testId := range testIdentities { wg.Add(1) go func(id ent.Identity) { - err := storeIdentity(p, id, wg) + defer wg.Done() + + err := storeIdentity(p, id) if err != nil { t.Errorf("%s: identity could not be stored: %v", id.Uid, err) } @@ -547,6 +599,16 @@ func TestProtocolLoad(t *testing.T) { wg.Wait() } +func getTestIdentity() ent.Identity { + return ent.Identity{ + Uid: testUid, + PrivateKey: testPrivKey, + PublicKey: testPubKey, + Signature: testSignature, + AuthToken: testAuth, + } +} + func protocolCheckAuth(auth, authToCheck string) 
error { pwHasher := &pw.Argon2idKeyDerivator{} diff --git a/main/config/config.go b/main/config/config.go index 9ebb70d5..66498ef0 100644 --- a/main/config/config.go +++ b/main/config/config.go @@ -49,45 +49,64 @@ const ( defaultTLSCertFile = "cert.pem" defaultTLSKeyFile = "key.pem" + postgresDriver = "postgres" + sqliteDriver = "sqlite" + defaultSQLiteName = "sqlite.db" + defaultKeyDerivationParamMemory = 15 defaultKeyDerivationParamTime = 2 defaultKeyDerivationParamParallelism = 1 defaultKeyDerivationKeyLen = 32 defaultKeyDerivationSaltLen = 16 + + defaultIdentityServiceTimeoutMs = 10_000 // should be high since we want to avoid canceling an otherwise successful key registration + defaultAuthServiceTimeoutMs = 2_000 + defaultVerifyServiceTimeoutMs = 600 + defaultVerificationTimeoutMs = 2_000 ) var IsDevelopment bool type Config struct { - Devices map[string]string `json:"devices"` // maps UUIDs to backend auth tokens (mandatory) - Secret16Base64 string `json:"secret" envconfig:"secret"` // 16 bytes secret used to encrypt the key store (mandatory for migration) LEGACY - Secret32Base64 string `json:"secret32" envconfig:"secret32"` // 32 byte secret used to encrypt the key store (mandatory) - RegisterAuth string `json:"registerAuth"` // auth token needed for new identity registration - Env string `json:"env"` // the ubirch backend environment [dev, demo, prod], defaults to 'prod' - PostgresDSN string `json:"postgresDSN" envconfig:"POSTGRES_DSN"` // data source name for postgres database - DbMaxConns int `json:"dbMaxConns" envconfig:"DB_MAX_CONNS"` // maximum number of open connections to the database - TCP_addr string `json:"TCP_addr"` // the TCP address for the server to listen on, in the form "host:port", defaults to ":8080" - TLS bool `json:"TLS"` // enable serving HTTPS endpoints, defaults to 'false' - TLS_CertFile string `json:"TLSCertFile"` // filename of TLS certificate file name, defaults to "cert.pem" - TLS_KeyFile string `json:"TLSKeyFile"` // filename of TLS key file name, defaults to "key.pem" - CORS bool `json:"CORS"` // enable CORS, defaults to 'false' - CORS_Origins []string `json:"CORS_origins"` // list of allowed origin hosts, defaults to ["*"] - CSR_Country string `json:"CSR_country"` // subject country for public key Certificate Signing Requests - CSR_Organization string `json:"CSR_organization"` // subject organization for public key Certificate Signing Requests - Debug bool `json:"debug"` // enable extended debug output, defaults to 'false' - LogTextFormat bool `json:"logTextFormat"` // log in text format for better human readability, default format is JSON - KdMaxTotalMemMiB uint32 `json:"kdMaxTotalMemMiB" envconfig:"KD_MAX_TOTAL_MEM_MIB"` // maximal total memory to use for key derivation at a time in MiB - KdParamMemMiB uint32 `json:"kdParamMemMiB" envconfig:"KD_PARAM_MEM_MIB"` // memory parameter for key derivation, specifies the size of the memory in MiB - KdParamTime uint32 `json:"kdParamTime" envconfig:"KD_PARAM_TIME"` // time parameter for key derivation, specifies the number of passes over the memory - KdParamParallelism uint8 `json:"kdParamParallelism" envconfig:"KD_PARAM_PARALLELISM"` // parallelism (threads) parameter for key derivation, specifies the number of threads and can be adjusted to the number of available CPUs - KdParamKeyLen uint32 `json:"kdParamKeyLen" envconfig:"KD_PARAM_KEY_LEN"` // key length parameter for key derivation, specifies the length of the resulting key in bytes - KdParamSaltLen uint32 `json:"kdParamSaltLen" 
envconfig:"KD_PARAM_SALT_LEN"` // salt length parameter for key derivation, specifies the length of the random salt in bytes - KdUpdateParams bool `json:"kdUpdateParams" envconfig:"KD_UPDATE_PARAMS"` // update key derivation parameters of already existing password hashes - KeyService string // key service URL (set automatically) - IdentityService string // identity service URL (set automatically) - Niomon string // authentication service URL (set automatically) - VerifyService string // verification service URL (set automatically) - SecretBytes32 []byte // the decoded 32 byte key store secret for database (set automatically) + Devices map[string]string `json:"devices"` // maps UUIDs to backend auth tokens + Secret16Base64 string `json:"secret" envconfig:"secret"` // LEGACY: 16 bytes secret used to encrypt the key store (mandatory only for migration) + Secret32Base64 string `json:"secret32" envconfig:"secret32"` // 32 byte secret used to encrypt the key store (mandatory) + StaticAuth string `json:"staticAuth" envconfig:"STATIC_AUTH"` // static auth token needed for identity registration, csr creation or key deactivation + EnableRegistrationEndpoint bool `json:"enableRegistrationEndpoint" envconfig:"ENABLE_REGISTRATION_ENDPOINT"` // expose endpoint for identity registration + EnableCSRCreationEndpoint bool `json:"enableCSRCreationEndpoint" envconfig:"ENABLE_CSR_CREATION_ENDPOINT"` // expose endpoint for CSR creation + EnableDeactivationEndpoint bool `json:"enableDeactivationEndpoint" envconfig:"ENABLE_DEACTIVATION_ENDPOINT"` // expose endpoint for key status updates (de-/re-activation) + Env string `json:"env"` // the ubirch backend environment [dev, demo, prod], defaults to 'prod' + DbDriver string `json:"dbDriver" envconfig:"DB_DRIVER"` // database driver name + DbDSN string `json:"dbDSN" envconfig:"DB_DSN"` // data source name for database, path to the sqlite db file + DbMaxConns int `json:"dbMaxConns" envconfig:"DB_MAX_CONNS"` // maximum number of open connections to the database + TCP_addr string `json:"TCP_addr"` // the TCP address for the server to listen on, in the form "host:port", defaults to ":8080" + TLS bool `json:"TLS"` // enable serving HTTPS endpoints, defaults to 'false' + TLS_CertFile string `json:"TLSCertFile"` // filename of TLS certificate file name, defaults to "cert.pem" + TLS_KeyFile string `json:"TLSKeyFile"` // filename of TLS key file name, defaults to "key.pem" + CORS bool `json:"CORS"` // enable CORS, defaults to 'false' + CORS_Origins []string `json:"CORS_origins"` // list of allowed origin hosts, defaults to ["*"] + CSR_Country string `json:"CSR_country"` // subject country for public key Certificate Signing Requests + CSR_Organization string `json:"CSR_organization"` // subject organization for public key Certificate Signing Requests + Debug bool `json:"debug"` // enable extended debug output, defaults to 'false' + LogTextFormat bool `json:"logTextFormat"` // log in text format for better human readability, default format is JSON + LogKnownIdentities bool `json:"logKnownIdentities" envconfig:"LOG_KNOWN_IDENTITIES"` // log the UUIDs of all known identities at startup + KdMaxTotalMemMiB uint32 `json:"kdMaxTotalMemMiB" envconfig:"KD_MAX_TOTAL_MEM_MIB"` // maximal total memory to use for key derivation at a time in MiB + KdParamMemMiB uint32 `json:"kdParamMemMiB" envconfig:"KD_PARAM_MEM_MIB"` // memory parameter for key derivation, specifies the size of the memory in MiB + KdParamTime uint32 `json:"kdParamTime" envconfig:"KD_PARAM_TIME"` // time parameter for key 
derivation, specifies the number of passes over the memory + KdParamParallelism uint8 `json:"kdParamParallelism" envconfig:"KD_PARAM_PARALLELISM"` // parallelism (threads) parameter for key derivation, specifies the number of threads and can be adjusted to the number of available CPUs + KdParamKeyLen uint32 `json:"kdParamKeyLen" envconfig:"KD_PARAM_KEY_LEN"` // key length parameter for key derivation, specifies the length of the resulting key in bytes + KdParamSaltLen uint32 `json:"kdParamSaltLen" envconfig:"KD_PARAM_SALT_LEN"` // salt length parameter for key derivation, specifies the length of the random salt in bytes + KdUpdateParams bool `json:"kdUpdateParams" envconfig:"KD_UPDATE_PARAMS"` // update key derivation parameters of already existing password hashes + IdentityServiceTimeoutMs int64 `json:"identityServiceTimeoutMs" envconfig:"IDENTITY_SERVICE_TIMEOUT_MS"` // time limit for requests to the ubirch identity service in milliseconds + AuthServiceTimeoutMs int64 `json:"authServiceTimeoutMs" envconfig:"AUTH_SERVICE_TIMEOUT_MS"` // time limit for requests to the ubirch authentication service (niomon) in milliseconds + VerifyServiceTimeoutMs int64 `json:"verifyServiceTimeoutMs" envconfig:"VERIFY_SERVICE_TIMEOUT_MS"` // time limit for requests to the ubirch verification service in milliseconds + VerificationTimeoutMs int64 `json:"verificationTimeoutMs" envconfig:"VERIFICATION_TIMEOUT_MS"` // time limit for repeated attempts to verify a hash at the ubirch verification service in milliseconds + VerifyFromKnownIdentitiesOnly bool `json:"verifyFromKnownIdentitiesOnly" envconfig:"VERIFY_FROM_KNOWN_IDENTITIES_ONLY"` // flag to determine if a public key should be retrieved from the ubirch identity service in case of incoming verification request for UPP from unknown identity + KeyService string // key service URL (set automatically) + IdentityService string // identity service URL (set automatically) + Niomon string // authentication service URL (set automatically) + VerifyService string // verification service URL (set automatically) + SecretBytes32 []byte // the decoded 32 byte key store secret for database (set automatically) } func (c *Config) Load(configDir, filename string) error { @@ -111,6 +130,7 @@ func (c *Config) Load(configDir, filename string) error { if c.Debug { log.SetLevel(log.DebugLevel) } + if c.LogTextFormat { log.SetFormatter(&log.TextFormatter{FullTimestamp: true, TimestampFormat: "2006-01-02 15:04:05.000 -0700"}) } @@ -125,10 +145,13 @@ func (c *Config) Load(configDir, filename string) error { return err } + // set defaults c.setDefaultCSR() c.setDefaultTLS(configDir) c.setDefaultCORS() + c.setDefaultSQLite(configDir) c.setKeyDerivationParams() + c.setDefaultTimeouts() return c.setDefaultURLs() } @@ -159,12 +182,44 @@ func (c *Config) loadFile(filename string) error { } func (c *Config) checkMandatory() error { + var missingConfig bool + if len(c.SecretBytes32) != secretLength32 { - return fmt.Errorf("secret for aes-256 key encryption ('secret32') length must be %d bytes (is %d)", secretLength32, len(c.SecretBytes32)) + missingConfig = true + log.Errorf("secret for aes-256 key encryption ('secret32' / 'UBIRCH_SECRET32') length must be %d bytes (is %d)", secretLength32, len(c.SecretBytes32)) + } + + if len(c.DbDriver) == 0 { + missingConfig = true + log.Errorf("missing 'dbDriver' / 'UBIRCH_DB_DRIVER' in configuration (\"%s\" | \"%s\")", postgresDriver, sqliteDriver) + } else if c.DbDriver != postgresDriver && c.DbDriver != sqliteDriver { + missingConfig = true + 
log.Errorf("invalid value for 'dbDriver' / 'UBIRCH_DB_DRIVER' in configuration: \"%s\", expected \"%s\" or \"%s\"", c.DbDriver, postgresDriver, sqliteDriver) + } else if c.DbDriver == postgresDriver && len(c.DbDSN) == 0 { + missingConfig = true + log.Errorf("missing 'dbDSN' / 'UBIRCH_DB_DSN' for %s in configuration", c.DbDriver) + } + + if (c.EnableRegistrationEndpoint || c.EnableCSRCreationEndpoint || c.EnableDeactivationEndpoint) && + len(c.StaticAuth) == 0 { + missingConfig = true + log.Errorf("missing 'staticAuth' / 'UBIRCH_STATIC_AUTH' in configuration") + } + + if !c.EnableRegistrationEndpoint { + log.Warnf("identity registration endpoint disabled. To enable, set 'enableRegistrationEndpoint' / 'UBIRCH_ENABLE_REGISTRATION_ENDPOINT' to true") + } + + if !c.EnableCSRCreationEndpoint { + log.Warnf("CSR creation endpoint disabled. To enable, set 'enableCSRCreationEndpoint' / 'UBIRCH_ENABLE_CSR_CREATION_ENDPOINT' to true") + } + + if !c.EnableDeactivationEndpoint { + log.Warnf("key deactivation endpoint disabled. To enable, set 'enableDeactivationEndpoint' / 'UBIRCH_ENABLE_DEACTIVATION_ENDPOINT' to true") } - if len(c.RegisterAuth) == 0 { - return fmt.Errorf("auth token for identity registration ('registerAuth') wasn't set") + if missingConfig { + return fmt.Errorf("missing mandatory configuration") } return nil @@ -216,6 +271,15 @@ func (c *Config) setDefaultCORS() { } } +func (c *Config) setDefaultSQLite(configDir string) { + if c.DbDriver == sqliteDriver { + if c.DbDSN == "" { + c.DbDSN = defaultSQLiteName + } + c.DbDSN = filepath.Join(configDir, c.DbDSN) + } +} + func (c *Config) setKeyDerivationParams() { if c.KdParamMemMiB == 0 { c.KdParamMemMiB = defaultKeyDerivationParamMemory @@ -238,6 +302,24 @@ func (c *Config) setKeyDerivationParams() { } } +func (c *Config) setDefaultTimeouts() { + if c.IdentityServiceTimeoutMs == 0 { + c.IdentityServiceTimeoutMs = defaultIdentityServiceTimeoutMs + } + + if c.AuthServiceTimeoutMs == 0 { + c.AuthServiceTimeoutMs = defaultAuthServiceTimeoutMs + } + + if c.VerifyServiceTimeoutMs == 0 { + c.VerifyServiceTimeoutMs = defaultVerifyServiceTimeoutMs + } + + if c.VerificationTimeoutMs == 0 { + c.VerificationTimeoutMs = defaultVerificationTimeoutMs + } +} + func (c *Config) setDefaultURLs() error { if c.Env == "" { c.Env = PROD_STAGE diff --git a/main/config/config_test.go b/main/config/config_test.go index 12469403..baf451f7 100644 --- a/main/config/config_test.go +++ b/main/config/config_test.go @@ -6,7 +6,7 @@ import ( "testing" ) -const expectedConfig = `{"devices":null,"secret":"MTIzNDU2Nzg5MDU2Nzg5MA==","secret32":"VsCwmGssk7Ho2APyq1reGAKkB/+e8GlRfhM3NbYQWPU=","registerAuth":"test123","env":"","postgresDSN":"","dbMaxConns":0,"TCP_addr":"","TLS":false,"TLSCertFile":"","TLSKeyFile":"","CORS":false,"CORS_origins":null,"CSR_country":"","CSR_organization":"","debug":false,"logTextFormat":false,"kdMaxTotalMemMiB":0,"kdParamMemMiB":0,"kdParamTime":0,"kdParamParallelism":0,"kdParamKeyLen":0,"kdParamSaltLen":0,"kdUpdateParams":false,"KeyService":"","IdentityService":"","Niomon":"","VerifyService":"","SecretBytes32":null}` +const expectedConfig =
diff --git a/main/config/config_test.go b/main/config/config_test.go
index 12469403..baf451f7 100644
--- a/main/config/config_test.go
+++ b/main/config/config_test.go
@@ -6,7 +6,7 @@ import (
 	"testing"
 )
 
-const expectedConfig = `{"devices":null,"secret":"MTIzNDU2Nzg5MDU2Nzg5MA==","secret32":"VsCwmGssk7Ho2APyq1reGAKkB/+e8GlRfhM3NbYQWPU=","registerAuth":"test123","env":"","postgresDSN":"","dbMaxConns":0,"TCP_addr":"","TLS":false,"TLSCertFile":"","TLSKeyFile":"","CORS":false,"CORS_origins":null,"CSR_country":"","CSR_organization":"","debug":false,"logTextFormat":false,"kdMaxTotalMemMiB":0,"kdParamMemMiB":0,"kdParamTime":0,"kdParamParallelism":0,"kdParamKeyLen":0,"kdParamSaltLen":0,"kdUpdateParams":false,"KeyService":"","IdentityService":"","Niomon":"","VerifyService":"","SecretBytes32":null}`
+const expectedConfig = `{"devices":null,"secret":"MTIzNDU2Nzg5MDU2Nzg5MA==","secret32":"VsCwmGssk7Ho2APyq1reGAKkB/+e8GlRfhM3NbYQWPU=","staticAuth":"test123","enableRegistrationEndpoint":false,"enableCSRCreationEndpoint":false,"enableDeactivationEndpoint":false,"env":"","dbDriver":"","dbDSN":"","dbMaxConns":0,"TCP_addr":"","TLS":false,"TLSCertFile":"","TLSKeyFile":"","CORS":false,"CORS_origins":null,"CSR_country":"","CSR_organization":"","debug":false,"logTextFormat":false,"logKnownIdentities":false,"kdMaxTotalMemMiB":0,"kdParamMemMiB":0,"kdParamTime":0,"kdParamParallelism":0,"kdParamKeyLen":0,"kdParamSaltLen":0,"kdUpdateParams":false,"identityServiceTimeoutMs":0,"authServiceTimeoutMs":0,"verifyServiceTimeoutMs":0,"verificationTimeoutMs":0,"verifyFromKnownIdentitiesOnly":false,"KeyService":"","IdentityService":"","Niomon":"","VerifyService":"","SecretBytes32":null}`
 
 func TestConfig(t *testing.T) {
 	configBytes := []byte(expectedConfig)
diff --git a/main/config/example.env b/main/config/example.env
index d6703ef4..6f3d8291 100644
--- a/main/config/example.env
+++ b/main/config/example.env
@@ -1,10 +1,13 @@
-UBIRCH_DEVICES=b07c32c6-4525-43f8-ab94-9383bf585ef0:ad3e073b-9ead-437c-9e09-853e9a508dca,e1aead08-1fcb-47b3-bf2c-d3343cb979da:cefa0c34-8448-44f2-9330-27c66519a2d3,8a70ad8b-a564-4e58-9a3b-224ac0f0153f:f728ddb3-d504-4925-b216-b7842a2f1a6b
-UBIRCH_SECRET=MTIzNDU2Nzg5MDEyMzQ1Ng==
 UBIRCH_SECRET32=sdSjtMh6C2oNgsiVcPx89RgcNYl8L6R9PhWU3iGIL+k=
-UBIRCH_REGISTERAUTH=S3GyUaoZ8CuElP8NM/NzSTPdO3ABREIsJT1Hie8esb8=
-UBIRCH_ENV=prod
-UBIRCH_POSTGRES_DSN=postgres://:@:5432/
+UBIRCH_STATIC_AUTH=qlJIjT2asJD23Y7toZczJriPTXd0GAmFUYAjHl0PcnA=
+UBIRCH_ENABLE_REGISTRATION_ENDPOINT=true
+UBIRCH_ENABLE_CSR_CREATION_ENDPOINT=true
+UBIRCH_ENABLE_DEACTIVATION_ENDPOINT=true
+UBIRCH_DB_DRIVER=postgres
+UBIRCH_DB_DSN=postgres://:@:5432/
 UBIRCH_DB_MAX_CONNS=20
+UBIRCH_DEVICES=b07c32c6-4525-43f8-ab94-9383bf585ef0:ad3e073b-9ead-437c-9e09-853e9a508dca,e1aead08-1fcb-47b3-bf2c-d3343cb979da:cefa0c34-8448-44f2-9330-27c66519a2d3,8a70ad8b-a564-4e58-9a3b-224ac0f0153f:f728ddb3-d504-4925-b216-b7842a2f1a6b
+UBIRCH_ENV=prod
 UBIRCH_TCP_ADDR=:8080
 UBIRCH_TLS=true
 UBIRCH_TLS_CERTFILE=certs/cert.pem
@@ -14,10 +17,16 @@ UBIRCH_CORS_ORIGINS=https://foo.com,https://*.bar.com
 UBIRCH_CSR_COUNTRY=DE
 UBIRCH_CSR_ORGANIZATION=ubirch GmbH
 UBIRCH_DEBUG=true
+UBIRCH_LOGTEXTFORMAT=true
 UBIRCH_KD_MAX_TOTAL_MEM_MIB=10
 UBIRCH_KD_PARAM_MEM_MIB=1
 UBIRCH_KD_PARAM_TIME=100
 UBIRCH_KD_PARAM_PARALLELISM=4
 UBIRCH_KD_PARAM_KEY_LEN=32
 UBIRCH_KD_PARAM_SALT_LEN=24
-UBIRCH_KD_UPDATE_PARAMS=true
\ No newline at end of file
+UBIRCH_KD_UPDATE_PARAMS=true
+UBIRCH_IDENTITY_SERVICE_TIMEOUT_MS=10000
+UBIRCH_AUTH_SERVICE_TIMEOUT_MS=2000
+UBIRCH_VERIFY_SERVICE_TIMEOUT_MS=600
+UBIRCH_VERIFICATION_TIMEOUT_MS=2000
+UBIRCH_VERIFY_FROM_KNOWN_IDENTITIES_ONLY=true
\ No newline at end of file
diff --git a/main/config/example_config.json b/main/config/example_config.json
index ebac326d..b057e18d 100644
--- a/main/config/example_config.json
+++ b/main/config/example_config.json
@@ -1,15 +1,18 @@
 {
+  "secret32": "sdSjtMh6C2oNgsiVcPx89RgcNYl8L6R9PhWU3iGIL+k=",
+  "staticAuth": "qlJIjT2asJD23Y7toZczJriPTXd0GAmFUYAjHl0PcnA=",
+  "enableRegistrationEndpoint": true,
+  "enableCSRCreationEndpoint": true,
+  "enableDeactivationEndpoint": true,
+  "dbDriver": "postgres",
+  "dbDSN": "postgres://:@:5432/",
+  "dbMaxConns": 20,
   "devices": {
     "b07c32c6-4525-43f8-ab94-9383bf585ef0": "ad3e073b-9ead-437c-9e09-853e9a508dca",
     "e1aead08-1fcb-47b3-bf2c-d3343cb979da": "cefa0c34-8448-44f2-9330-27c66519a2d3",
     "8a70ad8b-a564-4e58-9a3b-224ac0f0153f": "f728ddb3-d504-4925-b216-b7842a2f1a6b"
   },
-  "secret": "MTIzNDU2Nzg5MDEyMzQ1Ng==",
-  "secret32": "sdSjtMh6C2oNgsiVcPx89RgcNYl8L6R9PhWU3iGIL+k=",
-  "registerAuth": "S3GyUaoZ8CuElP8NM/NzSTPdO3ABREIsJT1Hie8esb8=",
   "env": "prod",
-  "postgresDSN": "postgres://:@:5432/",
-  "dbMaxConns": 20,
   "TCP_addr": ":8080",
   "TLS": true,
   "TLSCertFile": "certs/cert.pem",
@@ -29,5 +32,10 @@
   "kdParamParallelism": 4,
   "kdParamKeyLen": 32,
   "kdParamSaltLen": 24,
-  "kdUpdateParams": true
+  "kdUpdateParams": true,
+  "identityServiceTimeoutMs": 10000,
+  "authServiceTimeoutMs": 2000,
+  "verifyServiceTimeoutMs": 600,
+  "verificationTimeoutMs": 2000,
+  "verifyFromKnownIdentitiesOnly": true
 }
diff --git a/main/ent/identity.go b/main/ent/identity.go
index 9c073226..5566fd80 100644
--- a/main/ent/identity.go
+++ b/main/ent/identity.go
@@ -9,3 +9,8 @@ type Identity struct {
 	Signature []byte
 	AuthToken string
 }
+
+type ExternalIdentity struct {
+	Uid       uuid.UUID
+	PublicKey []byte
+}
diff --git a/main/go.mod b/main/go.mod
index 3348ef8c..6be91f78 100644
--- a/main/go.mod
+++ b/main/go.mod
@@ -8,13 +8,14 @@ require (
 	github.com/google/uuid v1.3.0
 	github.com/kelseyhightower/envconfig v1.4.0
 	github.com/lib/pq v1.10.7
-	github.com/prometheus/client_golang v1.13.0
+	github.com/prometheus/client_golang v1.14.0
 	github.com/sirupsen/logrus v1.9.0
 	github.com/stretchr/testify v1.7.0
 	github.com/ubirch/ubirch-protocol-go/ubirch/v2 v2.2.6-0.20220930133231-a5f175fc85ef
 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a
-	golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be
-	golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0
+	golang.org/x/crypto v0.3.0
+	golang.org/x/sync v0.1.0
+	modernc.org/sqlite v1.19.4
 )
 
 require (
@@ -22,14 +23,29 @@
 	github.com/cespare/xxhash/v2 v2.1.2 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/golang/protobuf v1.5.2 // indirect
-	github.com/matttproud/golang_protobuf_extensions v1.0.2 // indirect
+	github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
+	github.com/mattn/go-isatty v0.0.16 // indirect
+	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/prometheus/client_model v0.2.0 // indirect
+	github.com/prometheus/client_model v0.3.0 // indirect
 	github.com/prometheus/common v0.37.0 // indirect
 	github.com/prometheus/procfs v0.8.0 // indirect
+	github.com/remyoudompheng/bigfft v0.0.0-20220927061507-ef77025ab5aa // indirect
+	github.com/stretchr/objx v0.1.1 // indirect
 	github.com/ubirch/go.crypto v0.1.2 // indirect
 	github.com/ugorji/go/codec v1.2.7 // indirect
-	golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec // indirect
+	golang.org/x/mod v0.7.0 // indirect
+	golang.org/x/sys v0.2.0 // indirect
+	golang.org/x/tools v0.3.0 // indirect
 	google.golang.org/protobuf v1.28.1 // indirect
 	gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect
+	lukechampine.com/uint128 v1.2.0 // indirect
+	modernc.org/cc/v3 v3.40.0 // indirect
+	modernc.org/ccgo/v3 v3.16.13 // indirect
+	modernc.org/libc v1.21.4 // indirect
+	modernc.org/mathutil v1.5.0 // indirect
+	modernc.org/memory v1.4.0 // indirect
+	modernc.org/opt v0.1.3 // indirect
+	modernc.org/strutil v1.1.3 // indirect
+	modernc.org/token v1.1.0 // indirect
 )
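The `modernc.org/sqlite` module added to `go.mod` above is a pure-Go (CGo-free) SQLite driver that registers itself with `database/sql` under the driver name `sqlite`. The standalone sketch below (not part of the patch) shows the conventional way to open it; the file name is an illustrative assumption.

```go
// Standalone sketch, not part of the patch: conventional use of the
// modernc.org/sqlite driver added to go.mod, via database/sql.
package main

import (
	"database/sql"
	"log"

	_ "modernc.org/sqlite" // blank import registers the "sqlite" driver
)

func main() {
	// "sqlite.db" is an illustrative file name; driver options can be appended
	// to the DSN as a query string (see the driver documentation).
	db, err := sql.Open("sqlite", "sqlite.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
	log.Println("SQLite database opened")
}
```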
diff --git a/main/go.sum b/main/go.sum
index feb34edc..31162563 100644
--- a/main/go.sum
+++ b/main/go.sum
@@ -54,6 +54,7 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
 github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -114,7 +115,7 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
@@ -142,6 +143,8 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
 github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
 github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=
 github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
@@ -155,9 +158,12 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw=
 github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ=
+github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI=
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
-github.com/matttproud/golang_protobuf_extensions v1.0.2 h1:hAHbPm5IJGijwng3PWk09JkG9WeqChjprR5s9bBZ+OM=
-github.com/matttproud/golang_protobuf_extensions v1.0.2/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
+github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
+github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
@@ -175,13 +181,14 @@ github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5Fsn
 github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
 github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
 github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY=
-github.com/prometheus/client_golang v1.13.0 h1:b71QUfeo5M8gq2+evJdTPfZhYMAU0uKPkyPJ7TPsloU=
-github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ=
+github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw=
+github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y=
 github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
 github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
 github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4=
+github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
 github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
 github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
 github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
@@ -195,6 +202,9 @@ github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1
 github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
 github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo=
 github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4=
+github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
+github.com/remyoudompheng/bigfft v0.0.0-20220927061507-ef77025ab5aa h1:tEkEyxYeZ43TR55QU/hsIt9aRGBxbgGuz9CGykjvogY=
+github.com/remyoudompheng/bigfft v0.0.0-20220927061507-ef77025ab5aa/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
 github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
@@ -202,6 +212,7 @@ github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrf
 github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
 github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
@@ -232,8 +243,8 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be h1:fmw3UbQh+nxngCAHrDCCztao/kbYFnWjoqop8dHx05A=
-golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
+golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -264,6 +275,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB
 golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
 golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
+golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -311,8 +324,8 @@ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0 h1:cu5kTvlzcw1Q5S9f5ip1/cpiB4nXvw1XYzFPGgzLUOY=
-golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -352,8 +365,9 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec h1:BkDtF2Ih9xZ7le9ndzTA7KJow28VbQW3odyk/8drmuI=
-golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -406,6 +420,8 @@ golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roY
 golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
 golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
 golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.3.0 h1:SrNbZl6ECOS1qFzgTdQfWXZM9XBkiA6tkFrH9YSTPHM=
+golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -508,6 +524,30 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
 honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
 honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
 honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+lukechampine.com/uint128 v1.2.0 h1:mBi/5l91vocEN8otkC5bDLhi2KdCticRiwbdB0O+rjI=
+lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
+modernc.org/cc/v3 v3.40.0 h1:P3g79IUS/93SYhtoeaHW+kRCIrYaxJ27MFPv+7kaTOw=
+modernc.org/cc/v3 v3.40.0/go.mod h1:/bTg4dnWkSXowUO6ssQKnOV0yMVxDYNIsIrzqTFDGH0=
+modernc.org/ccgo/v3 v3.16.13 h1:Mkgdzl46i5F/CNR/Kj80Ri59hC8TKAhZrYSaqvkwzUw=
+modernc.org/ccgo/v3 v3.16.13/go.mod h1:2Quk+5YgpImhPjv2Qsob1DnZ/4som1lJTodubIcoUkY=
+modernc.org/ccorpus v1.11.6 h1:J16RXiiqiCgua6+ZvQot4yUuUy8zxgqbqEEUuGPlISk=
+modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM=
+modernc.org/libc v1.21.4 h1:CzTlumWeIbPV5/HVIMzYHNPCRP8uiU/CWiN2gtd/Qu8=
+modernc.org/libc v1.21.4/go.mod h1:przBsL5RDOZajTVslkugzLBj1evTue36jEomFQOoYuI=
+modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ=
+modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
+modernc.org/memory v1.4.0 h1:crykUfNSnMAXaOJnnxcSzbUGMqkLWjklJKkBK2nwZwk=
+modernc.org/memory v1.4.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU=
+modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
+modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
+modernc.org/sqlite v1.19.4 h1:nlPIDqumn6/mSvs7T5C8MNYEuN73sISzPdKtMdURpUI=
+modernc.org/sqlite v1.19.4/go.mod h1:x/yZNb3h5+I3zGQSlwIv4REL5eJhiRkUH5MReogAeIc=
+modernc.org/strutil v1.1.3 h1:fNMm+oJklMGYfU9Ylcywl0CO5O6nTfaowNsh2wpPjzY=
+modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw=
+modernc.org/tcl v1.15.0 h1:oY+JeD11qVVSgVvodMJsu7Edf8tr5E/7tuhF5cNYz34=
+modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
+modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
+modernc.org/z v1.7.0 h1:xkDw/KepgEjeizO2sNco+hqYkU12taxQFqPEmgm1GWE=
 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
 rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
 rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
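The main.go changes below wire the new millisecond timeout settings into the service clients by converting them to `time.Duration`. The standalone sketch here (not part of the patch) shows only that conversion pattern against a plain net/http client; it does not reflect the project's clients package internals.

```go
// Standalone sketch, not part of the patch: converting a millisecond config
// value (e.g. UBIRCH_AUTH_SERVICE_TIMEOUT_MS=2000) into a time.Duration,
// applied here to a plain net/http client for illustration.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	authServiceTimeoutMs := 2000 // value as it would come from the configuration

	timeout := time.Duration(authServiceTimeoutMs) * time.Millisecond
	httpClient := &http.Client{Timeout: timeout}

	fmt.Println(timeout, httpClient.Timeout) // 2s 2s
}
```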
diff --git a/main/main.go b/main/main.go
index 772ae617..c59a7761 100644
--- a/main/main.go
+++ b/main/main.go
@@ -16,9 +16,8 @@ package main
 
 import (
 	"fmt"
-	"net/http"
 	"os"
-	"path"
+	"time"
 
 	"github.com/ubirch/ubirch-client-go/main/adapters/clients"
 	"github.com/ubirch/ubirch-client-go/main/adapters/handlers"
@@ -28,7 +27,6 @@ import (
 
 	log "github.com/sirupsen/logrus"
 	h "github.com/ubirch/ubirch-client-go/main/adapters/http_server"
-	prom "github.com/ubirch/ubirch-client-go/main/prometheus"
 )
 
 var (
@@ -43,13 +41,11 @@ func main() {
 		serviceName = "ubirch-client"
 		configFile  = "config.json"
 		MigrateArg  = "--migrate"
-		InitArg     = "--init-identities-conf"
 	)
 
 	var (
 		configDir       string
 		migrate         bool
-		initIdentities  bool
 		serverID        = fmt.Sprintf("%s/%s", serviceName, Version)
 		readinessChecks []func() error
 	)
@@ -67,8 +63,6 @@ func main() {
 		log.Infof("arg #%d: %s", i+1, arg)
 		if arg == MigrateArg {
 			migrate = true
-		} else if arg == InitArg {
-			initIdentities = true
 		} else {
 			configDir = arg
 		}
@@ -111,8 +105,11 @@ func main() {
 	client := &clients.UbirchServiceClient{}
 	client.KeyServiceURL = conf.KeyService
 	client.IdentityServiceURL = conf.IdentityService
+	client.IdentityServiceTimeout = time.Duration(conf.IdentityServiceTimeoutMs) * time.Millisecond
 	client.AuthServiceURL = conf.Niomon
+	client.AuthServiceTimeout = time.Duration(conf.AuthServiceTimeoutMs) * time.Millisecond
 	client.VerifyServiceURL = conf.VerifyService
+	client.VerifyServiceTimeout = time.Duration(conf.VerifyServiceTimeoutMs) * time.Millisecond
 
 	idHandler := &handlers.IdentityHandler{
 		Protocol:            protocol,
@@ -123,82 +120,31 @@ func main() {
 		SubjectOrganization: conf.CSR_Organization,
 	}
 
-	if initIdentities {
-		err = idHandler.InitIdentities(conf.Devices)
-		if err != nil {
-			log.Fatalf("initialization of identities from configuration failed: %v", err)
-		}
-		log.Infof("successfully initialized identities from configuration")
-		os.Exit(0)
+	err = idHandler.InitIdentities(conf.Devices)
+	if err != nil {
+		log.Fatalf("initialization of identities from configuration failed: %v", err)
 	}
 
 	signer := handlers.Signer{
-		Protocol:          protocol,
+		SignerProtocol:    protocol,
 		SendToAuthService: client.SendToAuthService,
 	}
 
 	verifier := handlers.Verifier{
-		Protocol:          protocol,
+		VerifierProtocol:  protocol,
 		RequestHash:       client.RequestHash,
 		RequestPublicKeys: client.RequestPublicKeys,
-		VerifyFromKnownIdentitiesOnly: false, // TODO: make configurable
+		VerifyFromKnownIdentitiesOnly: conf.VerifyFromKnownIdentitiesOnly,
+		VerificationTimeout:           time.Duration(conf.VerificationTimeoutMs) * time.Millisecond,
 	}
 
 	// set up HTTP server
-	httpServer := h.HTTPServer{
-		Router:   h.NewRouter(),
-		Addr:     conf.TCP_addr,
-		TLS:      conf.TLS,
-		CertFile: conf.TLS_CertFile,
-		KeyFile:  conf.TLS_KeyFile,
-	}
-
-	if conf.CORS && config.IsDevelopment { // never enable CORS on production stage
-		httpServer.SetUpCORS(conf.CORS_Origins, conf.Debug)
-	}
-
-	// set up metrics
-	httpServer.Router.Method(http.MethodGet, h.MetricsEndpoint, prom.Handler())
-
-	// set up endpoint for identity registration
-	httpServer.Router.Put(h.RegisterEndpoint, h.Register(conf.RegisterAuth, idHandler.InitIdentity))
-
-	// set up endpoint for key status updates (de-/re-activation)
-	httpServer.Router.Put(h.ActiveUpdateEndpoint, h.UpdateActive(conf.RegisterAuth, idHandler.DeactivateKey, idHandler.ReactivateKey))
-
-	// set up endpoint for CSRs
-	fetchCSREndpoint := path.Join(h.UUIDPath, h.CSREndpoint) // //csr
-	httpServer.Router.Get(fetchCSREndpoint, h.FetchCSR(conf.RegisterAuth, idHandler.CreateCSR))
-
-	// set up endpoint for chaining
-	httpServer.AddServiceEndpoint(h.ServerEndpoint{
-		Path: h.UUIDPath,
-		Service: &h.ChainingService{
-			CheckAuth: protocol.CheckAuth,
-			Chain:     signer.Chain,
-		},
-	})
-
-	// set up endpoint for signing
-	httpServer.AddServiceEndpoint(h.ServerEndpoint{
-		Path: path.Join(h.UUIDPath, h.OperationPath),
-		Service: &h.SigningService{
-			CheckAuth: protocol.CheckAuth,
-			Sign:      signer.Sign,
-		},
-	})
-
-	// set up endpoint for verification
-	httpServer.AddServiceEndpoint(h.ServerEndpoint{
-		Path: h.VerifyPath,
-		Service: &h.VerificationService{
-			Verify: verifier.Verify,
-		},
-	})
-
-	// set up endpoints for liveness and readiness checks
-	httpServer.Router.Get(h.LivenessCheckEndpoint, h.Health(serverID))
-	httpServer.Router.Get(h.ReadinessCheckEndpoint, h.Ready(serverID, readinessChecks))
+	httpServer := h.InitHTTPServer(conf,
+		idHandler.InitIdentity, idHandler.CreateCSR,
+		protocol.CheckAuth, signer.Sign,
+		verifier.Verify, verifier.VerifyOffline,
+		idHandler.DeactivateKey, idHandler.ReactivateKey,
+		serverID, readinessChecks)
 
 	// start HTTP server (blocks until SIGINT or SIGTERM is received)
 	if err = httpServer.Serve(); err != nil {