Merge pull request #31 from datalyze-solutions/sync-before-get

Tobias Trabelsi 2023-10-02 20:35:41 +02:00 committed by GitHub
commit 70546b7484
9 changed files with 121 additions and 9 deletions

Makefile (new file, 27 lines)

@@ -0,0 +1,27 @@
deployment_name ?= bitwarden-crd-operator
namespace ?= bitwarden-crd-operator
label_filter = -l app.kubernetes.io/instance=bitwarden-crd-operator -l app.kubernetes.io/name=bitwarden-crd-operator

create-namespace:
	kubectl create namespace ${namespace}

dev:
	skaffold dev -n ${namespace}

run:
	skaffold run -n ${namespace}

pods:
	kubectl -n ${namespace} get pods

desc-pods:
	kubectl -n ${namespace} describe pod ${label_filter}

delete-pods-force:
	kubectl -n ${namespace} delete pod ${label_filter} --force

exec:
	kubectl -n ${namespace} exec -it deployment/${deployment_name} -- sh

logs:
	kubectl -n ${namespace} logs -f --tail 30 deployment/${deployment_name}

README.md

@@ -171,6 +171,11 @@ type: Opaque
please note that the rendering engine for this template is jinja2, with the addition of a custom `bitwarden_lookup` function, so there are more possibilities to inject here.

## Configuration parameters

The operator uses the Bitwarden CLI in the background and does not talk to the Bitwarden API directly. The CLI mirrors the credential store locally but does not sync it on every get request. Instead, each secret is synced on a fixed interval, by default every 15 minutes (900 seconds). You can adjust this interval by setting `BW_SYNC_INTERVAL` in the values. If your secrets update very frequently, you can force the operator to sync before each get by setting `BW_FORCE_SYNC="true"`, but be aware that you may run into rate limits if you sync too frequently.
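In effect, the sync decision added in `utils/utils.py` (see below) boils down to this check; a simplified sketch:

import os
from datetime import datetime, timedelta
from dateutil.tz import tzutc

def needs_sync(last_sync: datetime, force: bool = False) -> bool:
    # Sync if forced per call, forced globally via BW_FORCE_SYNC,
    # or if the last sync is older than BW_SYNC_INTERVAL seconds.
    interval = timedelta(seconds=float(os.environ.get('BW_SYNC_INTERVAL', 900)))
    global_force = os.environ.get('BW_FORCE_SYNC', 'false').lower() == 'true'
    return force or global_force or (datetime.now(tzutc()) - last_sync) >= interval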
## Short Term Roadmap
- [ ] support more types

Chart.yaml

@@ -4,7 +4,7 @@ description: Deploy the Bitwarden CRD Operator

type: application

-version: "v0.7.6"
+version: "v0.8.0"
appVersion: "0.6.5"
@@ -95,6 +95,8 @@ annotations:
  artifacthub.io/operator: "true"
  artifacthub.io/containsSecurityUpdates: "false"
  artifacthub.io/changes: |
    - kind: changed
      description: "Take care to sync with bitwarden before getting a secret, added BW_SYNC_INTERVAL and BW_FORCE_SYNC envs to control sync."
    - kind: changed
      description: "Updated alpine to 3.18.3"
    - kind: changed

values.yaml

@@ -15,6 +15,10 @@ nameOverride: ""
fullnameOverride: ""

# env:
#   - name: BW_FORCE_SYNC
#     value: "false"
#   - name: BW_SYNC_INTERVAL
#     value: "900"
#   - name: BW_HOST
#     value: "define_it"
#   - name: BW_CLIENTID
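Uncommenting `BW_FORCE_SYNC` or `BW_SYNC_INTERVAL` here passes them to the operator container as environment variables; the defaults shown ("false" and "900") match the fallbacks hard-coded in `utils/utils.py`.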

skaffold.yaml (new file, 17 lines)

@ -0,0 +1,17 @@
apiVersion: skaffold/v4beta5
kind: Config
metadata:
name: bitwarden-crd-operator
build:
artifacts:
- image: ghcr.io/lerentis/bitwarden-crd-operator
docker:
dockerfile: Dockerfile
deploy:
helm:
releases:
- name: bitwarden-crd-operator
chartPath: charts/bitwarden-crd-operator
valuesFiles:
- env/values.yaml
version: v0.7.4
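Together with the new Makefile, this gives the `make dev` and `make run` targets a working pipeline: skaffold builds the image and deploys the chart with the values from `env/values.yaml` into the chosen namespace.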

registry-credential handler

@@ -3,7 +3,7 @@ import kubernetes
import base64
import json

-from utils.utils import unlock_bw, get_secret_from_bitwarden
+from utils.utils import unlock_bw, get_secret_from_bitwarden, bw_sync_interval


def create_dockerlogin(
@@ -74,7 +74,7 @@ def create_managed_registry_secret(spec, name, namespace, logger, **kwargs):
@kopf.on.update('registry-credential.lerentis.uploadfilter24.eu')
-@kopf.timer('registry-credential.lerentis.uploadfilter24.eu', interval=900)
+@kopf.timer('registry-credential.lerentis.uploadfilter24.eu', interval=bw_sync_interval)
def update_managed_registry_secret(
        spec,
        status,

bitwarden-secret handler

@@ -3,8 +3,7 @@ import kubernetes
import base64
import json

-from utils.utils import unlock_bw, get_secret_from_bitwarden, parse_login_scope, parse_fields_scope
+from utils.utils import unlock_bw, get_secret_from_bitwarden, parse_login_scope, parse_fields_scope, bw_sync_interval


def create_kv(secret, secret_json, content_def):
    secret.type = "Opaque"
@@ -67,7 +66,7 @@ def create_managed_secret(spec, name, namespace, logger, body, **kwargs):
@kopf.on.update('bitwarden-secret.lerentis.uploadfilter24.eu')
-@kopf.timer('bitwarden-secret.lerentis.uploadfilter24.eu', interval=900)
+@kopf.timer('bitwarden-secret.lerentis.uploadfilter24.eu', interval=bw_sync_interval)
def update_managed_secret(
        spec,
        status,

bitwarden-template handler

@@ -3,7 +3,7 @@ import base64
import kubernetes
import json

-from utils.utils import unlock_bw
+from utils.utils import unlock_bw, bw_sync_interval
from lookups.bitwarden_lookup import bitwarden_lookup

from jinja2 import Environment, BaseLoader
@@ -58,7 +58,7 @@ def create_managed_secret(spec, name, namespace, logger, body, **kwargs):
@kopf.on.update('bitwarden-template.lerentis.uploadfilter24.eu')
-@kopf.timer('bitwarden-template.lerentis.uploadfilter24.eu', interval=900)
+@kopf.timer('bitwarden-template.lerentis.uploadfilter24.eu', interval=bw_sync_interval)
def update_managed_secret(
        spec,
        status,
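All three handlers now take their resync interval from `bw_sync_interval`, which is read from the environment once at import time, so a changed `BW_SYNC_INTERVAL` only takes effect after an operator restart. A minimal standalone sketch of the pattern (the resource name here is hypothetical):

import os
import kopf

# Evaluated once at import time, exactly like bw_sync_interval in utils/utils.py.
sync_interval = float(os.environ.get('BW_SYNC_INTERVAL', 900))

@kopf.timer('example-resource.example.com', interval=sync_interval)
def resync(spec, name, namespace, logger, **kwargs):
    # Periodic reconcile: re-read the secret from Bitwarden and update it in-cluster.
    logger.info(f"resyncing {namespace}/{name}")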

utils/utils.py

@@ -1,16 +1,73 @@
import os
import json
import subprocess
import distutils.util

from datetime import datetime, timezone, timedelta
from dateutil import parser
from dateutil.tz import tzutc

tzinfos = {"CDT": tzutc()}

bw_sync_interval = float(os.environ.get(
    'BW_SYNC_INTERVAL', 900))


class BitwardenCommandException(Exception):
    pass


-def get_secret_from_bitwarden(logger, id):
+def get_secret_from_bitwarden(logger, id, force_sync=False):
+    sync_bw(logger, force=force_sync)
    return command_wrapper(logger, command=f"get item {id}")


def sync_bw(logger, force=False):
    def _sync(logger):
        status_output = command_wrapper(logger, command="sync")
        logger.info(f"Sync successful {status_output}")
        return

    if force:
        _sync(logger)
        return

    last_sync = last_sync_bw(logger)
    now = datetime.now(tzutc())
    sync_interval = timedelta(seconds=bw_sync_interval)
    bw_is_out_of_sync_interval = (now - last_sync) >= sync_interval
    global_force_sync = bool(distutils.util.strtobool(
        os.environ.get('BW_FORCE_SYNC', "false")))
    needs_sync = force or global_force_sync or bw_is_out_of_sync_interval
    logger.debug(f"last_sync: {last_sync}")
    logger.debug(
        f"force: {force}, global_force_sync: {global_force_sync}, "
        f"bw_is_out_of_sync_interval: {bw_is_out_of_sync_interval}, needs_sync: {needs_sync}")
    if needs_sync:
        # _sync already logs on success and returns None, so don't log its result here.
        _sync(logger)


def last_sync_bw(logger):
    null_datetime_string = "0001-01-01T00:00:00.000Z"
    # returns: {"success":true,"data":{"object":"string","data":"2023-09-22T13:50:09.995Z"}}
    last_sync_output = command_wrapper(
        logger, command="sync --last", use_success=False)
    if not last_sync_output or not last_sync_output.get("success"):
        logger.error("Error getting last sync time.")
        return parser.parse(null_datetime_string, tzinfos=tzinfos)
    # in case no sync was done yet, null is returned from the api;
    # fall back to a date long in the past
    last_sync_string = last_sync_output.get(
        "data", {}).get("data") or null_datetime_string
    last_sync = parser.parse(last_sync_string, tzinfos=tzinfos)
    return last_sync


def unlock_bw(logger):
    status_output = command_wrapper(logger, "status", False)
    status = status_output['data']['template']['status']
@@ -32,6 +89,7 @@ def command_wrapper(logger, command, use_success: bool = True):
        shell=True,
        env=system_env)
    out, err = sp.communicate()

    if "DEBUG" in system_env:
        logger.info(out.decode(encoding='UTF-8'))
    resp = json.loads(out.decode(encoding='UTF-8'))
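As a quick sanity check of the timestamp handling above, this standalone snippet parses the exact JSON shape documented in `last_sync_bw` and confirms the result is timezone-aware, so the comparison against `datetime.now(tzutc())` in `sync_bw` is valid:

import json
from dateutil import parser
from dateutil.tz import tzutc

# Payload shape copied from the comment in last_sync_bw.
raw = '{"success":true,"data":{"object":"string","data":"2023-09-22T13:50:09.995Z"}}'
resp = json.loads(raw)
last_sync = parser.parse(resp["data"]["data"], tzinfos={"CDT": tzutc()})
print(last_sync.tzinfo is not None)  # True: safe to subtract from datetime.now(tzutc())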