feat: add time and force sync envs to trigger a sync before getting the secrets

Matthias Ludwig 2023-09-25 11:13:44 +02:00
parent 63e6f8ab7b
commit f0a9258b71
6 changed files with 72 additions and 12 deletions

View File

@@ -171,6 +171,11 @@ type: Opaque
please note that the rendering engine for this template is jinja2, with an addition of a custom `bitwarden_lookup` function, so there are more possibilities to inject here.
+## Configuration parameters
+The operator uses the Bitwarden CLI in the background and does not communicate with the API directly. The CLI mirrors the credential store locally but does not sync it on every get request. Instead, each secret is re-synced every 15 minutes (900 seconds) by default. You can adjust the interval by setting `BW_SYNC_INTERVAL` in the values. If your secrets update very frequently, you can force the operator to sync before each get by setting `BW_FORCE_SYNC="true"`. Note that you might run into rate limits if you sync too frequently.
## Short Term Roadmap
- [ ] support more types
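For reference, a minimal sketch of how the two environment variables documented above are consumed (simplified from the utils.py change further down; the real code parses BW_FORCE_SYNC with distutils.util.strtobool, which also accepts values like "1" and "yes"):

    import os

    # Seconds between CLI syncs; this commit's default is 900 (15 minutes).
    sync_interval = float(os.environ.get("BW_SYNC_INTERVAL", 900))
    # "true" forces a sync before every get request.
    force_sync = os.environ.get("BW_FORCE_SYNC", "false").lower() == "true"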

View File

@@ -15,6 +15,10 @@ nameOverride: ""
fullnameOverride: ""
# env:
+#   - name: BW_FORCE_SYNC
+#     value: "false"
+#   - name: BW_SYNC_INTERVAL
+#     value: "60"
#   - name: BW_HOST
#     value: "define_it"
#   - name: BW_CLIENTID

View File

@@ -3,7 +3,7 @@ import kubernetes
import base64
import json
-from utils.utils import unlock_bw, get_secret_from_bitwarden
+from utils.utils import unlock_bw, get_secret_from_bitwarden, bw_sync_interval
def create_dockerlogin(
@@ -74,7 +74,7 @@ def create_managed_registry_secret(spec, name, namespace, logger, **kwargs):
@kopf.on.update('registry-credential.lerentis.uploadfilter24.eu')
-@kopf.timer('registry-credential.lerentis.uploadfilter24.eu', interval=900)
+@kopf.timer('registry-credential.lerentis.uploadfilter24.eu', interval=bw_sync_interval)
def update_managed_registry_secret(
        spec,
        status,
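The same one-line decorator change recurs in the two files below. A standalone sketch of the pattern (the resource name here is hypothetical): because the decorator argument is evaluated when the module is imported, BW_SYNC_INTERVAL must be set before the operator starts; changing it later requires a restart.

    import os
    import kopf

    # Read once at import time; changing the env var afterwards has no effect.
    bw_sync_interval = float(os.environ.get("BW_SYNC_INTERVAL", 900))

    @kopf.timer('example.lerentis.uploadfilter24.eu', interval=bw_sync_interval)
    def resync(spec, **kwargs):
        ...  # re-fetch the secret from Bitwarden here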

View File

@@ -3,8 +3,7 @@ import kubernetes
import base64
import json
-from utils.utils import unlock_bw, get_secret_from_bitwarden, parse_login_scope, parse_fields_scope
+from utils.utils import unlock_bw, get_secret_from_bitwarden, parse_login_scope, parse_fields_scope, bw_sync_interval
def create_kv(secret, secret_json, content_def):
    secret.type = "Opaque"
@@ -67,7 +66,7 @@ def create_managed_secret(spec, name, namespace, logger, body, **kwargs):
@kopf.on.update('bitwarden-secret.lerentis.uploadfilter24.eu')
-@kopf.timer('bitwarden-secret.lerentis.uploadfilter24.eu', interval=900)
+@kopf.timer('bitwarden-secret.lerentis.uploadfilter24.eu', interval=bw_sync_interval)
def update_managed_secret(
        spec,
        status,

View File

@@ -3,7 +3,7 @@ import base64
import kubernetes
import json
-from utils.utils import unlock_bw
+from utils.utils import unlock_bw, bw_sync_interval
from lookups.bitwarden_lookup import bitwarden_lookup
from jinja2 import Environment, BaseLoader
@@ -58,7 +58,7 @@ def create_managed_secret(spec, name, namespace, logger, body, **kwargs):
@kopf.on.update('bitwarden-template.lerentis.uploadfilter24.eu')
-@kopf.timer('bitwarden-template.lerentis.uploadfilter24.eu', interval=900)
+@kopf.timer('bitwarden-template.lerentis.uploadfilter24.eu', interval=bw_sync_interval)
def update_managed_secret(
        spec,
        status,

View File

@@ -1,20 +1,71 @@
import os
import json
import subprocess
+import distutils.util
+from datetime import datetime, timezone, timedelta
+from dateutil import parser
+from dateutil.tz import tzutc
+
+tzinfos = {"CDT": tzutc()}
+
+bw_sync_interval = float(os.environ.get(
+    'BW_SYNC_INTERVAL', 900))
class BitwardenCommandException(Exception):
    pass
-def get_secret_from_bitwarden(logger, id):
-    sync_bw(logger)
+def get_secret_from_bitwarden(logger, id, force_sync=False):
+    sync_bw(logger, force=force_sync)
    return command_wrapper(logger, command=f"get item {id}")
-def sync_bw(logger):
-    status_output = command_wrapper(logger, command=f"sync")
-    logger.info(f"Sync successful {status_output}")
+def sync_bw(logger, force=False):
+    def _sync(logger):
+        status_output = command_wrapper(logger, command="sync")
+        logger.info(f"Sync successful {status_output}")
+
+    if force:
+        _sync(logger)
+        return
+
+    last_sync = last_sync_bw(logger)
+    now = datetime.now(tzutc())
+    sync_interval = timedelta(seconds=bw_sync_interval)
+    bw_is_out_of_sync_interval = (now - last_sync) >= sync_interval
+    global_force_sync = bool(distutils.util.strtobool(
+        os.environ.get('BW_FORCE_SYNC', "false")))
+    needs_sync = force or global_force_sync or bw_is_out_of_sync_interval
+    logger.debug(f"last_sync: {last_sync}")
+    logger.debug(
+        f"force: {force}, global_force_sync: {global_force_sync}, bw_is_out_of_sync_interval: {bw_is_out_of_sync_interval}, needs_sync: {needs_sync}")
+    if needs_sync:
+        _sync(logger)
+def last_sync_bw(logger):
+    null_datetime_string = "0001-01-01T00:00:00.000Z"
+    # returns: {"success":true,"data":{"object":"string","data":"2023-09-22T13:50:09.995Z"}}
+    last_sync_output = command_wrapper(
+        logger, command="sync --last", use_success=False)
+    if not last_sync_output or not last_sync_output.get("success"):
+        logger.error("Error getting last sync time.")
+        return parser.parse(null_datetime_string, tzinfos=tzinfos)
+    # in case no sync was done yet, null is returned from the API;
+    # fall back to a long-ago date
+    last_sync_string = last_sync_output.get(
+        "data", {}).get("data") or null_datetime_string
+    last_sync = parser.parse(last_sync_string, tzinfos=tzinfos)
+    return last_sync
def unlock_bw(logger):
@@ -38,6 +89,7 @@ def command_wrapper(logger, command, use_success: bool = True):
        shell=True,
        env=system_env)
    out, err = sp.communicate()
+    if "DEBUG" in system_env:
+        logger.info(out.decode(encoding='UTF-8'))
    resp = json.loads(out.decode(encoding='UTF-8'))
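Taken together, a minimal usage sketch of the new code path (the logger can be any object with info/debug/error methods, e.g. from the logging module; the item id is a placeholder):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("bitwarden-operator")

    # Syncs only if the last sync is older than BW_SYNC_INTERVAL
    # (or BW_FORCE_SYNC="true" is set):
    item = get_secret_from_bitwarden(logger, "<item-id>")

    # Bypasses the interval check and always syncs first:
    item = get_secret_from_bitwarden(logger, "<item-id>", force_sync=True)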