Compare commits

...

34 Commits

Author SHA1 Message Date
Langhammer, Jens
34cbf5f702 new release: 0.5.0-beta 2019-10-04 13:55:13 +02:00
Langhammer, Jens
3c6e94b6a8 ci(minor): fix path in bumpversion config 2019-10-04 13:55:12 +02:00
Langhammer, Jens
1cd149c815 policy(minor): fix linting 2019-10-04 13:49:27 +02:00
Langhammer, Jens
4c6f562805 policy(minor): fix deadlock issue 2019-10-04 13:44:26 +02:00
Langhammer, Jens
e59c4ec1c7 root(minor): cleanup, remove unused log 2019-10-04 13:43:47 +02:00
Langhammer, Jens
1169db7530 docker(minor): move docker-related files into separate folder 2019-10-04 12:45:19 +02:00
Langhammer, Jens
1453008796 wsgi(minor): add proper request logging 2019-10-04 12:44:59 +02:00
Langhammer, Jens
2209b6d603 deploy(minor): fix robots.txt not being in the right path
fix path matching in docker compose
2019-10-04 12:01:38 +02:00
Langhammer, Jens
ccbc0384f9 deploy(minor): remove app-gw, add robots.txt 2019-10-04 11:57:41 +02:00
Langhammer, Jens
a48924c896 docker(minor): switch to debian based image so we can use wheels 2019-10-04 11:50:52 +02:00
Langhammer, Jens
dc8d8dd2b6 deploy(minor): add docker-compose file for easy testing 2019-10-04 11:50:26 +02:00
Langhammer, Jens
afca94ceb8 policy(minor): improve loading of policy subclasses 2019-10-04 10:22:06 +02:00
Langhammer, Jens
0b86231a36 *(minor): make better use of structured logging 2019-10-04 10:21:33 +02:00
Langhammer, Jens
c0df1f38b8 *(minor): remove __name__ param from get_logger 2019-10-04 10:08:53 +02:00
Langhammer, Jens
2b8fed8f4e saml_idp(minor): rewrite to use defusedxml instead of bs4 2019-10-04 09:50:25 +02:00
Langhammer, Jens
c7322a32a0 app_gw(minor): remove current implementation 2019-10-04 09:28:28 +02:00
Jens Langhammer
64b75cab84 policy(minor): add data class for policy request 2019-10-03 10:45:31 +02:00
Jens Langhammer
f58bc61999 new release: 0.4.2-beta 2019-10-02 21:05:51 +00:00
Jens Langhammer
fb8ccc0283 lint(minor): fix import order 2019-10-02 21:05:37 +00:00
Jens Langhammer
c38012f147 new release: 0.4.1-beta 2019-10-02 21:04:16 +00:00
Jens Langhammer
3676ff21c2 helm(minor): use postgres 4.2.2 2019-10-02 21:03:39 +00:00
Jens Langhammer
920e705d75 policy(minor): lookup correct policy subclass 2019-10-02 22:28:58 +02:00
Jens Langhammer
de0b137b1e policy(minor): improve error handling 2019-10-02 22:28:39 +02:00
Jens Langhammer
d44ac6e2a3 static(minor): fix build path for static image 2019-10-02 22:16:48 +02:00
Jens Langhammer
71039a4012 helm(minor): fix p2 to passbook 2019-10-02 22:16:32 +02:00
Langhammer, Jens
8745ac7932 new release: 0.4.0-beta 2019-10-01 17:01:30 +02:00
Langhammer, Jens
7f70048423 ci(minor): disable pylint since its currently broken upstream 2019-10-01 16:53:09 +02:00
Langhammer, Jens
97dbfc8885 req(minor): fix dependency issue by downgrading prospector 2019-10-01 15:54:29 +02:00
Langhammer, Jens
149ea22a93 k8s(minor): switch to uwsgi 2019-10-01 15:43:06 +02:00
Langhammer, Jens
404ed5406d k8s(minor): remove passwords from configmap 2019-10-01 15:42:55 +02:00
Langhammer, Jens
b8656858ec k8s(minor): load secrets as env vars 2019-10-01 15:42:14 +02:00
Langhammer, Jens
6b0f0e8993 deploy(minor): use 5.x postgresql chart for psql 10.x 2019-10-01 15:33:43 +02:00
Langhammer, Jens
aec1ccd88d root(minor): fix redis password not being loaded 2019-10-01 15:30:35 +02:00
Langhammer, Jens
bee5c200b6 docker(minor): fix static build failing 2019-10-01 15:30:22 +02:00
105 changed files with 641 additions and 1571 deletions

View File

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.3.0-beta
current_version = 0.5.0-beta
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
@@ -23,5 +23,5 @@ values =
[bumpversion:file:passbook/__init__.py]
[bumpversion:file:passbook/core/nginx.conf]
[bumpversion:file:docker/nginx.conf]

View File

@@ -1,7 +1,6 @@
[run]
source = passbook
omit =
env/
*/wsgi.py
manage.py
*/migrations/*

View File

@@ -2,3 +2,4 @@ env
helm
passbook-ui
static
*.env.yml

View File

@@ -27,7 +27,7 @@ create-base-image:
before_script:
- echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
script:
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/base.Dockerfile --destination docker.beryju.org/passbook/base:latest --destination docker.beryju.org/passbook/base:0.3.0-beta
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/base.Dockerfile --destination docker.beryju.org/passbook/base:latest --destination docker.beryju.org/passbook/base:0.5.0-beta
stage: build-base-image
only:
refs:
@@ -41,7 +41,7 @@ build-dev-image:
before_script:
- echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
script:
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/dev.Dockerfile --destination docker.beryju.org/passbook/dev:latest --destination docker.beryju.org/passbook/dev:0.3.0-beta
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/dev.Dockerfile --destination docker.beryju.org/passbook/dev:latest --destination docker.beryju.org/passbook/dev:0.5.0-beta
stage: build-dev-image
only:
refs:
@@ -63,20 +63,20 @@ migrations:
services:
- postgres:latest
- redis:latest
prospector:
script:
- prospector
stage: test
services:
- postgres:latest
- redis:latest
pylint:
script:
- pylint passbook
stage: test
services:
- postgres:latest
- redis:latest
# prospector:
# script:
# - prospector
# stage: test
# services:
# - postgres:latest
# - redis:latest
# pylint:
# script:
# - pylint passbook
# stage: test
# services:
# - postgres:latest
# - redis:latest
coverage:
script:
- coverage run manage.py test
@@ -94,7 +94,7 @@ package-passbook-server:
before_script:
- echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
script:
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.beryju.org/passbook/server:latest --destination docker.beryju.org/passbook/server:0.3.0-beta
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.beryju.org/passbook/server:latest --destination docker.beryju.org/passbook/server:0.5.0-beta
stage: build
only:
- tags
@@ -107,7 +107,7 @@ build-passbook-static:
before_script:
- echo "{\"auths\":{\"docker.beryju.org\":{\"auth\":\"$DOCKER_AUTH\"}}}" > /kaniko/.docker/config.json
script:
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/static.Dockerfile --destination docker.beryju.org/passbook/static:latest --destination docker.beryju.org/passbook/static:0.3.0-beta
- /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/static.Dockerfile --destination docker.beryju.org/passbook/static:latest --destination docker.beryju.org/passbook/static:0.5.0-beta
only:
- tags
- /^version/.*$/
@@ -124,7 +124,7 @@ package-helm:
- curl https://raw.githubusercontent.com/helm/helm/master/scripts/get | bash
script:
- helm init --client-only
- helm dependency build helm/passbook
- helm dependency update helm/passbook
- helm package helm/passbook
artifacts:
paths:

View File

@@ -3,11 +3,9 @@ test-warnings: true
doc-warnings: false
ignore-paths:
- env
- migrations
- docs
- node_modules
- client-packages
uses:
- django

View File

@@ -2,9 +2,10 @@
disable=redefined-outer-name,arguments-differ,no-self-use,cyclic-import,fixme,locally-disabled,unpacking-non-sequence,too-many-ancestors,too-many-branches,too-few-public-methods
load-plugins=pylint_django,pylint.extensions.bad_builtin
#,pylint.extensions.docparams
extension-pkg-whitelist=lxml
const-rgx=[a-zA-Z0-9_]{1,40}$
ignored-modules=django-otp
jobs=4
[SIMILARITIES]

View File

@@ -1,7 +1,8 @@
FROM docker.beryju.org/passbook/base:latest
FROM docker.beryju.org/passbook/base:test
COPY ./passbook/ /app/passbook
COPY ./manage.py /app/
COPY ./docker/uwsgi.ini /app/
USER passbook

17
Pipfile
View File

@@ -4,13 +4,8 @@ url = "https://pypi.org/simple"
verify_ssl = true
[packages]
asgiref = "*"
beautifulsoup4 = "*"
celery = "*"
channels = "*"
cherrypy = "*"
colorlog = "*"
daphne = "*"
defusedxml = "*"
django = "*"
django-cors-middleware = "*"
@@ -23,7 +18,6 @@ django-otp = "*"
django-recaptcha = "*"
django-redis = "*"
django-rest-framework = "*"
django-revproxy = "*"
djangorestframework = "==3.9.4"
drf-yasg = "*"
ldap3 = "*"
@@ -31,7 +25,7 @@ lxml = "*"
markdown = "*"
oauthlib = "*"
packaging = "*"
psycopg2 = "*"
psycopg2-binary = "*"
pycryptodome = "*"
pyyaml = "*"
qrcode = "*"
@@ -40,22 +34,21 @@ sentry-sdk = "*"
service_identity = "*"
signxml = "*"
urllib3 = {extras = ["secure"],version = "*"}
websocket_client = "*"
structlog = "*"
[requires]
python_version = "3.7"
[dev-packages]
astroid = "==2.2.5"
coverage = "*"
isort = "*"
pylint = "*"
pylint-django = "*"
prospector = "*"
pylint = "==2.3.1"
pylint-django = "==2.0.10"
prospector = "==1.1.7"
django-debug-toolbar = "*"
bumpversion = "*"
unittest-xml-reporting = "*"
autopep8 = "*"
bandit = "*"
twine = "*"
colorama = "*"

470
Pipfile.lock generated
View File

@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "f8694b0ee03f99560e853fd24e9cd7ac987c757cd50249398346e42cdd98cbbb"
"sha256": "d03d1e494d28a90b39edd1d489afdb5e39ec09bceb18daa2a54b2cc7de61d83c"
},
"pipfile-spec": 6,
"requires": {
@@ -23,57 +23,19 @@
],
"version": "==2.5.1"
},
"asgiref": {
"hashes": [
"sha256:a4ce726e6ef49cca13642ff49588530ebabcc47c669c7a95af37ea5a74b9b823",
"sha256:f62b1c88ebf5fe95db202a372982970edcf375c1513d7e70717df0750f5c2b98"
],
"index": "pypi",
"version": "==3.2.2"
},
"asn1crypto": {
"hashes": [
"sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
"sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
"sha256:d02bf8ea1b964a5ff04ac7891fe3a39150045d1e5e4fe99273ba677d11b92a04",
"sha256:f822954b90c4c44f002e2cd46d636ab630f1fe4df22c816a82b66505c404eb2a"
],
"version": "==0.24.0"
"version": "==1.0.0"
},
"attrs": {
"hashes": [
"sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
"sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
"sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2",
"sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396"
],
"version": "==19.1.0"
},
"autobahn": {
"hashes": [
"sha256:27688cbddd5545fc2ee2614ec8fa65119f1a2122606ce2ef7756392c33e3ec0f",
"sha256:a24826ad0bcc35d32cb4576a092fa744e8b6738bd6320d2de857ad8a71df0bec"
],
"version": "==19.9.3"
},
"automat": {
"hashes": [
"sha256:cbd78b83fa2d81fe2a4d23d258e1661dd7493c9a50ee2f1a5b2cac61c1793b0e",
"sha256:fdccab66b68498af9ecfa1fa43693abe546014dd25cf28543cbe9d1334916a58"
],
"version": "==0.7.0"
},
"backports.functools-lru-cache": {
"hashes": [
"sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
"sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
],
"version": "==1.5"
},
"beautifulsoup4": {
"hashes": [
"sha256:05668158c7b85b791c5abde53e50265e16f98ad601c402ba44d70f96c4159612",
"sha256:25288c9e176f354bf277c0a10aa96c782a6a18a17122dba2e8cec4a97e03343b",
"sha256:f040590be10520f2ea4c2ae8c3dae441c7cfff5308ec9d58a0ec0c1b8f81d469"
],
"index": "pypi",
"version": "==4.8.0"
"version": "==19.2.0"
},
"billiard": {
"hashes": [
@@ -92,10 +54,10 @@
},
"certifi": {
"hashes": [
"sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
"sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
"sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
"sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
],
"version": "==2019.6.16"
"version": "==2019.9.11"
},
"cffi": {
"hashes": [
@@ -130,14 +92,6 @@
],
"version": "==1.12.3"
},
"channels": {
"hashes": [
"sha256:9191a85800673b790d1d74666fb7676f430600b71b662581e97dd69c9aedd29a",
"sha256:af7cdba9efb3f55b939917d1b15defb5d40259936013e60660e5e9aff98db4c5"
],
"index": "pypi",
"version": "==2.2.0"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
@@ -147,33 +101,18 @@
},
"cheroot": {
"hashes": [
"sha256:427e7e3ce51ad5a6e5cf953252b5782d5dfbeb544c09910634971bc06df6621b",
"sha256:74d733c55178812253d855990f7ad7b31ab4ee8dab80e4803bd5e52299c50395"
"sha256:6168371ab9aaf574ac5f75675f244bbfebf990202bf75048065e9d675b9ae719",
"sha256:8cc7c28961db2e13d0cac6b234a589a314c1844f7bbf54e67888ac9a2e25ac59"
],
"version": "==6.5.8"
"version": "==7.0.0"
},
"cherrypy": {
"hashes": [
"sha256:16fc226a280cd772ede7c309d3964002196784ac6615d8bface52be12ff51230",
"sha256:488ea5e639885c75330686c1d7d3dfbd002f784c027a3fe5b374b41926b8cba3"
"sha256:033368d25fcc6bca143e7efe9adbfd3a6d91cc0d90c37a649261935f116aafab",
"sha256:683e687e7c7b1ba31ef86a113b1eafd0407269fed175bf488d3c839d37d1cc60"
],
"index": "pypi",
"version": "==18.2.0"
},
"colorlog": {
"hashes": [
"sha256:3cf31b25cbc8f86ec01fef582ef3b840950dea414084ed19ab922c8b493f9b42",
"sha256:450f52ea2a2b6ebb308f034ea9a9b15cea51e65650593dca1da3eb792e4e4981"
],
"index": "pypi",
"version": "==4.0.2"
},
"constantly": {
"hashes": [
"sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35",
"sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"
],
"version": "==15.1.0"
"version": "==18.3.0"
},
"coreapi": {
"hashes": [
@@ -210,14 +149,6 @@
],
"version": "==2.7"
},
"daphne": {
"hashes": [
"sha256:2329b7a74b5559f7ea012879c10ba945c3a53df7d8d2b5932a904e3b4c9abcc2",
"sha256:3cae286a995ae5b127d7de84916f0480cb5be19f81125b6a150b8326250dadd5"
],
"index": "pypi",
"version": "==2.3.0"
},
"defusedxml": {
"hashes": [
"sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93",
@@ -228,11 +159,11 @@
},
"django": {
"hashes": [
"sha256:148a4a2d1a85b23883b0a4e99ab7718f518a83675e4485e44dc0c1d36988c5fa",
"sha256:deb70aa038e59b58593673b15e9a711d1e5ccd941b5973b30750d5d026abfd56"
"sha256:4025317ca01f75fc79250ff7262a06d8ba97cd4f82e93394b2a0a6a4a925caeb",
"sha256:a8ca1033acac9f33995eb2209a6bf18a4681c3e5269a878e9a7e0b7384ed1ca3"
],
"index": "pypi",
"version": "==2.2.5"
"version": "==2.2.6"
},
"django-cors-middleware": {
"hashes": [
@@ -280,11 +211,11 @@
},
"django-otp": {
"hashes": [
"sha256:246b11ee38ec1cea2e2312311a830740d1a8d0384ba15e7b70e03f851d790157",
"sha256:cefbf5e7295498c767752d77828ce3f56cdb0373915e56fe4f87d99604742394"
"sha256:79c8253be97246df86540d551dc705e8fe6ca76af8e8c77f78314cd1b513c2cf",
"sha256:c5bf3916dca5d53cb377aa6dea40aa785c164013fbf750384137362dfa278cf5"
],
"index": "pypi",
"version": "==0.7.0"
"version": "==0.7.2"
},
"django-recaptcha": {
"hashes": [
@@ -309,14 +240,6 @@
"index": "pypi",
"version": "==0.1.0"
},
"django-revproxy": {
"hashes": [
"sha256:0b539736e438aad3cd8b34563125783678f65bcb847970c95d8e9820e6dc88b3",
"sha256:b2c6244aaf53fbbecb79084bf507761754b36895c0f6d01349066e9a355e8455"
],
"index": "pypi",
"version": "==0.9.15"
},
"djangorestframework": {
"hashes": [
"sha256:376f4b50340a46c15ae15ddd0c853085f4e66058f97e4dbe7d43ed62f5e60651",
@@ -327,11 +250,11 @@
},
"drf-yasg": {
"hashes": [
"sha256:68fded2ffdf46e03f33e766184b7d8f1e1a5236f94acfd0c4ba932a57b812566",
"sha256:fcef74709ead2b365410be3d12afbfd0a6e49d1efe615a15a929da7e950bb83c"
"sha256:4cfec631880ae527a91ec7cd3241aea2f82189f59e2f089119aa687761afb227",
"sha256:504cce09035cf1bace63b84d9d778b772f86bb37d8a71ed6f723346362e633b2"
],
"index": "pypi",
"version": "==1.16.1"
"version": "==1.17.0"
},
"eight": {
"hashes": [
@@ -346,13 +269,6 @@
],
"version": "==0.16.0"
},
"hyperlink": {
"hashes": [
"sha256:4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654",
"sha256:ab4a308feb039b04f855a020a6eda3b18ca5a68e6d8f8c899cbe9e653721d04f"
],
"version": "==19.0.0"
},
"idna": {
"hashes": [
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
@@ -362,17 +278,10 @@
},
"importlib-metadata": {
"hashes": [
"sha256:0c505102757e7fa28b9f0958d8bc81301159dea16e2649858c92edc158b78a83",
"sha256:9a9f75ce32e78170905888acbf2376a81d3f21ecb3bb4867050413411d3ca7a9"
"sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26",
"sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"
],
"version": "==0.21"
},
"incremental": {
"hashes": [
"sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f",
"sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3"
],
"version": "==17.5.0"
"version": "==0.23"
},
"inflection": {
"hashes": [
@@ -402,10 +311,10 @@
},
"kombu": {
"hashes": [
"sha256:55274dc75eb3c3994538b0973a0fadddb236b698a4bc135b8aa4981e0a710b8f",
"sha256:e5f0312dfb9011bebbf528ccaf118a6c2b5c3b8244451f08381fb23e7715809b"
"sha256:31edb84947996fdda065b6560c128d5673bb913ff34aa19e7b84755217a24deb",
"sha256:c9078124ce2616b29cf6607f0ac3db894c59154252dee6392cdbbe15e5c4b566"
],
"version": "==4.6.4"
"version": "==4.6.5"
},
"ldap3": {
"hashes": [
@@ -501,11 +410,11 @@
},
"packaging": {
"hashes": [
"sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
"sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
"sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
"sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
],
"index": "pypi",
"version": "==19.1"
"version": "==19.2"
},
"portend": {
"hashes": [
@@ -514,19 +423,36 @@
],
"version": "==2.5"
},
"psycopg2": {
"psycopg2-binary": {
"hashes": [
"sha256:128d0fa910ada0157bba1cb74a9c5f92bb8a1dca77cf91a31eb274d1f889e001",
"sha256:227fd46cf9b7255f07687e5bde454d7d67ae39ca77e170097cdef8ebfc30c323",
"sha256:2315e7f104681d498ccf6fd70b0dba5bce65d60ac92171492bfe228e21dcc242",
"sha256:4b5417dcd2999db0f5a891d54717cfaee33acc64f4772c4bc574d4ff95ed9d80",
"sha256:640113ddc943522aaf71294e3f2d24013b0edd659b7820621492c9ebd3a2fb0b",
"sha256:897a6e838319b4bf648a574afb6cabcb17d0488f8c7195100d48d872419f4457",
"sha256:8dceca81409898c870e011c71179454962dec152a1a6b86a347f4be74b16d864",
"sha256:b1b8e41da09a0c3ef0b3d4bb72da0dde2abebe583c1e8462973233fd5ad0235f",
"sha256:cb407fccc12fc29dc331f2b934913405fa49b9b75af4f3a72d0f50f57ad2ca23",
"sha256:d3a27550a8185e53b244ad7e79e307594b92fede8617d80200a8cce1fba2c60f",
"sha256:f0e6b697a975d9d3ccd04135316c947dd82d841067c7800ccf622a8717e98df1"
"sha256:080c72714784989474f97be9ab0ddf7b2ad2984527e77f2909fcd04d4df53809",
"sha256:110457be80b63ff4915febb06faa7be002b93a76e5ba19bf3f27636a2ef58598",
"sha256:171352a03b22fc099f15103959b52ee77d9a27e028895d7e5fde127aa8e3bac5",
"sha256:19d013e7b0817087517a4b3cab39c084d78898369e5c46258aab7be4f233d6a1",
"sha256:249b6b21ae4eb0f7b8423b330aa80fab5f821b9ffc3f7561a5e2fd6bb142cf5d",
"sha256:2ac0731d2d84b05c7bb39e85b7e123c3a0acd4cda631d8d542802c88deb9e87e",
"sha256:2b6d561193f0dc3f50acfb22dd52ea8c8dfbc64bcafe3938b5f209cc17cb6f00",
"sha256:2bd23e242e954214944481124755cbefe7c2cf563b1a54cd8d196d502f2578bf",
"sha256:3e1239242ca60b3725e65ab2f13765fc199b03af9eaf1b5572f0e97bdcee5b43",
"sha256:3eb70bb697abbe86b1d2b1316370c02ba320bfd1e9e35cf3b9566a855ea8e4e5",
"sha256:51a2fc7e94b98bd1bb5d4570936f24fc2b0541b63eccadf8fdea266db8ad2f70",
"sha256:52f1bdafdc764b7447e393ed39bb263eccb12bfda25a4ac06d82e3a9056251f6",
"sha256:5b3581319a3951f1e866f4f6c5e42023db0fae0284273b82e97dfd32c51985cd",
"sha256:63c1b66e3b2a3a336288e4bcec499e0dc310cd1dceaed1c46fa7419764c68877",
"sha256:8123a99f24ecee469e5c1339427bcdb2a33920a18bb5c0d58b7c13f3b0298ba3",
"sha256:85e699fcabe7f817c0f0a412d4e7c6627e00c412b418da7666ff353f38e30f67",
"sha256:8dbff4557bbef963697583366400822387cccf794ccb001f1f2307ed21854c68",
"sha256:908d21d08d6b81f1b7e056bbf40b2f77f8c499ab29e64ec5113052819ef1c89b",
"sha256:af39d0237b17d0a5a5f638e9dffb34013ce2b1d41441fd30283e42b22d16858a",
"sha256:af51bb9f055a3f4af0187149a8f60c9d516cf7d5565b3dac53358796a8fb2a5b",
"sha256:b2ecac57eb49e461e86c092761e6b8e1fd9654dbaaddf71a076dcc869f7014e2",
"sha256:cd37cc170678a4609becb26b53a2bc1edea65177be70c48dd7b39a1149cabd6e",
"sha256:d17e3054b17e1a6cb8c1140f76310f6ede811e75b7a9d461922d2c72973f583e",
"sha256:d305313c5a9695f40c46294d4315ed3a07c7d2b55e48a9010dad7db7a66c8b7f",
"sha256:dd0ef0eb1f7dd18a3f4187226e226a7284bda6af5671937a221766e6ef1ee88f",
"sha256:e1adff53b56db9905db48a972fb89370ad5736e0450b96f91bcf99cadd96cfd7",
"sha256:f0d43828003c82dbc9269de87aa449e9896077a71954fbbb10a614c017e65737",
"sha256:f78e8b487de4d92640105c1389e5b90be3496b1d75c90a666edd8737cc2dbab7"
],
"index": "pypi",
"version": "==2.8.3"
@@ -618,13 +544,6 @@
],
"version": "==3.9.0"
},
"pyhamcrest": {
"hashes": [
"sha256:6b672c02fdf7470df9674ab82263841ce8333fb143f32f021f6cb26f0e512420",
"sha256:8ffaa0a53da57e89de14ced7185ac746227a8894dbd5a3c718bf05ddbd1d56cd"
],
"version": "==1.9.0"
},
"pyjwkest": {
"hashes": [
"sha256:5560fd5ba08655f29ff6ad1df1e15dc05abc9d976fcbcec8d2b5167f49b70222"
@@ -710,35 +629,35 @@
},
"ruamel.yaml.clib": {
"hashes": [
"sha256:0bbe19d3e099f8ba384e1846e6b54f245f58aeec8700edbbf9abb87afa54fd82",
"sha256:2f38024592613f3a8772bbc2904be027d9abf463518ba145f2d0c8e6da27009f",
"sha256:44449b3764a3f75815eea8ae5930b98e8326be64a90b0f782747318f861abfe0",
"sha256:5710be9a357801c31c1eaa37b9bc92d38176d785af5b2f0c9751385c5dc9659a",
"sha256:5a089acb6833ed5f412e24cbe3e665683064c1429824d2819137b5ade54435c3",
"sha256:6143386ddd61599ea081c012a69a16e5bdd7b3c6c231bd039534365a48940f30",
"sha256:6726aaf851f5f9e4cbdd3e1e414bc700bdd39220e8bc386415fd41c87b1b53c2",
"sha256:68fbc3b5d94d145a391452f886ae5fca240cb7e3ab6bd66e1a721507cdaac28a",
"sha256:75ebddf99ba9e0b48f32b5bdcf9e5a2b84c017da9e0db7bf11995fa414aa09cd",
"sha256:79948a6712baa686773a43906728e20932c923f7b2a91be7347993be2d745e55",
"sha256:8a2dd8e8b08d369558cade05731172c4b5e2f4c5097762c6b352bd28fd9f9dc4",
"sha256:c747acdb5e8c242ab2280df6f0c239e62838af4bee647031d96b3db2f9cefc04",
"sha256:cadc8eecd27414dca30366b2535cb5e3f3b47b4e2d6be7a0b13e4e52e459ff9f",
"sha256:cee86ecc893a6a8ecaa7c6a9c2d06f75f614176210d78a5f155f8e78d6989509",
"sha256:e59af39e895aff28ee5f55515983cab3466d1a029c91c04db29da1c0f09cf333",
"sha256:eee7ecd2eee648884fae6c51ae50c814acdcc5d6340dc96c970158aebcd25ac6",
"sha256:ef8d4522d231cb9b29f6cdd0edc8faac9d9715c60dc7becbd6eb82c915a98e5b",
"sha256:f504d45230cc9abf2810623b924ae048b224a90adb01f97db4e766cfdda8e6eb"
"sha256:1e77424825caba5553bbade750cec2277ef130647d685c2b38f68bc03453bac6",
"sha256:392b7c371312abf27fb549ec2d5e0092f7ef6e6c9f767bfb13e83cb903aca0fd",
"sha256:4d55386129291b96483edcb93b381470f7cd69f97585829b048a3d758d31210a",
"sha256:550168c02d8de52ee58c3d8a8193d5a8a9491a5e7b2462d27ac5bf63717574c9",
"sha256:57933a6986a3036257ad7bf283529e7c19c2810ff24c86f4a0cfeb49d2099919",
"sha256:615b0396a7fad02d1f9a0dcf9f01202bf9caefee6265198f252c865f4227fcc6",
"sha256:77556a7aa190be9a2bd83b7ee075d3df5f3c5016d395613671487e79b082d784",
"sha256:7aee724e1ff424757b5bd8f6c5bbdb033a570b2b4683b17ace4dbe61a99a657b",
"sha256:8073c8b92b06b572e4057b583c3d01674ceaf32167801fe545a087d7a1e8bf52",
"sha256:9c6d040d0396c28d3eaaa6cb20152cb3b2f15adf35a0304f4f40a3cf9f1d2448",
"sha256:a0ff786d2a7dbe55f9544b3f6ebbcc495d7e730df92a08434604f6f470b899c5",
"sha256:b1b7fcee6aedcdc7e62c3a73f238b3d080c7ba6650cd808bce8d7761ec484070",
"sha256:b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c",
"sha256:d0d3ac228c9bbab08134b4004d748cf9f8743504875b3603b3afbb97e3472947",
"sha256:d10e9dd744cf85c219bf747c75194b624cc7a94f0c80ead624b06bfa9f61d3bc",
"sha256:ea4362548ee0cbc266949d8a441238d9ad3600ca9910c3fe4e82ee3a50706973",
"sha256:ed5b3698a2bb241b7f5cbbe277eaa7fe48b07a58784fba4f75224fd066d253ad",
"sha256:f9dcc1ae73f36e8059589b601e8e4776b9976effd76c21ad6a855a74318efd6e"
],
"markers": "platform_python_implementation == 'CPython' and python_version < '3.8'",
"version": "==0.1.2"
"version": "==0.2.0"
},
"sentry-sdk": {
"hashes": [
"sha256:528f936118679e9a52dacb96bfefe20acb5d63e0797856c64a582cc3c2bc1f9e",
"sha256:b4edcb1296fee107439345d0f8b23432b8732b7e28407f928367d0a4a36301a9"
"sha256:15e51e74b924180c98bcd636cb4634945b0a99a124d50b433c3a9dc6a582e8db",
"sha256:1d6a2ee908ec6d8f96c27d78bc39e203df4d586d287c233140af7d8d1aca108a"
],
"index": "pypi",
"version": "==0.11.2"
"version": "==0.12.3"
},
"service-identity": {
"hashes": [
@@ -763,13 +682,6 @@
],
"version": "==1.12.0"
},
"soupsieve": {
"hashes": [
"sha256:8662843366b8d8779dec4e2f921bebec9afd856a5ff2e82cd419acc5054a1a92",
"sha256:a5a6166b4767725fd52ae55fee8c8b6137d9a51e9f1edea461a062a759160118"
],
"version": "==1.9.3"
},
"sqlparse": {
"hashes": [
"sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177",
@@ -792,37 +704,6 @@
],
"version": "==1.14.1"
},
"twisted": {
"hashes": [
"sha256:02214ef6f125804969aedd55daccea57060b98dae6a2aa0a4cb60c4d0acb8a2c",
"sha256:15b51047ab116ee61d791cf9fe6f037f35e909a6d344ccb437d1691627c4d8a1",
"sha256:17704d98d58c9c52d97e88570732e4c094a93fe5df937d01b759bab593345eec",
"sha256:222e0cfd60b0c867dd303bce6355a3ffac46574079dff11ae7a1775235ad12c8",
"sha256:23090c9fcec01ce4e102912a39eb4645b2bf916abe459804f87853d977ced6e3",
"sha256:5102fc2bf0d870c1e217aa09ed7a48b633cc579950a31ecae9cecc556ebffdf2",
"sha256:6bc71d5a2320576a3ac7f2dac7802c290fcf9f1972c59f9ef5c5b85b8bac1e1e",
"sha256:6c7703b62de08fd5873d60e6ed30478cdb39e3a37b1ead3a5d2fed10deb6e112",
"sha256:6ca398abd58730070e9bc34e8a01d1198438b2ff130e95492090a2fec5fb683b",
"sha256:98840f28c44894f44dc597747b4cddc740197dc6f6f18ba4dd810422094e35cb",
"sha256:998e3baf509c7cf7973b8174c1050ac10f6a8bc1aaf0178ad6a7c422c75a0c68",
"sha256:a5f2de00c6630c8f5ad32fca64fc4c853536c21e9ea8d0d2ae54804ef5836b9c",
"sha256:aad65a24b27253eb94f2749131a872487b093c599c5873c03d90a65cc9b8a2fc",
"sha256:ab788465701f553f764f4442d22b850f39a6a6abd4861e70c05b4c27119c9b50",
"sha256:c7244e24fcb72f838be57d3e117ad7df135ff5af4c9d4c565417d671cd1e68c9",
"sha256:d5db93026568f60cacdc0615fcd21d46f694a6bfad0ef3ff53cde2b4bb85a39d",
"sha256:da92426002703b02d8fccff3acfea2d8baf76a9052e8c55ea76d0407eeaa06ce",
"sha256:f4f0af14d288140ecb00861a3bd1e0b94ffdc63057cc1abe8b9dc84f6b6dcf18",
"sha256:f985f31e3244d18610816b55becf8fbf445c8e30fe0731500cadaf19f296baf0"
],
"version": "==19.7.0"
},
"txaio": {
"hashes": [
"sha256:67e360ac73b12c52058219bb5f8b3ed4105d2636707a36a7cdafb56fe06db7fe",
"sha256:b6b235d432cc58ffe111b43e337db71a5caa5d3eaa88f0eacf60b431c7626ef5"
],
"version": "==18.8.1"
},
"uritemplate": {
"hashes": [
"sha256:01c69f4fe8ed503b2951bef85d996a9d22434d2431584b5b107b2981ff416fbd",
@@ -836,11 +717,11 @@
"secure"
],
"hashes": [
"sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
"sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
"sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398",
"sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86"
],
"index": "pypi",
"version": "==1.25.3"
"version": "==1.25.6"
},
"vine": {
"hashes": [
@@ -849,14 +730,6 @@
],
"version": "==1.3.0"
},
"websocket-client": {
"hashes": [
"sha256:1151d5fb3a62dc129164292e1227655e4bbc5dd5340a5165dfae61128ec50aa9",
"sha256:1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a"
],
"index": "pypi",
"version": "==0.56.0"
},
"zc.lockfile": {
"hashes": [
"sha256:307ad78227e48be260e64896ec8886edc7eae22d8ec53e4d528ab5537a83203b",
@@ -870,40 +743,6 @@
"sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
],
"version": "==0.6.0"
},
"zope.interface": {
"hashes": [
"sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c",
"sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b",
"sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02",
"sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f",
"sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5",
"sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375",
"sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487",
"sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2",
"sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0",
"sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b",
"sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63",
"sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39",
"sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745",
"sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc",
"sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2",
"sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa",
"sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1",
"sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc",
"sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98",
"sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97",
"sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab",
"sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127",
"sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d",
"sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe",
"sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891",
"sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1",
"sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b",
"sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966",
"sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317"
],
"version": "==4.6.0"
}
},
"develop": {
@@ -912,6 +751,7 @@
"sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
"sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
],
"index": "pypi",
"version": "==2.2.5"
},
"autopep8": {
@@ -929,13 +769,6 @@
"index": "pypi",
"version": "==1.6.2"
},
"bleach": {
"hashes": [
"sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16",
"sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa"
],
"version": "==3.1.0"
},
"bumpversion": {
"hashes": [
"sha256:6744c873dd7aafc24453d8b6a1a0d6d109faf63cd0cd19cb78fd46e74932c77e",
@@ -944,20 +777,6 @@
"index": "pypi",
"version": "==0.5.3"
},
"certifi": {
"hashes": [
"sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
"sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
],
"version": "==2019.6.16"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"colorama": {
"hashes": [
"sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d",
@@ -1006,11 +825,11 @@
},
"django": {
"hashes": [
"sha256:148a4a2d1a85b23883b0a4e99ab7718f518a83675e4485e44dc0c1d36988c5fa",
"sha256:deb70aa038e59b58593673b15e9a711d1e5ccd941b5973b30750d5d026abfd56"
"sha256:4025317ca01f75fc79250ff7262a06d8ba97cd4f82e93394b2a0a6a4a925caeb",
"sha256:a8ca1033acac9f33995eb2209a6bf18a4681c3e5269a878e9a7e0b7384ed1ca3"
],
"index": "pypi",
"version": "==2.2.5"
"version": "==2.2.6"
},
"django-debug-toolbar": {
"hashes": [
@@ -1020,14 +839,6 @@
"index": "pypi",
"version": "==2.0"
},
"docutils": {
"hashes": [
"sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0",
"sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827",
"sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"
],
"version": "==0.15.2"
},
"dodgy": {
"hashes": [
"sha256:65e13cf878d7aff129f1461c13cb5fd1bb6dfe66bb5327e09379c3877763280c"
@@ -1036,24 +847,17 @@
},
"gitdb2": {
"hashes": [
"sha256:83361131a1836661a155172932a13c08bda2db3674e4caa32368aa6eb02f38c2",
"sha256:e3a0141c5f2a3f635c7209d56c496ebe1ad35da82fe4d3ec4aaa36278d70648a"
"sha256:1b6df1433567a51a4a9c1a5a0de977aa351a405cc56d7d35f3388bad1f630350",
"sha256:96bbb507d765a7f51eb802554a9cfe194a174582f772e0d89f4e87288c288b7b"
],
"version": "==2.0.5"
"version": "==2.0.6"
},
"gitpython": {
"hashes": [
"sha256:947cc75913e7b6da108458136607e2ee0e40c20be1e12d4284e7c6c12956c276",
"sha256:d2f4945f8260f6981d724f5957bc076398ada55cb5d25aaee10108bcdc894100"
"sha256:631263cc670aa56ce3d3c414cf0fe2e840f2e913514b138ea28d88a477bbcd21",
"sha256:6e97b9f0954807f30c2dd8e3165731ed6c477a1b365f194b69d81d7940a08332"
],
"version": "==3.0.2"
},
"idna": {
"hashes": [
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
],
"version": "==2.8"
"version": "==3.0.3"
},
"isort": {
"hashes": [
@@ -1107,13 +911,6 @@
],
"version": "==0.4.1"
},
"pkginfo": {
"hashes": [
"sha256:7424f2c8511c186cd5424bbf31045b77435b37a8d604990b79d4e70d741148bb",
"sha256:a6d9e40ca61ad3ebd0b72fbadd4fba16e4c0e4df0428c041e01e06eb6ee71f32"
],
"version": "==1.5.0.1"
},
"prospector": {
"hashes": [
"sha256:aba551e53dc1a5a432afa67385eaa81d7b4cf4c162dc1a4d0ee00b3a0712ad90"
@@ -1142,13 +939,6 @@
],
"version": "==1.6.0"
},
"pygments": {
"hashes": [
"sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127",
"sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
],
"version": "==2.4.2"
},
"pylint": {
"hashes": [
"sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
@@ -1179,9 +969,10 @@
},
"pylint-plugin-utils": {
"hashes": [
"sha256:8d9e31d5ea8b7b0003e1f0f136b44a5235896a32e47c5bc2ef1143e9f6ba0b74"
"sha256:2f30510e1c46edf268d3a195b2849bd98a1b9433229bb2ba63b8d776e1fc4d0a",
"sha256:57625dcca20140f43731311cd8fd879318bf45a8b0fd17020717a8781714a25a"
],
"version": "==0.5"
"version": "==0.6"
},
"pytz": {
"hashes": [
@@ -1209,27 +1000,6 @@
"index": "pypi",
"version": "==5.1.2"
},
"readme-renderer": {
"hashes": [
"sha256:bb16f55b259f27f75f640acf5e00cf897845a8b3e4731b5c1a436e4b8529202f",
"sha256:c8532b79afc0375a85f10433eca157d6b50f7d6990f337fa498c96cd4bfc203d"
],
"version": "==24.0"
},
"requests": {
"hashes": [
"sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
"sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
],
"version": "==2.22.0"
},
"requests-toolbelt": {
"hashes": [
"sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f",
"sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
],
"version": "==0.9.1"
},
"requirements-detector": {
"hashes": [
"sha256:9fbc4b24e8b7c3663aff32e3eba34596848c6b91bd425079b386973bd8d08931"
@@ -1258,9 +1028,10 @@
},
"snowballstemmer": {
"hashes": [
"sha256:713e53b79cbcf97bc5245a06080a33d54a77e7cce2f789c835a143bcdb5c033e"
"sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0",
"sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"
],
"version": "==1.9.1"
"version": "==2.0.0"
},
"sqlparse": {
"hashes": [
@@ -1276,21 +1047,6 @@
],
"version": "==1.31.0"
},
"tqdm": {
"hashes": [
"sha256:1be3e4e3198f2d0e47b928e9d9a8ec1b63525db29095cec1467f4c5a4ea8ebf9",
"sha256:7e39a30e3d34a7a6539378e39d7490326253b7ee354878a92255656dc4284457"
],
"version": "==4.35.0"
},
"twine": {
"hashes": [
"sha256:b2cec0dc1ac55bd74280d257f43763cf0cf928bdcd0de0fd70be70aa1195e3b0",
"sha256:e37d5a73d77b095b85314dde807bfb85b580b5b9d137f5b21332f4636990d97a"
],
"index": "pypi",
"version": "==1.14.0"
},
"typed-ast": {
"hashes": [
"sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
@@ -1320,24 +1076,6 @@
"index": "pypi",
"version": "==2.5.1"
},
"urllib3": {
"extras": [
"secure"
],
"hashes": [
"sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
"sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
],
"index": "pypi",
"version": "==1.25.3"
},
"webencodings": {
"hashes": [
"sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78",
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"
],
"version": "==0.5.1"
},
"wrapt": {
"hashes": [
"sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"

9
README.md Normal file
View File

@@ -0,0 +1,9 @@
# passbook
## Quick instance
```
docker-compose pull
docker-compose up -d
docker-compose exec server ./manage.py createsuperuser
```

View File

@@ -1,15 +1,19 @@
FROM python:3.7-alpine
FROM python:3.7-slim-stretch
COPY ./Pipfile /app/
COPY ./Pipfile.lock /app/
WORKDIR /app/
RUN apk update && \
apk add --no-cache openssl-dev build-base libxml2-dev libxslt-dev libffi-dev gcc musl-dev libgcc zlib-dev postgresql-dev && \
pip install pipenv --no-cache-dir && \
pipenv lock -r > requirements.txt && \
RUN apt-get update && \
apt-get install -y --no-install-recommends build-essential && \
pip install pipenv uwsgi --no-cache-dir && \
apt-get remove -y --purge build-essential && \
apt-get autoremove -y --purge && \
rm -rf /var/lib/apt/lists/*
RUN pipenv lock -r > requirements.txt && \
pipenv --rm && \
pip install -r requirements.txt --no-cache-dir && \
adduser -S passbook && \
adduser --system --no-create-home passbook && \
chown -R passbook /app

View File

@@ -1,4 +1,5 @@
FROM docker.beryju.org/passbook/base:latest
RUN pipenv lock --dev -r > requirements-dev.txt && \
pipenv --rm && \
pip install -r /app/requirements-dev.txt --no-cache-dir

97
docker-compose.yml Normal file
View File

@@ -0,0 +1,97 @@
---
version: '3.2'
services:
postgresql:
image: postgres
volumes:
- database:/var/lib/postgresql/data
networks:
- internal
environment:
- POSTGRES_PASSWORD=${PG_PASS:-thisisnotagoodpassword}
- POSTGRES_USER=passbook
- POSTGRES_DB=passbook
labels:
- traefik.enable=false
redis:
image: redis
networks:
- internal
labels:
- traefik.enable=false
database-migrate:
build:
context: .
image: docker.beryju.org/passbook/server:${TAG:-test}
command:
- ./manage.py
- migrate
networks:
- internal
restart: 'no'
environment:
- PASSBOOK_REDIS__HOST=redis
- PASSBOOK_POSTGRESQL__HOST=postgresql
- PASSBOOK_POSTGRESQL__PASSWORD=${PG_PASS:-thisisnotagoodpassword}
server:
build:
context: .
image: docker.beryju.org/passbook/server:${TAG:-test}
command:
- uwsgi
- uwsgi.ini
environment:
- PASSBOOK_REDIS__HOST=redis
- PASSBOOK_POSTGRESQL__HOST=postgresql
- PASSBOOK_POSTGRESQL__PASSWORD=${PG_PASS:-thisisnotagoodpassword}
ports:
- 8000
networks:
- internal
labels:
- traefik.port=8000
- traefik.docker.network=internal
- traefik.frontend.rule=PathPrefix:/
worker:
image: docker.beryju.org/passbook/server:${TAG:-test}
command:
- ./manage.py
- worker
networks:
- internal
labels:
- traefik.enable=false
environment:
- PASSBOOK_REDIS__HOST=redis
- PASSBOOK_POSTGRESQL__HOST=postgresql
- PASSBOOK_POSTGRESQL__PASSWORD=${PG_PASS:-thisisnotagoodpassword}
static:
build:
context: .
dockerfile: static.Dockerfile
image: docker.beryju.org/passbook/static:${TAG:-test}
networks:
- internal
labels:
- traefik.frontend.rule=PathPrefix:/static, /robots.txt
- traefik.port=80
- traefik.docker.network=internal
traefik:
image: traefik:1.7
command: --api --docker
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
ports:
- "0.0.0.0:80:80"
- "0.0.0.0:443:443"
- "0.0.0.0:8080:8080"
networks:
- internal
volumes:
database:
driver: local
networks:
internal: {}

View File

@@ -39,9 +39,9 @@ http {
gzip on;
gzip_types application/javascript image/* text/css;
gunzip on;
add_header X-passbook-Version 0.3.0-beta;
add_header X-passbook-Version 0.5.0-beta;
add_header Vary X-passbook-Version;
root /static/;
root /data/;
location /_/healthz {
return 204;

9
docker/uwsgi.ini Normal file
View File

@@ -0,0 +1,9 @@
[uwsgi]
http = 0.0.0.0:8000
chdir = /app
wsgi-file = passbook/root/wsgi.py
processes = 4
threads = 2
enable-threads = true
uid = passbook
gid = passbook

View File

@@ -1,6 +1,6 @@
apiVersion: v1
appVersion: "0.3.0-beta"
appVersion: "0.5.0-beta"
description: A Helm chart for passbook.
name: passbook
version: "0.3.0-beta"
version: "0.5.0-beta"
icon: https://git.beryju.org/uploads/-/system/project/avatar/108/logo.png

Binary file not shown.

View File

@@ -1,12 +1,9 @@
dependencies:
- name: rabbitmq
repository: https://kubernetes-charts.storage.googleapis.com/
version: 4.3.2
- name: postgresql
repository: https://kubernetes-charts.storage.googleapis.com/
version: 3.10.1
version: 4.2.2
- name: redis
repository: https://kubernetes-charts.storage.googleapis.com/
version: 5.1.0
digest: sha256:8bf68bc928a2e3c0f05139635be05fa0840554c7bde4cecd624fac78fb5fa5a3
generated: 2019-03-21T11:06:51.553379+01:00
version: 9.2.1
digest: sha256:8782e974a1094eaeecf1d68f093ca4fb84977217b2bd38b09790a05ec289aec2
generated: "2019-10-02T21:03:25.90491153Z"

View File

@@ -1,6 +1,6 @@
dependencies:
- name: postgresql
version: 6.3.10
version: 4.2.2
repository: https://kubernetes-charts.storage.googleapis.com/
- name: redis
version: 9.2.1

View File

@@ -1,62 +0,0 @@
apiVersion: apps/v1beta2
kind: Deployment
metadata:
name: {{ include "passbook.fullname" . }}-appgw
labels:
app.kubernetes.io/name: {{ include "passbook.name" . }}
helm.sh/chart: {{ include "passbook.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
spec:
replicas: {{ .Values.replicaCount }}
selector:
matchLabels:
app.kubernetes.io/name: {{ include "passbook.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
template:
metadata:
labels:
app.kubernetes.io/name: {{ include "passbook.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
passbook.io/component: appgw
spec:
volumes:
- name: config-volume
configMap:
name: {{ include "passbook.fullname" . }}-config
containers:
- name: {{ .Chart.Name }}
image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
imagePullPolicy: IfNotPresent
command:
- ./manage.py
args:
- app_gw_web
ports:
- name: http
containerPort: 8000
protocol: TCP
volumeMounts:
- mountPath: /etc/passbook
name: config-volume
livenessProbe:
httpGet:
path: /
port: http
httpHeaders:
- name: Host
value: kubernetes-healthcheck-host
readinessProbe:
httpGet:
path: /
port: http
httpHeaders:
- name: Host
value: kubernetes-healthcheck-host
resources:
requests:
cpu: 150m
memory: 300M
limits:
cpu: 500m
memory: 500M

View File

@@ -1,20 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "passbook.fullname" . }}-appgw
labels:
app.kubernetes.io/name: {{ include "passbook.name" . }}
helm.sh/chart: {{ include "passbook.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
app.kubernetes.io/name: {{ include "passbook.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
passbook.io/component: appgw

View File

@@ -8,10 +8,8 @@ data:
host: "{{ .Release.Name }}-postgresql"
name: "{{ .Values.postgresql.postgresqlDatabase }}"
user: postgres
password: "{{ .Values.postgresql.postgresqlPassword }}"
redis:
host: "{{ .Release.Name }}-redis-master"
password: "{{ .Values.redis.password }}"
cache_db: 0
message_queue_db: 1

View File

@@ -37,14 +37,9 @@ spec:
backend:
serviceName: {{ $fullName }}-static
servicePort: http
{{- end }}
{{- range .Values.ingress.app_gw_hosts }}
- host: {{ . | quote }}
http:
paths:
- path: /
- path: /robots.txt
backend:
serviceName: {{ $fullName }}-appgw
serviceName: {{ $fullName }}-static
servicePort: http
{{- end }}
{{- end }}

View File

@@ -31,6 +31,21 @@ spec:
- ./manage.py
args:
- migrate
envFrom:
- configMapRef:
name: {{ include "passbook.fullname" . }}-config
prefix: PASSBOOK_
env:
- name: PASSBOOK_REDIS__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-redis"
key: redis-password
- name: PASSBOOK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-postgresql"
key: postgresql-password
volumeMounts:
- mountPath: /etc/passbook
name: config-volume
@@ -39,9 +54,24 @@ spec:
image: "docker.beryju.org/passbook/server:{{ .Values.image.tag }}"
imagePullPolicy: IfNotPresent
command:
- ./manage.py
- uwsgi
args:
- web
- uwsgi.ini
envFrom:
- configMapRef:
name: {{ include "passbook.fullname" . }}-config
prefix: PASSBOOK_
env:
- name: PASSBOOK_REDIS__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-redis"
key: redis-password
- name: PASSBOOK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-postgresql"
key: postgresql-password
ports:
- name: http
containerPort: 8000

View File

@@ -32,6 +32,21 @@ spec:
- ./manage.py
args:
- worker
envFrom:
- configMapRef:
name: {{ include "passbook.fullname" . }}-config
prefix: PASSBOOK_
env:
- name: PASSBOOK_REDIS__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-redis"
key: redis-password
- name: PASSBOOK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: "{{ .Release.Name }}-postgresql"
key: postgresql-password
ports:
- name: http
containerPort: 8000

View File

@@ -5,7 +5,7 @@
replicaCount: 1
image:
tag: 0.3.0-beta
tag: 0.5.0-beta
nameOverride: ""
@@ -21,10 +21,6 @@ postgresql:
postgresqlDatabase: passbook
postgresqlPassword: foo
rabbitmq:
rabbitmq:
password: foo
service:
type: ClusterIP
port: 80
@@ -37,28 +33,7 @@ ingress:
path: /
hosts:
- passbook.k8s.local
app_gw_hosts:
- '*.passbook.k8s.local'
defaultHost: passbook.k8s.local
tls: []
# - secretName: chart-example-tls
# hosts:
# - passbook.k8s.local
resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -1,2 +1,2 @@
"""passbook"""
__version__ = '0.3.0-beta'
__version__ = '0.5.0-beta'

View File

@@ -179,8 +179,8 @@
<span class="card-pf-aggregate-status-notification">
<a href="#">
{% if worker_count < 1%}
<span class="pficon-error-circle-o" data-toggle="tooltip" data-placement="right"
title="{% trans 'No workers connected. Policies will not work and you may expect other issues.' %}"></span> {{ worker_count }}
<span class="pficon-warning-triangle-o" data-toggle="tooltip" data-placement="right"
title="{% trans 'No workers connected.' %}"></span> {{ worker_count }}
{% else %}
<span class="pficon pficon-ok"></span>{{ worker_count }}
{% endif %}

View File

@@ -8,7 +8,7 @@ from structlog import get_logger
from passbook.lib.utils.template import render_to_string
register = template.Library()
LOGGER = get_logger(__name__)
LOGGER = get_logger()
@register.simple_tag()
def get_links(model_instance):

Binary file not shown.

View File

@@ -1,6 +1,4 @@
"""passbook Application Security Gateway app"""
from importlib import import_module
from django.apps import AppConfig
@@ -10,7 +8,4 @@ class PassbookApplicationApplicationGatewayConfig(AppConfig):
name = 'passbook.app_gw'
label = 'passbook_app_gw'
verbose_name = 'passbook Application Security Gateway'
mountpoint = 'app_gw/'
def ready(self):
import_module('passbook.app_gw.signals')
# mountpoint = 'app_gw/'

View File

@@ -1,13 +0,0 @@
"""
ASGI entrypoint. Configures Django and then runs the application
defined in the ASGI_APPLICATION setting.
"""
import os
import django
from channels.routing import get_default_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
django.setup()
application = get_default_application()

View File

@@ -1,29 +0,0 @@
"""passbook app_gw webserver management command"""
from daphne.cli import CommandLineInterface
from django.core.management.base import BaseCommand
from django.utils import autoreload
from structlog import get_logger
from passbook.lib.config import CONFIG
LOGGER = get_logger(__name__)
class Command(BaseCommand):
"""Run Daphne Webserver for app_gw"""
def handle(self, *args, **options):
"""passbook daphne server"""
autoreload.run_with_reloader(self.daphne_server)
def daphne_server(self):
"""Run daphne server within autoreload"""
autoreload.raise_last_exception()
CommandLineInterface().run([
'-p', str(CONFIG.y('app_gw.port', 8000)),
'-b', CONFIG.y('app_gw.listen', '0.0.0.0'), # nosec
'--access-log', '/dev/null',
'--application-close-timeout', '500',
'passbook.app_gw.asgi:application'
])

View File

@@ -1,33 +0,0 @@
"""passbook app_gw middleware"""
from django.views.generic import RedirectView
from passbook.app_gw.proxy.handler import RequestHandler
from passbook.lib.config import CONFIG
class ApplicationGatewayMiddleware:
"""Check if request should be proxied or handeled normally"""
_app_gw_cache = {}
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
# Rudimentary cache
host_header = request.META.get('HTTP_HOST')
if host_header not in self._app_gw_cache:
self._app_gw_cache[host_header] = RequestHandler.find_app_gw_for_request(request)
if self._app_gw_cache[host_header]:
return self.dispatch(request, self._app_gw_cache[host_header])
return self.get_response(request)
def dispatch(self, request, app_gw):
"""Build proxied request and pass to upstream"""
handler = RequestHandler(app_gw, request)
if not handler.check_permission():
to_url = 'https://%s/?next=%s' % (CONFIG.y('domains')[0], request.get_full_path())
return RedirectView.as_view(url=to_url)(request)
return handler.get_response()

View File

@@ -1,8 +0,0 @@
"""Exception classes"""
class ReverseProxyException(Exception):
"""Base for revproxy exception"""
class InvalidUpstream(ReverseProxyException):
"""Invalid upstream set"""

View File

@@ -1,233 +0,0 @@
"""passbook app_gw request handler"""
import mimetypes
from random import SystemRandom
from urllib.parse import urlparse
import certifi
import urllib3
from django.core.cache import cache
from django.utils.http import urlencode
from structlog import get_logger
from passbook.app_gw.models import ApplicationGatewayProvider
from passbook.app_gw.proxy.exceptions import InvalidUpstream
from passbook.app_gw.proxy.response import get_django_response
from passbook.app_gw.proxy.rewrite import Rewriter
from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers
from passbook.core.models import Application
from passbook.policy.engine import PolicyEngine
SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream'
IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored'
LOGGER = get_logger(__name__)
QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`'
ERRORS_MESSAGES = {
'upstream-no-scheme': ("Upstream URL scheme must be either "
"'http' or 'https' (%s).")
}
HTTP_NO_VERIFY = urllib3.PoolManager()
HTTP = urllib3.PoolManager(
cert_reqs='CERT_REQUIRED',
ca_certs=certifi.where())
IGNORED_HOSTS = cache.get(IGNORED_HOSTNAMES_KEY, [])
POLICY_CACHE = {}
class RequestHandler:
"""Forward requests"""
_parsed_url = None
_request_headers = None
def __init__(self, app_gw, request):
self.app_gw = app_gw
self.request = request
if self.app_gw.pk not in POLICY_CACHE:
POLICY_CACHE[self.app_gw.pk] = self.app_gw.application.policies.all()
@staticmethod
def find_app_gw_for_request(request):
"""Check if a request should be proxied or forwarded to passbook"""
# Check if hostname is in cached list of ignored hostnames
# This saves us having to query the database on each request
host_header = request.META.get('HTTP_HOST')
if host_header in IGNORED_HOSTS:
# LOGGER.debug("%s is ignored", host_header)
return False
# Look through all ApplicationGatewayProviders and check hostnames
matches = ApplicationGatewayProvider.objects.filter(
server_name__contains=[host_header],
enabled=True)
if not matches.exists():
# Mo matching Providers found, add host header to ignored list
IGNORED_HOSTS.append(host_header)
cache.set(IGNORED_HOSTNAMES_KEY, IGNORED_HOSTS)
# LOGGER.debug("Ignoring %s", host_header)
return False
# At this point we're certain there's a matching ApplicationGateway
if len(matches) > 1:
# This should never happen
raise ValueError
app_gw = matches.first()
try:
# Check if ApplicationGateway is associated with application
getattr(app_gw, 'application')
if app_gw:
return app_gw
except Application.DoesNotExist:
pass
# LOGGER.debug("ApplicationGateway not associated with Application")
return True
def _get_upstream(self):
"""Choose random upstream and save in session"""
if SESSION_UPSTREAM_KEY not in self.request.session:
self.request.session[SESSION_UPSTREAM_KEY] = {}
if self.app_gw.pk not in self.request.session[SESSION_UPSTREAM_KEY]:
upstream_index = int(SystemRandom().random() * len(self.app_gw.upstream))
self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk] = upstream_index
return self.app_gw.upstream[self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk]]
def get_upstream(self):
"""Get upstream as parsed url"""
upstream = self._get_upstream()
self._parsed_url = urlparse(upstream)
if self._parsed_url.scheme not in ('http', 'https'):
raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] %
upstream)
return upstream
def _format_path_to_redirect(self):
# LOGGER.debug("Path before: %s", self.request.get_full_path())
rewriter = Rewriter(self.app_gw, self.request)
after = rewriter.build()
# LOGGER.debug("Path after: %s", after)
return after
def get_proxy_request_headers(self):
"""Get normalized headers for the upstream
Gets all headers from the original request and normalizes them.
Normalization occurs by removing the prefix ``HTTP_`` and
replacing and ``_`` by ``-``. Example: ``HTTP_ACCEPT_ENCODING``
becames ``Accept-Encoding``.
.. versionadded:: 0.9.1
:param request: The original HTTPRequest instance
:returns: Normalized headers for the upstream
"""
return normalize_request_headers(self.request)
def get_request_headers(self):
"""Return request headers that will be sent to upstream.
The header REMOTE_USER is set to the current user
if AuthenticationMiddleware is enabled and
the view's add_remote_user property is True.
.. versionadded:: 0.9.8
"""
request_headers = self.get_proxy_request_headers()
if not self.app_gw.authentication_header:
return request_headers
request_headers[self.app_gw.authentication_header] = self.request.user.get_username()
# LOGGER.debug("%s set", self.app_gw.authentication_header)
return request_headers
def check_permission(self):
"""Check if user is authenticated and has permission to access app"""
if not hasattr(self.request, 'user'):
return False
if not self.request.user.is_authenticated:
return False
policy_engine = PolicyEngine(POLICY_CACHE[self.app_gw.pk])
policy_engine.for_user(self.request.user).with_request(self.request).build()
passing, _messages = policy_engine.result
return passing
def get_encoded_query_params(self):
"""Return encoded query params to be used in proxied request"""
get_data = encode_items(self.request.GET.lists())
return urlencode(get_data)
def _created_proxy_response(self, path):
request_payload = self.request.body
# LOGGER.debug("Request headers: %s", self._request_headers)
request_url = self.get_upstream() + path
# LOGGER.debug("Request URL: %s", request_url)
if self.request.GET:
request_url += '?' + self.get_encoded_query_params()
# LOGGER.debug("Request URL: %s", request_url)
http = HTTP
if not self.app_gw.upstream_ssl_verification:
http = HTTP_NO_VERIFY
try:
proxy_response = http.urlopen(self.request.method,
request_url,
redirect=False,
retries=None,
headers=self._request_headers,
body=request_payload,
decode_content=False,
preload_content=False)
# LOGGER.debug("Proxy response header: %s",
# proxy_response.getheaders())
except urllib3.exceptions.HTTPError as error:
LOGGER.exception(error)
raise
return proxy_response
def _replace_host_on_redirect_location(self, proxy_response):
location = proxy_response.headers.get('Location')
if location:
if self.request.is_secure():
scheme = 'https://'
else:
scheme = 'http://'
request_host = scheme + self.request.META.get('HTTP_HOST')
upstream_host_http = 'http://' + self._parsed_url.netloc
upstream_host_https = 'https://' + self._parsed_url.netloc
location = location.replace(upstream_host_http, request_host)
location = location.replace(upstream_host_https, request_host)
proxy_response.headers['Location'] = location
# LOGGER.debug("Proxy response LOCATION: %s",
# proxy_response.headers['Location'])
def _set_content_type(self, proxy_response):
content_type = proxy_response.headers.get('Content-Type')
if not content_type:
content_type = (mimetypes.guess_type(self.request.path)
[0] or self.app_gw.default_content_type)
proxy_response.headers['Content-Type'] = content_type
# LOGGER.debug("Proxy response CONTENT-TYPE: %s",
# proxy_response.headers['Content-Type'])
def get_response(self):
"""Pass request to upstream and return response"""
self._request_headers = self.get_request_headers()
path = self._format_path_to_redirect()
proxy_response = self._created_proxy_response(path)
self._replace_host_on_redirect_location(proxy_response)
self._set_content_type(proxy_response)
response = get_django_response(proxy_response, strict_cookies=False)
# If response has a 'Location' header, we rewrite that location as well
if 'Location' in response:
LOGGER.debug("Rewriting Location header")
for server_name in self.app_gw.server_name:
response['Location'] = response['Location'].replace(
self._parsed_url.hostname, server_name)
LOGGER.debug(response['Location'])
# LOGGER.debug("RESPONSE RETURNED: %s", response)
return response

View File

@@ -1,62 +0,0 @@
"""response functions from django-revproxy"""
from django.http import HttpResponse, StreamingHttpResponse
from structlog import get_logger
from passbook.app_gw.proxy.utils import (cookie_from_string,
set_response_headers, should_stream)
#: Default number of bytes that are going to be read in a file lecture
DEFAULT_AMT = 2 ** 16
logger = get_logger(__name__)
def get_django_response(proxy_response, strict_cookies=False):
"""This method is used to create an appropriate response based on the
Content-Length of the proxy_response. If the content is bigger than
MIN_STREAMING_LENGTH, which is found on utils.py,
than django.http.StreamingHttpResponse will be created,
else a django.http.HTTPResponse will be created instead
:param proxy_response: An Instance of urllib3.response.HTTPResponse that
will create an appropriate response
:param strict_cookies: Whether to only accept RFC-compliant cookies
:returns: Returns an appropriate response based on the proxy_response
content-length
"""
status = proxy_response.status
headers = proxy_response.headers
logger.debug('Proxy response headers: %s', headers)
content_type = headers.get('Content-Type')
logger.debug('Content-Type: %s', content_type)
if should_stream(proxy_response):
logger.info('Content-Length is bigger than %s', DEFAULT_AMT)
response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT),
status=status,
content_type=content_type)
else:
content = proxy_response.data or b''
response = HttpResponse(content, status=status,
content_type=content_type)
logger.info('Normalizing response headers')
set_response_headers(response, headers)
logger.debug('Response headers: %s', getattr(response, '_headers'))
cookies = proxy_response.headers.getlist('set-cookie')
logger.info('Checking for invalid cookies')
for cookie_string in cookies:
cookie_dict = cookie_from_string(cookie_string,
strict_cookies=strict_cookies)
# if cookie is invalid cookie_dict will be None
if cookie_dict:
response.set_cookie(**cookie_dict)
logger.debug('Response cookies: %s', response.cookies)
return response

View File

@@ -1,42 +0,0 @@
"""passbook app_gw rewriter"""
from passbook.app_gw.models import RewriteRule
RULE_CACHE = {}
class Context:
"""Empty class which we dynamically add attributes to"""
class Rewriter:
"""Apply rewrites"""
__application = None
__request = None
def __init__(self, application, request):
self.__application = application
self.__request = request
if self.__application.pk not in RULE_CACHE:
RULE_CACHE[self.__application.pk] = RewriteRule.objects.filter(
provider__in=[self.__application])
def __build_context(self, matches):
"""Build object with .0, .1, etc as groups and give access to request"""
context = Context()
for index, group_match in enumerate(matches.groups()):
setattr(context, "g%d" % (index + 1), group_match)
setattr(context, 'request', self.__request)
return context
def build(self):
"""Run all rules over path and return final path"""
path = self.__request.get_full_path()
for rule in RULE_CACHE[self.__application.pk]:
matches = rule.compiled_matcher.search(path)
if not matches:
continue
replace_context = self.__build_context(matches)
path = rule.replacement.format(context=replace_context)
if rule.halt:
return path
return path

View File

@@ -1,226 +0,0 @@
"""Utils from django-revproxy, slightly adjusted"""
import re
from wsgiref.util import is_hop_by_hop
from structlog import get_logger
try:
from http.cookies import SimpleCookie
COOKIE_PREFIX = ''
except ImportError:
from Cookie import SimpleCookie
COOKIE_PREFIX = 'Set-Cookie: '
#: List containing string constant that are used to represent headers that can
#: be ignored in the required_header function
IGNORE_HEADERS = (
'HTTP_ACCEPT_ENCODING', # We want content to be uncompressed so
# we remove the Accept-Encoding from
# original request
'HTTP_HOST',
'HTTP_REMOTE_USER',
)
# Default from HTTP RFC 2616
# See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
#: Variable that represent the default charset used
DEFAULT_CHARSET = 'latin-1'
#: List containing string constants that represents possible html content type
HTML_CONTENT_TYPES = (
'text/html',
'application/xhtml+xml'
)
#: Variable used to represent a minimal content size required for response
#: to be turned into stream
MIN_STREAMING_LENGTH = 4 * 1024 # 4KB
#: Regex used to find charset in a html content type
_get_charset_re = re.compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I)
def is_html_content_type(content_type):
"""Function used to verify if the parameter is a proper html content type
:param content_type: String variable that represent a content-type
:returns: A boolean value stating if the content_type is a valid html
content type
"""
for html_content_type in HTML_CONTENT_TYPES:
if content_type.startswith(html_content_type):
return True
return False
def should_stream(proxy_response):
"""Function to verify if the proxy_response must be converted into
a stream.This will be done by checking the proxy_response content-length
and verify if its length is bigger than one stipulated
by MIN_STREAMING_LENGTH.
:param proxy_response: An Instance of urllib3.response.HTTPResponse
:returns: A boolean stating if the proxy_response should
be treated as a stream
"""
content_type = proxy_response.headers.get('Content-Type')
if is_html_content_type(content_type):
return False
try:
content_length = int(proxy_response.headers.get('Content-Length', 0))
except ValueError:
content_length = 0
if not content_length or content_length > MIN_STREAMING_LENGTH:
return True
return False
def get_charset(content_type):
"""Function used to retrieve the charset from a content-type.If there is no
charset in the content type then the charset defined on DEFAULT_CHARSET
will be returned
:param content_type: A string containing a Content-Type header
:returns: A string containing the charset
"""
if not content_type:
return DEFAULT_CHARSET
matched = _get_charset_re.search(content_type)
if matched:
# Extract the charset and strip its double quotes
return matched.group('charset').replace('"', '')
return DEFAULT_CHARSET
def required_header(header):
"""Function that verify if the header parameter is a essential header
:param header: A string represented a header
:returns: A boolean value that represent if the header is required
"""
if header in IGNORE_HEADERS:
return False
if header.startswith('HTTP_') or header == 'CONTENT_TYPE':
return True
return False
def set_response_headers(response, response_headers):
"""Set response's header"""
for header, value in response_headers.items():
if is_hop_by_hop(header) or header.lower() == 'set-cookie':
continue
response[header.title()] = value
logger.debug('Response headers: %s', getattr(response, '_headers'))
def normalize_request_headers(request):
"""Function used to transform header, replacing 'HTTP\\_' to ''
and replace '_' to '-'
:param request: A HttpRequest that will be transformed
:returns: A dictionary with the normalized headers
"""
norm_headers = {}
for header, value in request.META.items():
if required_header(header):
norm_header = header.replace('HTTP_', '').title().replace('_', '-')
norm_headers[norm_header] = value
return norm_headers
def encode_items(items):
"""Function that encode all elements in the list of items passed as
a parameter
:param items: A list of tuple
:returns: A list of tuple with all items encoded in 'utf-8'
"""
encoded = []
for key, values in items:
for value in values:
encoded.append((key.encode('utf-8'), value.encode('utf-8')))
return encoded
logger = get_logger()
def cookie_from_string(cookie_string, strict_cookies=False):
    """Parser for HTTP header set-cookie
    The return from this function will be used as parameters for
    django's response.set_cookie method. Because set_cookie doesn't
    have parameter comment, this cookie attribute will be ignored.
    :param cookie_string: A string representing a valid cookie
    :param strict_cookies: Whether to only accept RFC-compliant cookies
    :returns: A dictionary containing the cookie_string attributes,
        or None if the cookie could not be parsed
    """
    if strict_cookies:
        # Delegate strict parsing to the stdlib SimpleCookie
        cookies = SimpleCookie(COOKIE_PREFIX + cookie_string)
        if not cookies.keys():
            return None
        cookie_name, = cookies.keys()
        cookie_dict = {k: v for k, v in cookies[cookie_name].items()
                       if v and k != 'comment'}
        cookie_dict['key'] = cookie_name
        cookie_dict['value'] = cookies[cookie_name].value
        return cookie_dict
    valid_attrs = ('path', 'domain', 'comment', 'expires',
                   'max_age', 'httponly', 'secure')
    cookie_dict = {}
    cookie_parts = cookie_string.split(';')
    try:
        cookie_dict['key'], cookie_dict['value'] = \
            cookie_parts[0].split('=', 1)
        cookie_dict['value'] = cookie_dict['value'].replace('"', '')
    except ValueError:
        logger.warning('Invalid cookie: `%s`', cookie_string)
        return None
    if cookie_dict['value'].startswith('='):
        logger.warning('Invalid cookie: `%s`', cookie_string)
        return None
    for part in cookie_parts[1:]:
        if '=' in part:
            attr, value = part.split('=', 1)
            value = value.strip()
        else:
            attr = part
            value = ''
        # BUG FIX: header attribute names use dashes (e.g. ``Max-Age``)
        # while valid_attrs / set_cookie use underscores (``max_age``);
        # without this normalization Max-Age was always dropped as an
        # "unknown" attribute even though it is listed as valid.
        attr = attr.strip().lower().replace('-', '_')
        if not attr:
            continue
        if attr in valid_attrs:
            if attr in ('httponly', 'secure'):
                # Presence-only flags; any value is irrelevant
                cookie_dict[attr] = True
            elif attr == 'comment':
                # ignoring comment attr as explained in the
                # function docstring
                continue
            else:
                cookie_dict[attr] = value
        else:
            logger.warning('Unknown cookie attribute %s', attr)
    return cookie_dict

View File

@@ -1,5 +0,0 @@
"""Application Security Gateway settings"""
# Extra Django apps required by the application security gateway
INSTALLED_APPS = [
    'channels'
]
# ASGI entry point used by channels for websocket routing
ASGI_APPLICATION = "passbook.app_gw.websocket.routing.application"

View File

@@ -1,19 +0,0 @@
"""passbook app_gw cache clean signals"""
from django.core.cache import cache
from django.db.models.signals import post_save
from django.dispatch import receiver
from structlog import get_logger
from passbook.app_gw.models import ApplicationGatewayProvider
from passbook.app_gw.proxy.handler import IGNORED_HOSTNAMES_KEY
LOGGER = get_logger(__name__)
@receiver(post_save)
# pylint: disable=unused-argument
def invalidate_app_gw_cache(sender, instance, **kwargs):
    """Invalidate Policy cache when app_gw is updated"""
    # Listens on every post_save and filters by type, so only
    # ApplicationGatewayProvider saves clear the ignored-hostnames key.
    if isinstance(instance, ApplicationGatewayProvider):
        LOGGER.debug("Invalidating cache for ignored hostnames")
        cache.delete(IGNORED_HOSTNAMES_KEY)

View File

@@ -1,2 +0,0 @@
"""passbook app_gw urls"""
urlpatterns = []

View File

@@ -1,83 +0,0 @@
"""websocket proxy consumer"""
import threading
from ssl import CERT_NONE
import websocket
from channels.generic.websocket import WebsocketConsumer
from structlog import get_logger
from passbook.app_gw.models import ApplicationGatewayProvider
LOGGER = get_logger(__name__)
class ProxyConsumer(WebsocketConsumer):
    """Proxy websocket connection to upstream"""

    # Decoded headers from the incoming connection scope
    _headers_dict = {}
    # ApplicationGatewayProvider matched for the requested host
    _app_gw = None
    # websocket.WebSocketApp connected to the upstream server
    _client = None
    # Thread driving the upstream client's run_forever loop
    _thread = None

    def _fix_headers(self, input_dict):
        """Fix headers from bytestrings to normal strings"""
        return {
            key.decode('utf-8'): value.decode('utf-8')
            for key, value in dict(input_dict).items()
        }

    def connect(self):
        """Extract host header, lookup in database and proxy connection"""
        self._headers_dict = self._fix_headers(dict(self.scope.get('headers')))
        host = self._headers_dict.pop('host')
        query_string = self.scope.get('query_string').decode('utf-8')
        matches = ApplicationGatewayProvider.objects.filter(
            server_name__contains=[host],
            enabled=True)
        if matches.exists():
            self._app_gw = matches.first()
            # TODO: Get upstream that starts with wss or
            # Rewrite the http(s) upstream URL to the ws(s) scheme
            upstream = self._app_gw.upstream[0].replace('http', 'ws') + self.scope.get('path')
            if query_string:
                upstream += '?' + query_string
            sslopt = {}
            if not self._app_gw.upstream_ssl_verification:
                # Skip certificate verification towards the upstream
                sslopt = {"cert_reqs": CERT_NONE}
            self._client = websocket.WebSocketApp(
                url=upstream,
                subprotocols=self.scope.get('subprotocols'),
                header=self._headers_dict,
                on_message=self._client_on_message_handler(),
                on_error=self._client_on_error_handler(),
                on_close=self._client_on_close_handler(),
                on_open=self._client_on_open_handler())
            LOGGER.debug("Accepting connection for %s", host)
            # run_forever blocks, so the upstream client gets its own thread
            self._thread = threading.Thread(target=lambda: self._client.run_forever(sslopt=sslopt))
            self._thread.start()

    def _client_on_open_handler(self):
        """Return handler accepting the browser connection once the upstream handshake finished"""
        return lambda ws: self.accept(self._client.sock.handshake_response.subprotocol)

    def _client_on_message_handler(self):
        """Return handler forwarding upstream messages to the browser"""
        # pylint: disable=unused-argument,invalid-name
        def message_handler(ws, message):
            if isinstance(message, str):
                self.send(text_data=message)
            else:
                self.send(bytes_data=message)
        return message_handler

    def _client_on_error_handler(self):
        """Return handler that reports upstream errors"""
        return lambda ws, error: print(error)

    def _client_on_close_handler(self):
        """Return handler closing the browser connection when upstream closes"""
        return lambda ws: self.disconnect(0)

    def disconnect(self, code):
        # Close the upstream connection when the browser disconnects
        self._client.close()

    def receive(self, text_data=None, bytes_data=None):
        # Forward browser data to the upstream with the matching opcode
        if text_data:
            opcode = websocket.ABNF.OPCODE_TEXT
        if bytes_data:
            opcode = websocket.ABNF.OPCODE_BINARY
        self._client.send(text_data or bytes_data, opcode)

View File

@@ -1,17 +0,0 @@
"""app_gw websocket proxy"""
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.conf.urls import url
from passbook.app_gw.websocket.consumer import ProxyConsumer
# Catch-all route: every websocket path is handled by the proxy consumer
websocket_urlpatterns = [
    url(r'^(.*)$', ProxyConsumer),
]

application = ProtocolTypeRouter({
    # (http->django views is added by default)
    'websocket': AuthMiddlewareStack(
        URLRouter(websocket_urlpatterns)
    ),
})

View File

@@ -10,7 +10,7 @@ from structlog import get_logger
from passbook.lib.models import UUIDModel
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class AuditEntry(UUIDModel):
"""An individual audit log entry"""

View File

@@ -6,7 +6,7 @@ from structlog import get_logger
from passbook.lib.config import CONFIG
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class PassbookCoreConfig(AppConfig):
"""passbook core app config"""
@@ -22,6 +22,6 @@ class PassbookCoreConfig(AppConfig):
for factors_to_load in factors_to_load:
try:
import_module(factors_to_load)
LOGGER.info("Loaded %s", factors_to_load)
LOGGER.info("Loaded factor", factor_class=factors_to_load)
except ImportError as exc:
LOGGER.debug(exc)

View File

@@ -3,7 +3,7 @@ from structlog import get_logger
from passbook.core.auth.factor import AuthenticationFactor
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class DummyFactor(AuthenticationFactor):

View File

@@ -19,7 +19,7 @@ from passbook.core.tasks import send_email
from passbook.lib.config import CONFIG
from passbook.lib.utils.reflection import path_to_class
LOGGER = get_logger(__name__)
LOGGER = get_logger()
def authenticate(request, backends, **credentials):
@@ -31,14 +31,14 @@ def authenticate(request, backends, **credentials):
signature = Signature.from_callable(backend.authenticate)
signature.bind(request, **credentials)
except TypeError:
LOGGER.debug("Backend %s doesn't accept our arguments", backend)
LOGGER.debug("Backend doesn't accept our arguments", backend=backend)
# This backend doesn't accept these credentials as arguments. Try the next one.
continue
LOGGER.debug('Attempting authentication with %s...', backend)
LOGGER.debug('Attempting authentication...', backend=backend)
try:
user = backend.authenticate(request, **credentials)
except PermissionDenied:
LOGGER.debug('Backend %r threw PermissionDenied', backend)
LOGGER.debug('Backend threw PermissionDenied', backend=backend)
# This backend says to stop in our tracks - this user should not be allowed in at all.
break
if user is None:
@@ -104,5 +104,5 @@ class PasswordFactor(FormView, AuthenticationFactor):
return self.form_invalid(form)
except PermissionDenied:
# User was found, but permission was denied (i.e. user is not active)
LOGGER.debug("Denied access to %s", kwargs)
LOGGER.debug("Denied access", **kwargs)
return self.authenticator.user_invalid()

View File

@@ -14,7 +14,7 @@ from passbook.lib.utils.reflection import class_to_path, path_to_class
from passbook.lib.utils.urls import is_url_absolute
from passbook.policy.engine import PolicyEngine
LOGGER = get_logger(__name__)
LOGGER = get_logger()
def _redirect_with_qs(view, get_query_set=None):
"""Wrapper to redirect whilst keeping GET Parameters"""
@@ -61,10 +61,13 @@ class AuthenticationView(UserPassesTestMixin, View):
_all_factors = Factor.objects.filter(enabled=True).order_by('order').select_subclasses()
pending_factors = []
for factor in _all_factors:
LOGGER.debug("Checking if factor applies to user",
factor=factor, user=self.pending_user)
policy_engine = PolicyEngine(factor.policies.all())
policy_engine.for_user(self.pending_user).with_request(self.request).build()
if policy_engine.passing:
pending_factors.append((factor.uuid.hex, factor.type))
LOGGER.debug("Factor applies", factor=factor, user=self.pending_user)
return pending_factors
def dispatch(self, request, *args, **kwargs):
@@ -100,17 +103,18 @@ class AuthenticationView(UserPassesTestMixin, View):
def get(self, request, *args, **kwargs):
"""pass get request to current factor"""
LOGGER.debug("Passing GET to %s", class_to_path(self._current_factor_class.__class__))
LOGGER.debug("Passing GET", view_class=class_to_path(self._current_factor_class.__class__))
return self._current_factor_class.get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
"""pass post request to current factor"""
LOGGER.debug("Passing POST to %s", class_to_path(self._current_factor_class.__class__))
LOGGER.debug("Passing POST", view_class=class_to_path(self._current_factor_class.__class__))
return self._current_factor_class.post(request, *args, **kwargs)
def user_ok(self):
"""Redirect to next Factor"""
LOGGER.debug("Factor %s passed", class_to_path(self._current_factor_class.__class__))
LOGGER.debug("Factor passed",
factor_class=class_to_path(self._current_factor_class.__class__))
# Remove passed factor from pending factors
current_factor_tuple = (self.current_factor.uuid.hex,
class_to_path(self._current_factor_class.__class__))
@@ -123,7 +127,7 @@ class AuthenticationView(UserPassesTestMixin, View):
self.request.session[AuthenticationView.SESSION_PENDING_FACTORS] = \
self.pending_factors
self.request.session[AuthenticationView.SESSION_FACTOR] = next_factor
LOGGER.debug("Rendering Factor is %s", next_factor)
LOGGER.debug("Rendering Factor", next_factor=next_factor)
return _redirect_with_qs('passbook_core:auth-process', self.request.GET)
# User passed all factors
LOGGER.debug("User passed all factors, logging in")
@@ -140,7 +144,7 @@ class AuthenticationView(UserPassesTestMixin, View):
"""User Successfully passed all factors"""
backend = self.request.session[AuthenticationView.SESSION_USER_BACKEND]
login(self.request, self.pending_user, backend=backend)
LOGGER.debug("Logged in user %s", self.pending_user)
LOGGER.debug("Logged in", user=self.pending_user)
# Cleanup
self.cleanup()
next_param = self.request.GET.get('next', None)

View File

@@ -9,7 +9,7 @@ from passbook.core.models import User
from passbook.lib.config import CONFIG
from passbook.lib.utils.ui import human_list
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class LoginForm(forms.Form):
"""Allow users to login"""

View File

@@ -7,7 +7,7 @@ from structlog import get_logger
from passbook.core.models import User
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class Command(BaseCommand):
"""Import users from CSV file"""

View File

@@ -8,7 +8,7 @@ from structlog import get_logger
from passbook.lib.config import CONFIG
from passbook.root.wsgi import application
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class Command(BaseCommand):

View File

@@ -6,7 +6,7 @@ from structlog import get_logger
from passbook.root.celery import CELERY_APP
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class Command(BaseCommand):

View File

@@ -3,7 +3,6 @@ import re
from datetime import timedelta
from random import SystemRandom
from time import sleep
from typing import List
from uuid import uuid4
from django.contrib.auth.models import AbstractUser
@@ -17,28 +16,16 @@ from structlog import get_logger
from passbook.core.signals import password_changed
from passbook.lib.models import CreatedUpdatedModel, UUIDModel
from passbook.policy.exceptions import PolicyException
from passbook.policy.struct import PolicyRequest, PolicyResult
LOGGER = get_logger(__name__)
LOGGER = get_logger()
def default_nonce_duration():
"""Default duration a Nonce is valid"""
return now() + timedelta(hours=4)
class PolicyResult:
"""Small data-class to hold policy results"""
passing: bool = False
messages: List[str] = []
def __init__(self, passing: bool, *messages: str):
self.passing = passing
self.messages = messages
def __str__(self):
return f"<PolicyResult passing={self.passing}>"
class Group(UUIDModel):
"""Custom Group model which supports a basic hierarchy"""
@@ -243,9 +230,9 @@ class Policy(UUIDModel, CreatedUpdatedModel):
return self.name
return "%s action %s" % (self.name, self.action)
def passes(self, user: User) -> PolicyResult:
def passes(self, request: PolicyRequest) -> PolicyResult:
"""Check if user instance passes this policy"""
raise NotImplementedError()
raise PolicyException()
class FieldMatcherPolicy(Policy):
"""Policy which checks if a field of the User model matches/doesn't match a
@@ -287,13 +274,13 @@ class FieldMatcherPolicy(Policy):
description = "%s: %s" % (self.name, description)
return description
def passes(self, user: User) -> PolicyResult:
def passes(self, request: PolicyRequest) -> PolicyResult:
"""Check if user instance passes this role"""
if not hasattr(user, self.user_field):
if not hasattr(request.user, self.user_field):
raise ValueError("Field does not exist")
user_field_value = getattr(user, self.user_field, None)
LOGGER.debug("Checked '%s' %s with '%s'...",
user_field_value, self.match_action, self.value)
user_field_value = getattr(request.user, self.user_field, None)
LOGGER.debug("Checking field", value=user_field_value,
action=self.match_action, should_be=self.value)
passes = False
if self.match_action == FieldMatcherPolicy.MATCH_STARTSWITH:
passes = user_field_value.startswith(self.value)
@@ -306,8 +293,6 @@ class FieldMatcherPolicy(Policy):
passes = bool(pattern.match(user_field_value))
if self.match_action == FieldMatcherPolicy.MATCH_EXACT:
passes = user_field_value == self.value
LOGGER.debug("User got '%r'", passes)
return PolicyResult(passes)
class Meta:
@@ -327,11 +312,11 @@ class PasswordPolicy(Policy):
form = 'passbook.core.forms.policies.PasswordPolicyForm'
def passes(self, user: User) -> PolicyResult:
def passes(self, request: PolicyRequest) -> PolicyResult:
# Only check if password is being set
if not hasattr(user, '__password__'):
if not hasattr(request.user, '__password__'):
return PolicyResult(True)
password = getattr(user, '__password__')
password = getattr(request.user, '__password__')
filter_regex = r''
if self.amount_lowercase > 0:
@@ -341,7 +326,6 @@ class PasswordPolicy(Policy):
if self.amount_symbols > 0:
filter_regex += r'[%s]{%d,}' % (self.symbol_charset, self.amount_symbols)
result = bool(re.compile(filter_regex).match(password))
LOGGER.debug("User got %r", result)
if not result:
return PolicyResult(result, self.error_message)
return PolicyResult(result)
@@ -378,7 +362,7 @@ class WebhookPolicy(Policy):
form = 'passbook.core.forms.policies.WebhookPolicyForm'
def passes(self, user: User) -> PolicyResult:
def passes(self, request: PolicyRequest) -> PolicyResult:
"""Call webhook asynchronously and report back"""
raise NotImplementedError()
@@ -397,10 +381,10 @@ class DebugPolicy(Policy):
form = 'passbook.core.forms.policies.DebugPolicyForm'
def passes(self, user: User) -> PolicyResult:
def passes(self, request: PolicyRequest) -> PolicyResult:
"""Wait random time then return result"""
wait = SystemRandom().randrange(self.wait_min, self.wait_max)
LOGGER.debug("Policy '%s' waiting for %ds", self.name, wait)
LOGGER.debug("Policy waiting", policy=self, delay=wait)
sleep(wait)
return PolicyResult(self.result, 'Debugging')
@@ -416,8 +400,8 @@ class GroupMembershipPolicy(Policy):
form = 'passbook.core.forms.policies.GroupMembershipPolicyForm'
def passes(self, user: User) -> PolicyResult:
return PolicyResult(self.group.user_set.filter(pk=user.pk).exists())
def passes(self, request: PolicyRequest) -> PolicyResult:
return PolicyResult(self.group.user_set.filter(pk=request.user.pk).exists())
class Meta:
@@ -429,10 +413,11 @@ class SSOLoginPolicy(Policy):
form = 'passbook.core.forms.policies.SSOLoginPolicyForm'
def passes(self, user) -> PolicyResult:
def passes(self, request: PolicyRequest) -> PolicyResult:
"""Check if user instance passes this policy"""
from passbook.core.auth.view import AuthenticationView
return PolicyResult(user.session.get(AuthenticationView.SESSION_IS_SSO_LOGIN, False))
is_sso_login = request.user.session.get(AuthenticationView.SESSION_IS_SSO_LOGIN, False)
return PolicyResult(is_sso_login)
class Meta:

View File

@@ -7,7 +7,7 @@ from structlog import get_logger
from passbook.core.exceptions import PasswordPolicyInvalid
LOGGER = get_logger(__name__)
LOGGER = get_logger()
user_signed_up = Signal(providing_args=['request', 'user'])
invitation_created = Signal(providing_args=['request', 'invitation'])
@@ -35,7 +35,7 @@ def invalidate_policy_cache(sender, instance, **kwargs):
"""Invalidate Policy cache when policy is updated"""
from passbook.core.models import Policy
if isinstance(instance, Policy):
LOGGER.debug("Invalidating cache for %s", instance.pk)
LOGGER.debug("Invalidating policy cache", policy=instance)
keys = cache.keys("%s#*" % instance.pk)
cache.delete_many(keys)
LOGGER.debug("Deleted %d keys", len(keys))

View File

@@ -0,0 +1,2 @@
User-agent: *
Disallow: /

View File

@@ -10,7 +10,7 @@ from passbook.core.models import Nonce
from passbook.lib.config import CONFIG
from passbook.root.celery import CELERY_APP
LOGGER = get_logger(__name__)
LOGGER = get_logger()
@CELERY_APP.task()
def send_email(to_address, subject, template, context):

View File

@@ -5,7 +5,7 @@ from structlog import get_logger
from passbook.core.auth import view
from passbook.core.views import authentication, overview, user
LOGGER = get_logger(__name__)
LOGGER = get_logger()
urlpatterns = [
# Authentication views

View File

@@ -6,7 +6,7 @@ from structlog import get_logger
from passbook.core.models import Application
from passbook.policy.engine import PolicyEngine
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class AccessMixin:
"""Mixin class for usage in Authorization views.

View File

@@ -1,5 +1,5 @@
"""passbook core authentication views"""
from typing import Dict
from typing import Dict, Optional
from django.contrib import messages
from django.contrib.auth import login, logout
@@ -20,7 +20,7 @@ from passbook.core.signals import invitation_used, user_signed_up
from passbook.core.tasks import send_email
from passbook.lib.config import CONFIG
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class LoginView(UserPassesTestMixin, FormView):
@@ -53,7 +53,7 @@ class LoginView(UserPassesTestMixin, FormView):
self.template_name = 'login/with_sources.html'
return super().get_context_data(**kwargs)
def get_user(self, uid_value) -> User:
def get_user(self, uid_value) -> Optional[User]:
"""Find user instance. Returns None if no user was found."""
for search_field in CONFIG.y('passbook.uid_fields'):
# Workaround for E-Mail -> email
@@ -61,7 +61,7 @@ class LoginView(UserPassesTestMixin, FormView):
search_field = 'email'
users = User.objects.filter(**{search_field: uid_value})
if users.exists():
LOGGER.debug("Found user %s with uid_field %s", users.first(), search_field)
LOGGER.debug("Found user", user=users.first(), uid_field=search_field)
return users.first()
return None

View File

@@ -8,7 +8,7 @@ from structlog import get_logger
from passbook.core.models import Policy, PolicyResult, User
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class HaveIBeenPwendPolicy(Policy):
"""Check if password is on HaveIBeenPwned's list by upload the first

View File

@@ -5,7 +5,7 @@ from structlog import get_logger
from passbook.ldap.ldap_connector import LDAPConnector
from passbook.ldap.models import LDAPSource
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class LDAPBackend(ModelBackend):

View File

@@ -9,7 +9,7 @@ from passbook.core.models import User
from passbook.ldap.models import LDAPSource
from passbook.lib.config import CONFIG
LOGGER = get_logger(__name__)
LOGGER = get_logger()
USERNAME_FIELD = CONFIG.y('ldap.username_field', 'sAMAccountName')
LOGIN_FIELD = CONFIG.y('ldap.login_field', 'userPrincipalName')
@@ -222,7 +222,7 @@ class LDAPConnector:
attrs = {
'distinguishedName': str(user_dn),
'cn': str(username),
'description': str('t=' + time()),
'description': 't=' + str(time()),
'sAMAccountName': str(username_trunk),
'givenName': str(user.name),
'displayName': str(user.username),

View File

@@ -1,37 +0,0 @@
"""QueueListener that can be configured from logging.dictConfig"""
from atexit import register
from logging.config import ConvertingList
from logging.handlers import QueueHandler, QueueListener
from queue import Queue
def _resolve_handlers(_list):
"""Evaluates ConvertingList by iterating over it"""
if not isinstance(_list, ConvertingList):
return _list
# Indexing the list performs the evaluation.
return [_list[i] for i in range(len(_list))]
class QueueListenerHandler(QueueHandler):
    """QueueListener that can be configured from logging.dictConfig"""

    def __init__(self, handlers, auto_run=True, queue=None):
        # BUG FIX: the previous default ``queue=Queue(-1)`` was created
        # once at definition time and silently shared between every
        # handler instance; build a fresh unbounded queue per instance.
        if queue is None:
            queue = Queue(-1)
        super().__init__(queue)
        handlers = _resolve_handlers(handlers)
        self._listener = QueueListener(
            self.queue,
            *handlers,
            respect_handler_level=True)
        if auto_run:
            self.start()
            # Stop the background thread cleanly at interpreter exit
            register(self.stop)

    def start(self):
        """start background thread"""
        self._listener.start()

    def stop(self):
        """stop background thread"""
        self._listener.stop()

View File

@@ -1,7 +1,7 @@
"""passbook sentry integration"""
from structlog import get_logger
LOGGER = get_logger(__name__)
LOGGER = get_logger()
def before_send(event, hint):

View File

@@ -5,7 +5,7 @@ from structlog import get_logger
register = template.Library()
LOGGER = get_logger(__name__)
LOGGER = get_logger()
@register.simple_tag(takes_context=True)

View File

@@ -3,7 +3,7 @@ from django import template
from structlog import get_logger
register = template.Library()
LOGGER = get_logger(__name__)
LOGGER = get_logger()
def get_key_unique(context):

View File

@@ -6,7 +6,7 @@ from structlog import get_logger
from passbook.lib.config import CONFIG
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class PassbookOAuthClientConfig(AppConfig):
"""passbook oauth_client config"""
@@ -22,6 +22,6 @@ class PassbookOAuthClientConfig(AppConfig):
for source_type in source_types_to_load:
try:
import_module(source_type)
LOGGER.info("Loaded %s", source_type)
LOGGER.info("Loaded source_type", source_class=source_type)
except ImportError as exc:
LOGGER.debug(exc)

View File

@@ -11,7 +11,7 @@ from requests.exceptions import RequestException
from requests_oauthlib import OAuth1
from structlog import get_logger
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class BaseOAuthClient:

View File

@@ -10,7 +10,7 @@ from passbook.oauth_client.source_types.manager import MANAGER, RequestKind
from passbook.oauth_client.utils import user_get_or_create
from passbook.oauth_client.views.core import OAuthCallback
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class AzureADOAuth2Client(OAuth2Client):

View File

@@ -9,7 +9,7 @@ from passbook.oauth_client.source_types.manager import MANAGER, RequestKind
from passbook.oauth_client.utils import user_get_or_create
from passbook.oauth_client.views.core import OAuthCallback, OAuthRedirect
LOGGER = get_logger(__name__)
LOGGER = get_logger()
@MANAGER.source(kind=RequestKind.redirect, name='Discord')

View File

@@ -5,7 +5,7 @@ from structlog import get_logger
from passbook.oauth_client.views.core import OAuthCallback, OAuthRedirect
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class RequestKind(Enum):
"""Enum of OAuth Request types"""
@@ -27,7 +27,7 @@ class SourceTypeManager:
self.__source_types[kind] = {}
self.__source_types[kind][name.lower()] = cls
self.__names.append(name)
LOGGER.debug("Registered source '%s' for '%s'", cls.__name__, kind)
LOGGER.debug("Registered source", source_class=cls.__name__, kind=kind)
return cls
return inner_wrapper

View File

@@ -10,7 +10,7 @@ from passbook.oauth_client.source_types.manager import MANAGER, RequestKind
from passbook.oauth_client.utils import user_get_or_create
from passbook.oauth_client.views.core import OAuthCallback, OAuthRedirect
LOGGER = get_logger(__name__)
LOGGER = get_logger()
@MANAGER.source(kind=RequestKind.redirect, name='reddit')

View File

@@ -10,7 +10,7 @@ from passbook.oauth_client.source_types.manager import MANAGER, RequestKind
from passbook.oauth_client.utils import user_get_or_create
from passbook.oauth_client.views.core import OAuthCallback
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class SupervisrOAuth2Client(OAuth2Client):

View File

@@ -8,7 +8,7 @@ from passbook.oauth_client.source_types.manager import MANAGER, RequestKind
from passbook.oauth_client.utils import user_get_or_create
from passbook.oauth_client.views.core import OAuthCallback
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class TwitterOAuthClient(OAuthClient):

View File

@@ -16,7 +16,7 @@ from passbook.lib.utils.reflection import app
from passbook.oauth_client.clients import get_client
from passbook.oauth_client.models import OAuthSource, UserOAuthSourceConnection
LOGGER = get_logger(__name__)
LOGGER = get_logger()
# pylint: disable=too-few-public-methods

View File

@@ -14,7 +14,7 @@ from passbook.core.views.access import AccessMixin
from passbook.core.views.utils import LoadingView, PermissionDeniedView
from passbook.oauth_provider.models import OAuth2Provider
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class PassbookAuthorizationLoadingView(LoginRequiredMixin, LoadingView):

View File

@@ -4,7 +4,7 @@ from django.db.utils import InternalError, OperationalError, ProgrammingError
from django.urls import include, path
from structlog import get_logger
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class PassbookOIDCProviderConfig(AppConfig):
"""passbook auth oidc provider app config"""

View File

@@ -6,7 +6,7 @@ from structlog import get_logger
from passbook.core.models import Application
from passbook.policy.engine import PolicyEngine
LOGGER = get_logger(__name__)
LOGGER = get_logger()
def check_permissions(request, user, client):
"""Check permissions, used for

View File

@@ -9,7 +9,7 @@ from passbook.core.auth.factor import AuthenticationFactor
from passbook.otp.forms import OTPVerifyForm
from passbook.otp.views import OTP_SETTING_UP_KEY, EnableView
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class OTPFactor(FormView, AuthenticationFactor):
"""OTP Factor View"""

View File

@@ -23,7 +23,7 @@ from passbook.otp.utils import otpauth_url
OTP_SESSION_KEY = 'passbook_otp_key'
OTP_SETTING_UP_KEY = 'passbook_otp_setup'
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class UserSettingsView(LoginRequiredMixin, TemplateView):
"""View for user settings to control OTP"""

View File

@@ -6,9 +6,10 @@ from django.utils.timezone import now
from django.utils.translation import gettext as _
from structlog import get_logger
from passbook.core.models import Policy, PolicyResult, User
from passbook.core.models import Policy
from passbook.policy.struct import PolicyRequest, PolicyResult
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class PasswordExpiryPolicy(Policy):
@@ -20,15 +21,16 @@ class PasswordExpiryPolicy(Policy):
form = 'passbook.password_expiry_policy.forms.PasswordExpiryPolicyForm'
def passes(self, user: User) -> PolicyResult:
def passes(self, request: PolicyRequest) -> PolicyResult:
"""If password change date is more than x days in the past, call set_unusable_password
and show a notice"""
actual_days = (now() - user.password_change_date).days
days_since_expiry = (now() - (user.password_change_date + timedelta(days=self.days))).days
actual_days = (now() - request.user.password_change_date).days
days_since_expiry = (now() - (request.user.password_change_date + timedelta(days=self.days)
)).days
if actual_days >= self.days:
if not self.deny_only:
user.set_unusable_password()
user.save()
request.user.set_unusable_password()
request.user.save()
message = _(('Password expired %(days)d days ago. '
'Please update your password.') % {
'days': days_since_expiry

View File

@@ -7,30 +7,42 @@ from django.core.cache import cache
from django.http import HttpRequest
from structlog import get_logger
from passbook.core.models import Policy, PolicyResult, User
from passbook.policy.task import PolicyTask
from passbook.core.models import Policy, User
from passbook.policy.process import PolicyProcess
from passbook.policy.struct import PolicyRequest, PolicyResult
LOGGER = get_logger()
def _cache_key(policy, user):
return "policy_%s#%s" % (policy.uuid, user.pk)
return f"policy_{policy.pk}#{user.pk}"
class PolicyProcessInfo:
"""Dataclass to hold all information and communication channels to a process"""
process: PolicyProcess
connection: Connection
result: PolicyResult = None
policy: Policy
def __init__(self, process: PolicyProcess, connection: Connection, policy: Policy):
self.process = process
self.connection = connection
self.policy = policy
class PolicyEngine:
"""Orchestrate policy checking, launch tasks and return result"""
# __group = None
# __cached = None
policies: List[Policy] = []
__request: HttpRequest
__user: User
__proc_list: List[Tuple[Connection, PolicyTask]] = []
__processes: List[PolicyProcessInfo] = []
def __init__(self, policies, user: User = None, request: HttpRequest = None):
self.policies = policies
self.__request = request
self.__user = user
self.__processes = []
def for_user(self, user: User) -> 'PolicyEngine':
"""Check policies for user"""
@@ -42,56 +54,56 @@ class PolicyEngine:
self.__request = request
return self
def _select_subclasses(self) -> List[Policy]:
"""Make sure all Policies are their respective classes"""
return Policy.objects \
.filter(pk__in=[x.pk for x in self.policies]) \
.select_subclasses() \
.order_by('order')
def build(self) -> 'PolicyEngine':
"""Build task group"""
if not self.__user:
raise ValueError("User not set.")
cached_policies = []
kwargs = {
'__password__': getattr(self.__user, '__password__', None),
'session': dict(getattr(self.__request, 'session', {}).items()),
'request': self.__request,
}
for policy in self.policies:
request = PolicyRequest(self.__user)
request.http_request = self.__request
for policy in self._select_subclasses():
cached_policy = cache.get(_cache_key(policy, self.__user), None)
if cached_policy:
LOGGER.debug("Taking result from cache for %s", policy.pk.hex)
LOGGER.debug("Taking result from cache", policy=policy)
cached_policies.append(cached_policy)
else:
LOGGER.debug("Evaluating policy %s", policy.pk.hex)
LOGGER.debug("Evaluating policy", policy=policy)
our_end, task_end = Pipe(False)
task = PolicyTask()
task.ret = task_end
task.user = self.__user
task.policy = policy
task.params = kwargs
LOGGER.debug("Starting Process %s", task.__class__.__name__)
task = PolicyProcess(policy, request, task_end)
LOGGER.debug("Starting Process", for_policy=policy)
task.start()
self.__proc_list.append((our_end, task))
self.__processes.append(PolicyProcessInfo(process=task,
connection=our_end, policy=policy))
# If all policies are cached, we have an empty list here.
if self.__proc_list:
for _, running_proc in self.__proc_list:
running_proc.join()
for proc_info in self.__processes:
proc_info.process.join(proc_info.policy.timeout)
# Only call .recv() if no result is saved, otherwise we just deadlock here
if not proc_info.result:
proc_info.result = proc_info.connection.recv()
return self
@property
def result(self):
def result(self) -> Tuple[bool, List[str]]:
"""Get policy-checking result"""
results: List[PolicyResult] = []
messages: List[str] = []
for our_end, _ in self.__proc_list:
results.append(our_end.recv())
for policy_result in results:
for proc_info in self.__processes:
# passing = (policy_action == Policy.ACTION_ALLOW and policy_result) or \
# (policy_action == Policy.ACTION_DENY and not policy_result)
LOGGER.debug('Result=%r => %r', policy_result, policy_result.passing)
if policy_result.messages:
messages += policy_result.messages
if not policy_result.passing:
LOGGER.debug("Result", passing=proc_info.result.passing)
if proc_info.result.messages:
messages += proc_info.result.messages
if not proc_info.result.passing:
return False, messages
return True, messages
@property
def passing(self):
def passing(self) -> bool:
"""Only get true/false if user passes"""
return self.result[0]

View File

@@ -0,0 +1,4 @@
"""policy exceptions"""
class PolicyException(Exception):
    """Recoverable exception raised while a Policy is being evaluated.

    Policies raise this to signal an evaluation failure that the policy
    engine can handle gracefully instead of crashing the worker process.
    """

View File

@@ -0,0 +1,47 @@
"""passbook policy task"""
from multiprocessing import Process
from multiprocessing.connection import Connection
from structlog import get_logger
from passbook.core.models import Policy
from passbook.policy.exceptions import PolicyException
from passbook.policy.struct import PolicyRequest, PolicyResult
LOGGER = get_logger()
def _cache_key(policy, user):
return "policy_%s#%s" % (policy.uuid, user.pk)
class PolicyProcess(Process):
    """Evaluate a single policy within a separate process.

    The outcome is sent back to the parent process through ``connection``
    as a ``PolicyResult`` object.
    """

    # Write end of the pipe used to report the result to the parent process.
    connection: Connection
    policy: Policy
    request: PolicyRequest

    def __init__(self, policy: Policy, request: PolicyRequest, connection: Connection):
        super().__init__()
        self.policy = policy
        self.request = request
        self.connection = connection

    def run(self):
        """Task wrapper to run policy checking"""
        LOGGER.debug("Running policy", policy=self.policy,
                     user=self.request.user, process="PolicyProcess")
        try:
            policy_result = self.policy.passes(self.request)
        except PolicyException as exc:
            # Recoverable evaluation error: report it as a failing result
            # with the exception text as the message.
            LOGGER.debug("Policy raised recoverable exception", exc=exc)
            policy_result = PolicyResult(False, str(exc))
        # Invert result if policy.negate is set. Flip the `passing` flag and
        # rebuild the result instead of applying `not` to the object itself:
        # PolicyResult defines no __bool__, so `not policy_result` would
        # always evaluate to False and replace the result with a plain bool,
        # breaking the engine's access to `.passing`/`.messages`.
        if self.policy.negate:
            policy_result = PolicyResult(not policy_result.passing,
                                         *policy_result.messages)
        LOGGER.debug("Got result", policy=self.policy, result=policy_result,
                     process="PolicyProcess")
        self.connection.send(policy_result)

33
passbook/policy/struct.py Normal file
View File

@@ -0,0 +1,33 @@
"""policy structs"""
from typing import TYPE_CHECKING, List
from django.http import HttpRequest
if TYPE_CHECKING:
from passbook.core.models import User
class PolicyRequest:
    """Data-class to hold policy request data"""

    # User the policy is evaluated for.
    user: 'User'
    # Original HTTP request, attached by the engine when available.
    http_request: HttpRequest

    def __init__(self, user: 'User'):
        self.user = user

    def __str__(self):
        return "<PolicyRequest user=%s>" % self.user
class PolicyResult:
    """Small data-class to hold policy results.

    `passing` is the boolean outcome of the evaluation; `messages` carries
    optional human-readable explanations (e.g. why a policy denied).
    """

    passing: bool
    messages: List[str]

    def __init__(self, passing: bool, *messages: str):
        self.passing = passing
        # Store a real list so the List[str] annotation holds (the original
        # kept the raw *args tuple) and avoid the shared-mutable-default
        # pitfall of a class-level `messages: List[str] = []`.
        self.messages = list(messages)

    def __str__(self):
        return f"<PolicyResult passing={self.passing}>"

View File

@@ -1,39 +0,0 @@
"""passbook policy task"""
from multiprocessing import Process
from multiprocessing.connection import Connection
from typing import Any, Dict
from structlog import get_logger
from passbook.core.models import Policy, User
LOGGER = get_logger(__name__)
def _cache_key(policy, user):
    """Build the cache key for a policy result, unique per (policy, user)."""
    return "policy_%s#%s" % (policy.uuid, user.pk)
class PolicyTask(Process):
    """Evaluate a single policy within a separate process."""

    # Write end of the pipe used to report the result back to the parent.
    ret: Connection
    # User the policy is evaluated against; `params` entries get attached to it.
    user: User
    policy: Policy
    params: Dict[str, Any]

    def run(self):
        """Task wrapper to run policy checking"""
        # Attach request-scoped extras (e.g. session data) directly onto the
        # user object so the policy can read them during evaluation.
        for key, value in self.params.items():
            setattr(self.user, key, value)
        LOGGER.debug("Running policy `%s`#%s for user %s...", self.policy.name,
                     self.policy.pk.hex, self.user)
        policy_result = self.policy.passes(self.user)
        # Invert result if policy.negate is set
        # NOTE(review): `not policy_result` assumes passes() returns a plain
        # bool here; if it returns a result object, negation would always
        # yield False — confirm against Policy.passes().
        if self.policy.negate:
            policy_result = not policy_result
        LOGGER.debug("Policy %r#%s got %s", self.policy.name, self.policy.pk.hex, policy_result)
        # cache_key = _cache_key(self.policy, self.user)
        # cache.set(cache_key, (self.policy.action, policy_result, message))
        # LOGGER.debug("Cached entry as %s", cache_key)
        self.ret.send(policy_result)
        self.ret.close()

View File

@@ -9,7 +9,7 @@ from structlog import get_logger
# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
LOGGER = get_logger(__name__)
LOGGER = get_logger()
CELERY_APP = Celery('passbook')

View File

@@ -34,10 +34,8 @@ STATIC_ROOT = BASE_DIR + '/static'
SECRET_KEY = CONFIG.y('secret_key',
"9$@r!d^1^jrn#fk#1#@ks#9&i$^s#1)_13%$rwjrhd=e8jfi_s") # noqa Debug
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = CONFIG.y_bool('debug')
INTERNAL_IPS = ['127.0.0.1']
# ALLOWED_HOSTS = CONFIG.y('domains', []) + [CONFIG.y('primary_domain')]
ALLOWED_HOSTS = ['*']
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
@@ -49,8 +47,7 @@ AUTH_USER_MODEL = 'passbook_core.User'
CSRF_COOKIE_NAME = 'passbook_csrf'
SESSION_COOKIE_NAME = 'passbook_session'
SESSION_COOKIE_DOMAIN = CONFIG.y('primary_domain')
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_ENGINE = "django.contrib.sessions.backends.db"
SESSION_CACHE_ALIAS = "default"
LANGUAGE_COOKIE_NAME = 'passbook_language'
@@ -100,7 +97,8 @@ REST_FRAMEWORK = {
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": f"redis://{CONFIG.y('redis.host')}:6379/{CONFIG.y('redis.cache_db')}",
"LOCATION": (f"redis://:{CONFIG.y('redis.password')}@{CONFIG.y('redis.host')}:6379"
f"/{CONFIG.y('redis.cache_db')}"),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
@@ -270,13 +268,6 @@ with CONFIG.cd('log'):
'class': 'logging.StreamHandler',
'formatter': "colored" if DEBUG else "plain",
},
'queue': {
'level': DEBUG,
'class': 'passbook.lib.log.QueueListenerHandler',
'handlers': [
'cfg://handlers.console',
],
}
},
'loggers': {
}

View File

@@ -8,7 +8,7 @@ from structlog import get_logger
from passbook.core.views import error
from passbook.lib.utils.reflection import get_apps
LOGGER = get_logger(__name__)
LOGGER = get_logger()
admin.autodiscover()
admin.site.login = RedirectView.as_view(pattern_name='passbook_core:auth-login')
@@ -26,7 +26,7 @@ for _passbook_app in get_apps():
_passbook_app.label),
namespace=_passbook_app.label))
urlpatterns.append(_path)
LOGGER.debug("Loaded %s's URLs", _passbook_app.name)
LOGGER.debug("Mounted URLs", app_name=_passbook_app.name)
urlpatterns += [
# Administration

View File

@@ -6,12 +6,71 @@ It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from time import time
from django.core.wsgi import get_wsgi_application
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from structlog import get_logger
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'passbook.settings')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings")
application = SentryWsgiMiddleware(get_wsgi_application())
LOGGER = get_logger()
class WSGILogger:
    """Generalized WSGI middleware for request logging.

    Wraps the inner WSGI ``application`` and emits one structured log line
    per request with host, method, protocol, status, size and runtime.
    """

    def __init__(self, application):
        self.application = application

    def __healthcheck(self, start_response):
        # Answer health/readiness probes with an empty 204 without touching
        # the wrapped application or the log.
        start_response('204 OK', [])
        return [b'']

    def __call__(self, environ, start_response):
        start = time()
        status_codes = []
        content_lengths = []
        # HTTP_HOST may be absent entirely (HTTP/1.0 clients, raw probes);
        # default to '' so .startswith() cannot raise AttributeError.
        host_header = environ.get('HTTP_HOST', '')
        if host_header.startswith('kubernetes-healthcheck-host'):
            # Don't log kubernetes health/readiness requests
            return self.__healthcheck(start_response)

        def custom_start_response(status, response_headers, exc_info=None):
            # Capture numeric status and Content-Length for logging while
            # passing everything through to the real start_response.
            status_codes.append(int(status.partition(' ')[0]))
            for name, value in response_headers:
                if name.lower() == 'content-length':
                    content_lengths.append(int(value))
                    break
            return start_response(status, response_headers, exc_info)

        retval = self.application(environ, custom_start_response)
        runtime = int((time() - start) * 10**6)  # microseconds
        content_length = content_lengths[0] if content_lengths else 0
        # Guard against an app that never called start_response.
        status_code = status_codes[0] if status_codes else 0
        self.log(status_code, environ, content_length,
                 ip_header=None, runtime=runtime)
        return retval

    def log(self, status_code, environ, content_length, **kwargs):
        """
        Apache log format 'NCSA extended/combined log':
        "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\""
        see http://httpd.apache.org/docs/current/mod/mod_log_config.html#formats
        """
        ip_header = kwargs.get('ip_header', None)
        if ip_header:
            host = environ.get(ip_header, '')
        else:
            host = environ.get('REMOTE_ADDR', '')
        # Default to '' so a missing QUERY_STRING does not log as "?None".
        raw_query = environ.get('QUERY_STRING', '')
        query_string = f"?{raw_query}" if raw_query else ''
        LOGGER.info(f"{environ.get('PATH_INFO', '')}{query_string}",
                    host=host,
                    method=environ.get('REQUEST_METHOD', ''),
                    protocol=environ.get('SERVER_PROTOCOL', ''),
                    status=status_code,
                    size=content_length / 1000 if content_length > 0 else '-',
                    runtime=kwargs.get('runtime'))
application = WSGILogger(get_wsgi_application())

View File

@@ -6,7 +6,7 @@ from structlog import get_logger
from passbook.lib.config import CONFIG
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class PassbookSAMLIDPConfig(AppConfig):
"""passbook saml_idp app config"""
@@ -22,6 +22,6 @@ class PassbookSAMLIDPConfig(AppConfig):
for source_type in source_types_to_load:
try:
import_module(source_type)
LOGGER.info("Loaded %s", source_type)
LOGGER.info("Loaded SAML Processor", processor_class=source_type)
except ImportError as exc:
LOGGER.debug(exc)

View File

@@ -3,7 +3,7 @@
import time
import uuid
from bs4 import BeautifulSoup
from defusedxml import ElementTree
from structlog import get_logger
from passbook.saml_idp import exceptions, utils, xml_render
@@ -65,7 +65,7 @@ class Processor:
def __init__(self, remote):
self.name = remote.name
self._remote = remote
self._logger = get_logger(__name__)
self._logger = get_logger()
self._system_params['ISSUER'] = self._remote.issuer
self._logger.debug('processor configured')
@@ -204,13 +204,13 @@ class Processor:
if not str(self._request_xml.strip()).startswith('<'):
raise Exception('RequestXML is not valid XML; '
'it may need to be decoded or decompressed.')
soup = BeautifulSoup(self._request_xml, features="xml")
request = soup.findAll()[0]
root = ElementTree.fromstring(self._request_xml)
params = {}
params['ACS_URL'] = request['AssertionConsumerServiceURL']
params['REQUEST_ID'] = request['ID']
params['DESTINATION'] = request.get('Destination', '')
params['PROVIDER_NAME'] = request.get('ProviderName', '')
params['ACS_URL'] = root.attrib['AssertionConsumerServiceURL']
params['REQUEST_ID'] = root.attrib['ID']
params['DESTINATION'] = root.attrib.get('Destination', '')
params['PROVIDER_NAME'] = root.attrib.get('ProviderName', '')
self._request_params = params
def _reset(self, django_request, sp_config=None):

View File

@@ -9,7 +9,7 @@ from passbook.core.models import PropertyMapping, Provider
from passbook.lib.utils.reflection import class_to_path, path_to_class
from passbook.saml_idp.base import Processor
LOGGER = get_logger(__name__)
LOGGER = get_logger()
class SAMLProvider(Provider):

View File

@@ -21,7 +21,7 @@ from passbook.policy.engine import PolicyEngine
from passbook.saml_idp import exceptions
from passbook.saml_idp.models import SAMLProvider
LOGGER = get_logger(__name__)
LOGGER = get_logger()
URL_VALIDATOR = URLValidator(schemes=('http', 'https'))

Some files were not shown because too many files have changed in this diff Show More