Merge branch 'release/0.6.0' into production

commit 8898c1506e by Jan Dittberner, 2020-04-10 12:06:52 +02:00
29 changed files with 994 additions and 302 deletions

.gitignore (vendored): 3 changes

@ -41,3 +41,6 @@ Desktop.ini
.ropeproject .ropeproject
_build/ _build/
*.sqlite3 *.sqlite3
.vagrant/
.idea/
.env

Dockerfile (new file): 45 changes

@ -0,0 +1,45 @@
ARG DEBIAN_RELEASE=buster
FROM debian:$DEBIAN_RELEASE
LABEL maintainer="Jan Dittberner <jan@dittberner.info>"
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
build-essential \
dumb-init \
gettext \
git \
python3-dev \
python3-pip \
python3-setuptools \
python3-virtualenv \
python3-wheel \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*.*
RUN python3 -m pip install --prefix=/usr/local pipenv
ARG GVAGID=2000
ARG GVAUID=2000
ARG GVAAPP=gvafile
WORKDIR /srv/$GVAAPP
COPY Pipfile Pipfile.lock /srv/$GVAAPP/
RUN addgroup --gid $GVAGID $GVAAPP ; \
adduser --home /home/$GVAAPP --shell /bin/bash --uid $GVAUID --gid $GVAGID --disabled-password --gecos "User for gnuviechadmin component $GVAAPP" $GVAAPP
USER $GVAAPP
RUN python3 -m virtualenv --python=python3 /home/$GVAAPP/$GVAAPP-venv ; \
/home/$GVAAPP/$GVAAPP-venv/bin/python3 -m pip install -U pip ; \
VIRTUAL_ENV=/home/$GVAAPP/$GVAAPP-venv pipenv install --deploy --ignore-pipfile --dev
VOLUME /srv/$GVAAPP
COPY gvafile.sh /srv/
ENTRYPOINT ["dumb-init", "/srv/gvafile.sh"]

View file

@ -1,4 +1,4 @@
Copyright (c) 2014, 2015 Jan Dittberner Copyright (c) 2014-2020 Jan Dittberner
Permission is hereby granted, free of charge, to any person Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation obtaining a copy of this software and associated documentation

Pipfile (new file): 20 changes

@ -0,0 +1,20 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
coverage = "*"
releases = "*"
Sphinx = "*"
[packages]
amqp = "*"
anyjson = "*"
celery = "*"
kombu = "*"
pytz = "*"
redis = "*"
[requires]
python_version = "3.7"

Pipfile.lock (generated, new file): 351 changes

@ -0,0 +1,351 @@
{
"_meta": {
"hash": {
"sha256": "3af06993def20da8657b69e535b6c7a3e2282f9277b45ca5ae11289ea78a51b0"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.7"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"amqp": {
"hashes": [
"sha256:6e649ca13a7df3faacdc8bbb280aa9a6602d22fd9d545336077e573a1f4ff3b8",
"sha256:77f1aef9410698d20eaeac5b73a87817365f457a507d82edf292e12cbb83b08d"
],
"index": "pypi",
"version": "==2.5.2"
},
"anyjson": {
"hashes": [
"sha256:37812d863c9ad3e35c0734c42e0bf0320ce8c3bed82cd20ad54cb34d158157ba"
],
"index": "pypi",
"version": "==0.3.3"
},
"billiard": {
"hashes": [
"sha256:bff575450859a6e0fbc2f9877d9b715b0bbc07c3565bb7ed2280526a0cdf5ede",
"sha256:d91725ce6425f33a97dfa72fb6bfef0e47d4652acd98a032bd1a7fbf06d5fa6a"
],
"version": "==3.6.3.0"
},
"celery": {
"hashes": [
"sha256:108a0bf9018a871620936c33a3ee9f6336a89f8ef0a0f567a9001f4aa361415f",
"sha256:5b4b37e276033fe47575107a2775469f0b721646a08c96ec2c61531e4fe45f2a"
],
"index": "pypi",
"version": "==4.4.2"
},
"importlib-metadata": {
"hashes": [
"sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f",
"sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e"
],
"markers": "python_version < '3.8'",
"version": "==1.6.0"
},
"kombu": {
"hashes": [
"sha256:2d1cda774126a044d91a7ff5fa6d09edf99f46924ab332a810760fe6740e9b76",
"sha256:598e7e749d6ab54f646b74b2d2df67755dee13894f73ab02a2a9feb8870c7cb2"
],
"index": "pypi",
"version": "==4.6.8"
},
"pytz": {
"hashes": [
"sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d",
"sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"
],
"index": "pypi",
"version": "==2019.3"
},
"redis": {
"hashes": [
"sha256:0dcfb335921b88a850d461dc255ff4708294943322bd55de6cfd68972490ca1f",
"sha256:b205cffd05ebfd0a468db74f0eedbff8df1a7bfc47521516ade4692991bb0833"
],
"index": "pypi",
"version": "==3.4.1"
},
"vine": {
"hashes": [
"sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87",
"sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af"
],
"version": "==1.3.0"
},
"zipp": {
"hashes": [
"sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
"sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
],
"version": "==3.1.0"
}
},
"develop": {
"alabaster": {
"hashes": [
"sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
"sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
],
"version": "==0.7.12"
},
"babel": {
"hashes": [
"sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38",
"sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"
],
"version": "==2.8.0"
},
"certifi": {
"hashes": [
"sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304",
"sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"
],
"version": "==2020.4.5.1"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"coverage": {
"hashes": [
"sha256:03f630aba2b9b0d69871c2e8d23a69b7fe94a1e2f5f10df5049c0df99db639a0",
"sha256:046a1a742e66d065d16fb564a26c2a15867f17695e7f3d358d7b1ad8a61bca30",
"sha256:0a907199566269e1cfa304325cc3b45c72ae341fbb3253ddde19fa820ded7a8b",
"sha256:165a48268bfb5a77e2d9dbb80de7ea917332a79c7adb747bd005b3a07ff8caf0",
"sha256:1b60a95fc995649464e0cd48cecc8288bac5f4198f21d04b8229dc4097d76823",
"sha256:1f66cf263ec77af5b8fe14ef14c5e46e2eb4a795ac495ad7c03adc72ae43fafe",
"sha256:2e08c32cbede4a29e2a701822291ae2bc9b5220a971bba9d1e7615312efd3037",
"sha256:3844c3dab800ca8536f75ae89f3cf566848a3eb2af4d9f7b1103b4f4f7a5dad6",
"sha256:408ce64078398b2ee2ec08199ea3fcf382828d2f8a19c5a5ba2946fe5ddc6c31",
"sha256:443be7602c790960b9514567917af538cac7807a7c0c0727c4d2bbd4014920fd",
"sha256:4482f69e0701139d0f2c44f3c395d1d1d37abd81bfafbf9b6efbe2542679d892",
"sha256:4a8a259bf990044351baf69d3b23e575699dd60b18460c71e81dc565f5819ac1",
"sha256:513e6526e0082c59a984448f4104c9bf346c2da9961779ede1fc458e8e8a1f78",
"sha256:5f587dfd83cb669933186661a351ad6fc7166273bc3e3a1531ec5c783d997aac",
"sha256:62061e87071497951155cbccee487980524d7abea647a1b2a6eb6b9647df9006",
"sha256:641e329e7f2c01531c45c687efcec8aeca2a78a4ff26d49184dce3d53fc35014",
"sha256:65a7e00c00472cd0f59ae09d2fb8a8aaae7f4a0cf54b2b74f3138d9f9ceb9cb2",
"sha256:6ad6ca45e9e92c05295f638e78cd42bfaaf8ee07878c9ed73e93190b26c125f7",
"sha256:73aa6e86034dad9f00f4bbf5a666a889d17d79db73bc5af04abd6c20a014d9c8",
"sha256:7c9762f80a25d8d0e4ab3cb1af5d9dffbddb3ee5d21c43e3474c84bf5ff941f7",
"sha256:85596aa5d9aac1bf39fe39d9fa1051b0f00823982a1de5766e35d495b4a36ca9",
"sha256:86a0ea78fd851b313b2e712266f663e13b6bc78c2fb260b079e8b67d970474b1",
"sha256:8a620767b8209f3446197c0e29ba895d75a1e272a36af0786ec70fe7834e4307",
"sha256:922fb9ef2c67c3ab20e22948dcfd783397e4c043a5c5fa5ff5e9df5529074b0a",
"sha256:9fad78c13e71546a76c2f8789623eec8e499f8d2d799f4b4547162ce0a4df435",
"sha256:a37c6233b28e5bc340054cf6170e7090a4e85069513320275a4dc929144dccf0",
"sha256:c3fc325ce4cbf902d05a80daa47b645d07e796a80682c1c5800d6ac5045193e5",
"sha256:cda33311cb9fb9323958a69499a667bd728a39a7aa4718d7622597a44c4f1441",
"sha256:db1d4e38c9b15be1521722e946ee24f6db95b189d1447fa9ff18dd16ba89f732",
"sha256:eda55e6e9ea258f5e4add23bcf33dc53b2c319e70806e180aecbff8d90ea24de",
"sha256:f372cdbb240e09ee855735b9d85e7f50730dcfb6296b74b95a3e5dea0615c4c1"
],
"index": "pypi",
"version": "==5.0.4"
},
"docutils": {
"hashes": [
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
"sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
],
"version": "==0.16"
},
"idna": {
"hashes": [
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
],
"version": "==2.9"
},
"imagesize": {
"hashes": [
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
"sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
],
"version": "==1.2.0"
},
"jinja2": {
"hashes": [
"sha256:93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250",
"sha256:b0eaf100007721b5c16c1fc1eecb87409464edc10469ddc9a22a27a99123be49"
],
"version": "==2.11.1"
},
"markupsafe": {
"hashes": [
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
"sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
"sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
"sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
"sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
"sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
"sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
"sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
"sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
"sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
"sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
],
"version": "==1.1.1"
},
"packaging": {
"hashes": [
"sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3",
"sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"
],
"version": "==20.3"
},
"pygments": {
"hashes": [
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
],
"version": "==2.6.1"
},
"pyparsing": {
"hashes": [
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"version": "==2.4.7"
},
"pytz": {
"hashes": [
"sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d",
"sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"
],
"index": "pypi",
"version": "==2019.3"
},
"releases": {
"hashes": [
"sha256:555ae4c97a671a420281c1c782e9236be25157b449fdf20b4c4b293fe93db2f1",
"sha256:cb3435ba372a6807433800fbe473760cfa781171513f670f3c4b76983ac80f18"
],
"index": "pypi",
"version": "==1.6.3"
},
"requests": {
"hashes": [
"sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
"sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
],
"version": "==2.23.0"
},
"semantic-version": {
"hashes": [
"sha256:2a4328680073e9b243667b201119772aefc5fc63ae32398d6afafff07c4f54c0",
"sha256:2d06ab7372034bcb8b54f2205370f4aa0643c133b7e6dbd129c5200b83ab394b"
],
"version": "==2.6.0"
},
"six": {
"hashes": [
"sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
"sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
],
"version": "==1.14.0"
},
"snowballstemmer": {
"hashes": [
"sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0",
"sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"
],
"version": "==2.0.0"
},
"sphinx": {
"hashes": [
"sha256:6a099e6faffdc3ceba99ca8c2d09982d43022245e409249375edf111caf79ed3",
"sha256:b63a0c879c4ff9a4dffcb05217fa55672ce07abdeb81e33c73303a563f8d8901"
],
"index": "pypi",
"version": "==3.0.0"
},
"sphinxcontrib-applehelp": {
"hashes": [
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
"sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
],
"version": "==1.0.2"
},
"sphinxcontrib-devhelp": {
"hashes": [
"sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
],
"version": "==1.0.2"
},
"sphinxcontrib-htmlhelp": {
"hashes": [
"sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
"sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
],
"version": "==1.0.3"
},
"sphinxcontrib-jsmath": {
"hashes": [
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
],
"version": "==1.0.1"
},
"sphinxcontrib-qthelp": {
"hashes": [
"sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
"sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
],
"version": "==1.0.3"
},
"sphinxcontrib-serializinghtml": {
"hashes": [
"sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
"sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
],
"version": "==1.1.4"
},
"urllib3": {
"hashes": [
"sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
"sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
],
"version": "==1.25.8"
}
}
}

View file

@ -10,4 +10,4 @@ customer management at `Jan Dittberner IT-Consulting & -Solutions
Read the :doc:`Installation instructions <install>` to get started locally. Read the :doc:`Installation instructions <install>` to get started locally.
The project page for gvafile is at http://dev.gnuviech-server.de/gvafile. The project page for gvafile is at http://git.dittberner.info/gnuviech/gvafile.

Vagrantfile (vendored, new file): 29 changes

@ -0,0 +1,29 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure(2) do |config|
config.vm.box = "debian/buster64"
config.vm.hostname = "gvafile.local"
config.vm.network "private_network", ip: "172.16.3.4"
config.vm.synced_folder "../gvasalt/states/", "/srv/salt/"
config.vm.synced_folder "../gvasalt/pillar/", "/srv/pillar/"
config.vm.provider :libvirt do |libvirt|
libvirt.memory = 1024
end
config.vm.provision :shell, path: "change-vmdebootstrap-default-dhcp.sh"
config.vm.provision :salt do |salt|
salt.bootstrap_options = "-x python3"
salt.minion_id = "gvafile"
salt.masterless = true
salt.run_highstate = true
salt.verbose = true
salt.colorize = true
salt.log_level = "warning"
salt.grains_config = "salt/grains"
end
end

View file

@ -0,0 +1,15 @@
#!/bin/sh
set -e
debootstrap_network=/etc/systemd/network/99-dhcp.network
if grep -q '^Name=\\*' "${debootstrap_network}"; then
primary_nic=$(ls -1 /sys/class/net | grep -v lo |sort | head -1)
sed -i "s/^Name=e\\*/Name=${primary_nic}/" \
"${debootstrap_network}"
systemctl restart systemd-networkd.service
echo "Changed systemd network configuration"
else
echo "Systemd network configuration has already been changed"
fi

View file

@ -1,6 +1,16 @@
Changelog Changelog
========= =========
* :release:`0.6.0 <2020-04-10>`
* :support:`-` add Docker setup for lightweight local testing
* :support:`-` update Vagrant setup to libvirt and Debian Buster
* :support:`-` move fileservertasks to top level to keep the task names when
using Python 3
* :support:`2` use Pipenv for dependency management
* :release:`0.5.1 <2019-09-08>`
* :bug:`-` change dependency URLs
* :release:`0.5.0 <2015-01-29>` * :release:`0.5.0 <2015-01-29>`
* :feature:`-` add new task set_file_ssh_authorized_keys to add SSH keys for * :feature:`-` add new task set_file_ssh_authorized_keys to add SSH keys for
users users

View file

@ -7,32 +7,26 @@ gvafile is implemented as a set of `Celery`_ tasks.
.. _Celery: http://www.celeryproject.org/ .. _Celery: http://www.celeryproject.org/
The project module :py:mod:`gvafile`
====================================
.. automodule:: gvafile
:py:mod:`celery <gvafile.celery>`
---------------------------------
.. automodule:: gvafile.celery
:members:
:py:mod:`settings <gvafile.settings>`
-------------------------------------
.. automodule:: gvafile.settings
:members:
:py:mod:`fileservertasks` module :py:mod:`fileservertasks` module
================================ ================================
.. automodule:: fileservertasks .. automodule:: fileservertasks
:py:mod:`celery <fileservertasks.celery>`
-----------------------------------------
.. automodule:: fileservertasks.celery
:members:
:py:mod:`settings <fileservertasks.settings>`
---------------------------------------------
.. automodule:: fileservertasks.settings
:members:
:py:mod:`tasks <fileservertasks.tasks>` :py:mod:`tasks <fileservertasks.tasks>`
--------------------------------------- ---------------------------------------

View file

@ -18,12 +18,15 @@ import os
# If extensions (or modules to document with autodoc) are in another directory, # If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the # add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here. # documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join('..', 'gvafile'))) sys.path.insert(0, os.path.abspath(os.path.join("..", "gvafile")))
os.environ['GVAFILE_ALLOWED_HOSTS'] = 'localhost' os.environ["GVAFILE_BROKER_URL"] = "amqp://"
os.environ['GVAFILE_SERVER_EMAIL'] = 'root@localhost' os.environ["GVAFILE_ALLOWED_HOSTS"] = "localhost"
os.environ['GVAFILE_SFTP_DIRECTORY'] = '/home/www' os.environ["GVAFILE_MAIL_DIRECTORY"] = "/home/mail"
os.environ['GVAFILE_MAIL_DIRECTORY'] = '/home/mail' os.environ["GVAFILE_RESULTS_REDIS_URL"] = "redis://"
os.environ["GVAFILE_SERVER_EMAIL"] = "root@localhost"
os.environ["GVAFILE_SFTP_AUTHKEYS_DIRECTORY"] = "/srv/sftp/authorized_keys"
os.environ["GVAFILE_SFTP_DIRECTORY"] = "/home/www"
# -- General configuration ----------------------------------------------------- # -- General configuration -----------------------------------------------------
@ -32,36 +35,37 @@ os.environ['GVAFILE_MAIL_DIRECTORY'] = '/home/mail'
# Add any Sphinx extension module names here, as strings. They can be extensions # Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['releases', 'sphinx.ext.autodoc', 'celery.contrib.sphinx'] extensions = ["releases", "sphinx.ext.autodoc", "celery.contrib.sphinx"]
# configuration for releases extension
releases_issue_uri = "https://git.dittberner.info/gnuviech/gvafile/issues/%s"
releases_release_uri = "https://git.dittberner.info/gnuviech/gvafile/src/tag/%s"
# Add any paths that contain templates here, relative to this directory. # Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates'] templates_path = ["_templates"]
releases_issue_uri = 'https://dev.gnuviech-server.de/gvafile/ticket/%s'
releases_release_uri = 'https://dev.gnuviech-server.de/gvafile/milestone/%s'
# The suffix of source filenames. # The suffix of source filenames.
source_suffix = '.rst' source_suffix = ".rst"
# The encoding of source files. # The encoding of source files.
# source_encoding = 'utf-8-sig' # source_encoding = 'utf-8-sig'
# The master toctree document. # The master toctree document.
master_doc = 'index' master_doc = "index"
# General information about the project. # General information about the project.
project = u'gvafile' project = u"gvafile"
copyright = u'2014, 2015 Jan Dittberner' copyright = u"2014-2020, Jan Dittberner"
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
# built documents. # built documents.
# #
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
release = '0.5.0' from fileservertasks import __version__ as release
# The short X.Y version.
version = ".".join(release.split(".")[:2])
# The language for content autogenerated by Sphinx. Refer to documentation # The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. # for a list of supported languages.
@ -75,7 +79,7 @@ release = '0.5.0'
# List of patterns, relative to source directory, that match files and # List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files. # directories to ignore when looking for source files.
exclude_patterns = ['_build'] exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all documents. # The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None # default_role = None
@ -92,7 +96,7 @@ exclude_patterns = ['_build']
# show_authors = False # show_authors = False
# The name of the Pygments (syntax highlighting) style to use. # The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx' pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting. # A list of ignored prefixes for module index sorting.
# modindex_common_prefix = [] # modindex_common_prefix = []
@ -102,7 +106,7 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for # The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes. # a list of builtin themes.
html_theme = 'default' html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme # Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the # further. For a list of options available for each theme, see the
@ -131,7 +135,7 @@ html_theme = 'default'
# Add any paths that contain custom static files (such as style sheets) here, # Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files, # relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css". # so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static'] html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format. # using the given strftime format.
@ -175,7 +179,7 @@ html_static_path = ['_static']
# html_file_suffix = None # html_file_suffix = None
# Output file base name for HTML help builder. # Output file base name for HTML help builder.
htmlhelp_basename = 'gvafiledoc' htmlhelp_basename = "gvafiledoc"
# -- Options for LaTeX output -------------------------------------------------- # -- Options for LaTeX output --------------------------------------------------
@ -183,10 +187,8 @@ htmlhelp_basename = 'gvafiledoc'
latex_elements = { latex_elements = {
# The paper size ('letterpaper' or 'a4paper'). # The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper', #'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt'). # The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt', #'pointsize': '10pt',
# Additional stuff for the LaTeX preamble. # Additional stuff for the LaTeX preamble.
#'preamble': '', #'preamble': '',
} }
@ -194,8 +196,7 @@ latex_elements = {
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]). # (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [ latex_documents = [
('index', 'gvafile.tex', u'gvafile Documentation', ("index", "gvafile.tex", u"gvafile Documentation", u"Jan Dittberner", "manual")
u'Jan Dittberner', 'manual'),
] ]
# The name of an image file (relative to this directory) to place at the top of # The name of an image file (relative to this directory) to place at the top of
@ -223,10 +224,7 @@ latex_documents = [
# One entry per manual page. List of tuples # One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section). # (source start file, name, description, authors, manual section).
man_pages = [ man_pages = [("index", "gvafile", u"gvafile Documentation", [u"Jan Dittberner"], 1)]
('index', 'gvafile', u'gvafile Documentation',
[u'Jan Dittberner'], 1)
]
# If true, show URL addresses after external links. # If true, show URL addresses after external links.
# man_show_urls = False # man_show_urls = False
@ -238,9 +236,15 @@ man_pages = [
# (source start file, target name, title, author, # (source start file, target name, title, author,
# dir menu entry, description, category) # dir menu entry, description, category)
texinfo_documents = [ texinfo_documents = [
('index', 'gvafile', u'gvafile Documentation', (
u'Jan Dittberner', 'gvafile', 'GNUViech admin fileserver tools.', "index",
'Miscellaneous'), "gvafile",
u"gvafile Documentation",
u"Jan Dittberner",
"gvafile",
"GNUViech admin fileserver tools.",
"Miscellaneous",
)
] ]
# Documents to append as an appendix to all manuals. # Documents to append as an appendix to all manuals.
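
The Sphinx configuration above now imports the full release string from ``fileservertasks.__version__`` and derives the short X.Y version from it instead of hard-coding both values. A minimal sketch of that derivation (the release value is illustrative):

.. code-block:: python

    # Sketch of the version handling in the Sphinx configuration: the full
    # release string comes from the package, the short version is derived from it.
    release = "0.6.0"  # illustrative; the real value is imported from fileservertasks
    version = ".".join(release.split(".")[:2])
    assert version == "0.6"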

View file

@ -7,4 +7,4 @@ of the following steps:
* installation of native dependencies * installation of native dependencies
* setup of a virtualenv * setup of a virtualenv
* installation of gvafile production dependencies inside the virtualenv * installation of gvafile production dependencies inside the virtualenv
* setup of celery worker under control of supervisord * setup of celery worker under control of systemd

View file

@ -7,77 +7,30 @@ Install
Working Environment Working Environment
=================== ===================
You have several options in setting up your working environment. We recommend To get a running work environment use `pipenv`_.
using virtualenv to separate the dependencies of your project from your
system's python environment. If on Linux or Mac OS X, you can also use
virtualenvwrapper to help manage multiple virtualenvs across different
projects.
.. index:: virtualenv .. _pipenv: https://pipenv.kennethreitz.org/en/latest/
Virtualenv Only To get started install `pip` and `pipenv` and use `pipenv install --dev`:
---------------
First, make sure you are using `virtualenv`_. Once that's installed, create
your virtualenv:
.. code-block:: sh .. code-block:: sh
$ virtualenv --distribute gvafile $ apt install python3-pip
$ python3 -m pip install --user -U pipenv
.. _virtualenv: https://virtualenv.pypa.io/en/latest/ $ pipenv install --dev
You will also need to ensure that the virtualenv has the project directory
added to the path.
.. index:: virtualenvwrapper
Virtualenv with virtualenvwrapper
------------------------------------
In Linux and Mac OSX, you can install `virtualenvwrapper
<http://virtualenvwrapper.readthedocs.org/en/latest/>`_, which will take care
of managing your virtual environments and adding the project path to the
`site-directory` for you:
.. code-block:: sh
$ mkdir gvafile
$ mkvirtualenv -a gvafile gvafile-dev
$ cd gvafile && add2virtualenv `pwd`
.. index:: pip, requirements, dependencies
Installation of Dependencies
=============================
Depending on where you are installing dependencies:
In development:
.. code-block:: sh
$ pip install -r requirements/local.txt
For production:
.. code-block:: sh
$ pip install -r requirements.txt
.. index:: celery, worker, file queue .. index:: celery, worker, file queue
Running the Celery worker Running the Celery worker
========================= =========================
gvafile uses the `Celery`_ distributed task queue system. The gvafile logix is gvafile uses the `Celery`_ distributed task queue system. The gvafile logic is
executed by a celery worker. After all dependencies are installed you can go executed by a celery worker. After all dependencies are installed you can go
into the gvafile directory and run the celery worker with: into the gvafile directory and run the celery worker with:
.. code-block:: sh .. code-block:: sh
$ cd gvafile $ cd gvafile
$ celery -A gvafile worker -Q file -l info $ pipenv run celery -A fileservertasks worker -Q file -l info
.. _Celery: http://www.celeryproject.org/ .. _Celery: http://www.celeryproject.org/

gvafile.sh (new executable file): 7 changes

@ -0,0 +1,7 @@
#!/bin/sh
set -e
. /home/gvafile/gvafile-venv/bin/activate
cd /srv/gvafile/gvafile
celery -A fileservertasks worker -Q file -l info

View file

@ -2,3 +2,8 @@
This module contains :py:mod:`fileservertasks.tasks`. This module contains :py:mod:`fileservertasks.tasks`.
""" """
__version__ = "0.6.0"
from fileservertasks.celery import app as celery_app
__all__ = ('celery_app',)

View file

@ -9,7 +9,7 @@ from __future__ import absolute_import
from celery import Celery from celery import Celery
#: The Celery application #: The Celery application
app = Celery('gvafile') app = Celery('fileservertasks')
app.config_from_object('gvafile.settings') app.config_from_object('fileservertasks.settings', namespace="CELERY")
app.autodiscover_tasks(['fileservertasks'], force=True) app.autodiscover_tasks(['fileservertasks.tasks'], force=True)
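
With the Celery application renamed to ``fileservertasks`` and configured from ``fileservertasks.settings`` via the ``CELERY`` namespace, other gnuviechadmin components only need the shared broker to dispatch gvafile tasks by name. A hedged caller-side sketch; the broker URL is a placeholder and not part of this commit:

.. code-block:: python

    # Illustrative caller-side sketch: dispatch a gvafile task by name over the
    # shared broker. The broker URL below is a placeholder.
    from celery import Celery

    caller = Celery("caller", broker="amqp://")
    caller.send_task(
        "fileservertasks.tasks.setup_file_sftp_userdir",  # routed to the "file" queue
        args=["exampleuser"],
    )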

View file

@ -8,7 +8,7 @@ Common settings and globals.
from os import environ from os import environ
def get_env_setting(setting): def get_env_variable(setting):
""" """
Get the environment setting or return exception. Get the environment setting or return exception.
@ -25,23 +25,20 @@ def get_env_setting(setting):
########## CELERY CONFIGURATION ########## CELERY CONFIGURATION
CELERY_TIMEZONE = 'Europe/Berlin' CELERY_BROKER_URL = get_env_variable("GVAFILE_BROKER_URL")
CELERY_ENABLE_UTC = True CELERY_RESULT_BACKEND = get_env_variable("GVAFILE_RESULTS_REDIS_URL")
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_PERSISTENT = True CELERY_RESULT_PERSISTENT = True
CELERY_TASK_RESULT_EXPIRES = None CELERY_TASK_RESULT_EXPIRES = None
CELERY_ROUTES = ( CELERY_ROUTES = ("gvacommon.celeryrouters.GvaRouter",)
'gvacommon.celeryrouters.GvaRouter', CELERY_TIMEZONE = "Europe/Berlin"
) CELERY_ENABLE_UTC = True
CELERY_ACCEPT_CONTENT = ['json'] CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = 'json' CELERY_TASK_SERIALIZER = "json"
CELERY_RESULT_SERIALIZER = 'json' CELERY_RESULT_SERIALIZER = "json"
BROKER_URL = get_env_setting('GVAFILE_BROKER_URL')
########## END CELERY CONFIGURATION ########## END CELERY CONFIGURATION
########## GVAFILE CONFIGURATION ########## GVAFILE CONFIGURATION
GVAFILE_SFTP_DIRECTORY = get_env_setting('GVAFILE_SFTP_DIRECTORY') GVAFILE_SFTP_DIRECTORY = get_env_variable("GVAFILE_SFTP_DIRECTORY")
GVAFILE_MAIL_DIRECTORY = get_env_setting('GVAFILE_MAIL_DIRECTORY') GVAFILE_MAIL_DIRECTORY = get_env_variable("GVAFILE_MAIL_DIRECTORY")
GVAFILE_SFTP_AUTHKEYS_DIRECTORY = get_env_setting( GVAFILE_SFTP_AUTHKEYS_DIRECTORY = get_env_variable("GVAFILE_SFTP_AUTHKEYS_DIRECTORY")
'GVAFILE_SFTP_AUTHKEYS_DIRECTORY')
########## END GVAFILE CONFIGURATION ########## END GVAFILE CONFIGURATION
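
``fileservertasks.settings`` now obtains every value through ``get_env_variable``, so all ``GVAFILE_*`` variables must be present before the module is imported. A sketch of preparing such an environment for local experiments (the values are placeholders matching the documentation defaults):

.. code-block:: python

    import os

    # Placeholder values; point these at the local broker, Redis instance and
    # data directories before importing the settings module.
    os.environ.setdefault("GVAFILE_BROKER_URL", "amqp://")
    os.environ.setdefault("GVAFILE_RESULTS_REDIS_URL", "redis://")
    os.environ.setdefault("GVAFILE_SFTP_DIRECTORY", "/home/www")
    os.environ.setdefault("GVAFILE_MAIL_DIRECTORY", "/home/mail")
    os.environ.setdefault("GVAFILE_SFTP_AUTHKEYS_DIRECTORY", "/srv/sftp/authorized_keys")

    from fileservertasks import settings  # import after the environment is prepared

    assert settings.CELERY_BROKER_URL == "amqp://"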

View file

@ -6,11 +6,12 @@ This module defines `Celery`_ tasks to manage file system entities.
""" """
from __future__ import absolute_import, unicode_literals from __future__ import absolute_import, unicode_literals
from copy import deepcopy
import os import os
import subprocess import subprocess
from tempfile import mkstemp from tempfile import mkstemp
from gvafile import settings from fileservertasks import settings
from celery import shared_task from celery import shared_task
from celery.utils.log import get_task_logger from celery.utils.log import get_task_logger
@ -68,15 +69,25 @@ def _build_mail_directory_name(username):
@shared_task @shared_task
def setup_file_sftp_userdir(username): def setup_file_sftp_userdir(username, *args, **kwargs):
""" """
This task creates the home directory for an SFTP user if it does not exist This task creates the home directory for an SFTP user if it does not exist
yet. yet.
:param str username: the username :param str username: the username
:raises Exception: if the SFTP directory of the user cannot be created :raises Exception: if the SFTP directory of the user cannot be created
:return: the created directory name :return: a dictionary with the key :py:const:`username` set to the
:rtype: str username value and a new key :py:const:`sftp_directory` set to the
path of the created SFTP directory
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use :py:func:`fileservertasks.tasks.setup_file_sftp_userdir_chained`
at other positions in the task chain.
""" """
sftp_directory = _build_sftp_directory_name(username) sftp_directory = _build_sftp_directory_name(username)
@ -89,22 +100,55 @@ def setup_file_sftp_userdir(username):
sftp_directory], stderr=subprocess.STDOUT) sftp_directory], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as cpe: except subprocess.CalledProcessError as cpe:
log_and_raise( log_and_raise(
cpe, 'cold not create SFTP directory for user %s', username) cpe, 'could not create SFTP directory %s for user %s',
sftp_directory, username)
_LOGGER.info( _LOGGER.info(
'created sftp directory %s for user %s', sftp_directory, username) 'created sftp directory %s for user %s', sftp_directory, username)
return sftp_directory return {'username': username, 'sftp_directory': sftp_directory}
@shared_task @shared_task
def delete_file_sftp_userdir(username): def setup_file_sftp_userdir_chained(previous_result, *args, **kwargs):
"""
This task creates the home directory for an SFTP user if it does not exist
yet.
:param dict previous_result: a dictionary describing the result of the
previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` key
:raises Exception: if the SFTP directory of the user cannot be created
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`sftp_directory` key set to the path of the created SFTP
directory
:rtype: dict
"""
username = previous_result['username']
retval = deepcopy(previous_result)
retval.update(setup_file_sftp_userdir(username))
return retval
@shared_task
def delete_file_sftp_userdir(username, *args, **kwargs):
""" """
This task recursively deletes the home directory of an SFTP user if it This task recursively deletes the home directory of an SFTP user if it
does not exist yet. exists.
:param str username: the username :param str username: the username
:raises Exception: if the SFTP directory of the user cannot be removed :raises Exception: if the SFTP directory of the user cannot be removed
:return: the removed directory name :return: a dictionary with the key :py:const:`username` set to the username
:rtype: str value and the new key :py:const:`sftp_directory` set to the path of the
deleted SFTP directory
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use :py:func:`fileservertasks.tasks.delete_file_sftp_userdir_chained`
at other positions in the task chain.
""" """
sftp_directory = _build_sftp_directory_name(username) sftp_directory = _build_sftp_directory_name(username)
@ -117,11 +161,33 @@ def delete_file_sftp_userdir(username):
cpe, 'could not remove SFTP directory for user %s', username) cpe, 'could not remove SFTP directory for user %s', username)
_LOGGER.info( _LOGGER.info(
"deleted sftp directory %s of user %s", sftp_directory, username) "deleted sftp directory %s of user %s", sftp_directory, username)
return sftp_directory return {'username': username, 'sftp_directory': sftp_directory}
@shared_task @shared_task
def setup_file_mail_userdir(username): def delete_file_sftp_userdir_chained(previous_result, *args, **kwargs):
"""
This task recursively deletes the home directory of an SFTP user if it
exists.
:param dict previous_result: a dictionary describing the result of the
previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` key
:raises Exception: if the SFTP directory of the user cannot be removed
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`sftp_directory` key set to the path of the removed SFTP
directory
:rtype: dict
"""
username = previous_result['username']
retval = deepcopy(previous_result)
retval.update(delete_file_sftp_userdir(username))
return retval
@shared_task
def setup_file_mail_userdir(username, *args, **kwargs):
""" """
This task creates the mail base directory for a user if it does not exist This task creates the mail base directory for a user if it does not exist
yet. yet.
@ -129,8 +195,18 @@ def setup_file_mail_userdir(username):
:param str username: the username :param str username: the username
:raises Exception: if the mail base directory for the user cannot be :raises Exception: if the mail base directory for the user cannot be
created created
:return: the created directory name :return: a dictionary with the key :py:const:`username` set to the
:rtype: str username value and a new key :py:const:`mail_directory` set to the path
of the created mail directory
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use :py:func:`fileservertasks.tasks.setup_file_mail_userdir_chained`
at other positions in the task chain.
""" """
mail_directory = _build_mail_directory_name(username) mail_directory = _build_mail_directory_name(username)
@ -143,19 +219,52 @@ def setup_file_mail_userdir(username):
cpe, 'could not create mail base directory for user %s', username) cpe, 'could not create mail base directory for user %s', username)
_LOGGER.info( _LOGGER.info(
'created mail directory %s for user %s', mail_directory, username) 'created mail directory %s for user %s', mail_directory, username)
return mail_directory return {'username': username, 'mail_directory': mail_directory}
@shared_task @shared_task
def delete_file_mail_userdir(username): def setup_file_mail_userdir_chained(previous_result, *args, **kwargs):
"""
This task creates the mail base directory for a user if it does not exist
yet.
:param dict previous_result: a dictionary containing the result of the
previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` key
:raises Exception: if the mail base directory for the user cannot be
created
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`mail_directory` key set to the path of the created mail
directory
:rtype: dict
"""
username = previous_result['username']
retval = deepcopy(previous_result)
retval.update(setup_file_mail_userdir(username))
return retval
@shared_task
def delete_file_mail_userdir(username, *args, **kwargs):
""" """
This task recursively deletes the mail base directory for a user if it This task recursively deletes the mail base directory for a user if it
does not exist yet. does not exist yet.
:param str username: the username :param str username: the username
:raises Exception: if the mail base directory of the user cannot be removed :raises Exception: if the mail base directory of the user cannot be deleted
:return: the removed directory name :return: a dictionary with the key :py:const:`username` set to the
:rtype: str username value and a new key :py:const:`mail_directory` set to the path
of the deleted mail directory
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use :py:func:`fileservertasks.tasks.delete_file_mail_userdir_chained`
at other positions in the task chain.
""" """
mail_directory = _build_mail_directory_name(username) mail_directory = _build_mail_directory_name(username)
@ -168,11 +277,32 @@ def delete_file_mail_userdir(username):
cpe, 'could not remove mail base directory of user %s', username) cpe, 'could not remove mail base directory of user %s', username)
_LOGGER.info( _LOGGER.info(
'deleted mail directory %s of user %s', mail_directory, username) 'deleted mail directory %s of user %s', mail_directory, username)
return mail_directory return {'username': username, 'mail_directory': mail_directory}
@shared_task
def delete_file_mail_userdir_chained(previous_result, *args, **kwargs):
"""
This task recursively deletes the mail base directory for a user if it
does not exist yet.
:param dict previous_result: a dictionary describing the result of the
previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` key
:raises Exception: if the mail base directory of the user cannot be deleted
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`mail_directory` key set to the path of the deleted mail
directory
:rtype: dict
"""
username = previous_result['username']
retval = deepcopy(previous_result)
retval.update(delete_file_mail_userdir(username))
return retval
@shared_task @shared_task
def create_file_mailbox(username, mailboxname): def create_file_mailbox(username, mailboxname, *args, **kwargs):
""" """
This task creates a new mailbox directory for the given user and mailbox This task creates a new mailbox directory for the given user and mailbox
name. name.
@ -180,8 +310,19 @@ def create_file_mailbox(username, mailboxname):
:param str username: the username :param str username: the username
:param str mailboxname: the mailbox name :param str mailboxname: the mailbox name
:raises Exception: if the mailbox directory cannot be created :raises Exception: if the mailbox directory cannot be created
:return: the created mailbox directory name :return: a dictionary with the keys :py:const:`username` and
:rtype: str :py:const:`mailboxname` set to the values of username and mailboxname
and a new key :py:const:`mailbox_directory` set to the path of the
created mailbox directory
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use :py:func:`fileservertasks.tasks.create_file_mailbox_chained` at
other positions in the task chain.
""" """
mailbox_directory = os.path.join( mailbox_directory = os.path.join(
@ -197,19 +338,56 @@ def create_file_mailbox(username, mailboxname):
_LOGGER.info( _LOGGER.info(
'created mailbox directory %s for user %s', mailbox_directory, 'created mailbox directory %s for user %s', mailbox_directory,
username) username)
return mailbox_directory return {
'username': username, 'mailboxname': mailboxname,
'mailbox_directory': mailbox_directory
}
@shared_task @shared_task
def delete_file_mailbox(username, mailboxname): def create_file_mailbox_chained(previous_result, *args, **kwargs):
"""
This task creates a new mailbox directory for the given user and mailbox
name.
:param dict previous_result: a dictionary describing the result of the
previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` and a :py:const:`mailboxname` key
:raises Exception: if the mailbox directory cannot be created
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`mailbox_directory` key set to the path of the created
mailbox directory
:rtype: dict
"""
username = previous_result['username']
mailboxname = previous_result['mailboxname']
retval = deepcopy(previous_result)
retval.update(create_file_mailbox(username, mailboxname))
return retval
@shared_task
def delete_file_mailbox(username, mailboxname, *args, **kwargs):
""" """
This task deletes the given mailbox of the given user. This task deletes the given mailbox of the given user.
:param str username: the username :param str username: the username
:param str mailboxname: the mailbox name :param str mailboxname: the mailbox name
:raises Exception: if the mailbox directory cannot be deleted :raises Exception: if the mailbox directory cannot be deleted
:return: the deleted mailbox directory name :return: a dictionary with the keys :py:const:`username` and
:rtype: str :py:const:`mailboxname` set to the values of username and mailboxname
and a new key :py:const:`mailbox_directory` set to the path of the
deleted mailbox directory
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use :py:func:`fileservertasks.tasks.delete_file_mailbox_chained` for
other positions in the task chain.
""" """
mailbox_directory = os.path.join( mailbox_directory = os.path.join(
@ -224,20 +402,56 @@ def delete_file_mailbox(username, mailboxname):
username) username)
_LOGGER.info( _LOGGER.info(
'deleted mailbox directory %s of user %s', mailbox_directory, username) 'deleted mailbox directory %s of user %s', mailbox_directory, username)
return mailbox_directory return {
'username': username, 'mailboxname': mailboxname,
'mailbox_directory': mailbox_directory
}
@shared_task
def delete_file_mailbox_chained(previous_result, *args, **kwargs):
"""
This task deletes the given mailbox of the given user.
:param dict previous_result: a dictionary describing the result of the
previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` and a :py:const:`mailboxname` key
:raises Exception: if the mailbox directory cannot be deleted
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`mailbox_directory` key set to the path of the deleted
mailbox directory
:rtype: dict
"""
username = previous_result['username']
mailboxname = previous_result['mailboxname']
retval = deepcopy(previous_result)
retval.update(delete_file_mailbox(username, mailboxname))
return retval
@shared_task @shared_task
def create_file_website_hierarchy(username, sitename): def create_file_website_hierarchy(username, sitename, *args, **kwargs):
""" """
This task creates the directory hierarchy for a website. This task creates the directory hierarchy for a website.
:param str username: the username :param str username: the username
:param str sitename: name of the website :param str sitename: the sitename
:raises Exception: if the website directory hierarchy directory cannot be :raises Exception: if the website directory hierarchy directory cannot be
created created
:return: the directory name :return: a dictionary with the keys :py:const:`username` and
:rtype: str :py:const:`sitename` set to the values of username and sitename and a
new key :py:const:`website_directory` set to the path of the created
website directory
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use
:py:func:`fileservertasks.tasks.create_file_website_hierarchy_chained`
at other positions in the task chain
""" """
website_directory = os.path.join( website_directory = os.path.join(
@ -271,20 +485,55 @@ def create_file_website_hierarchy(username, sitename):
_LOGGER.info( _LOGGER.info(
'created website directory %s for user %s', website_directory, 'created website directory %s for user %s', website_directory,
username) username)
return website_directory return {
'username': username, 'sitename': sitename,
'website_directory': website_directory,
}
@shared_task @shared_task
def delete_file_website_hierarchy(username, sitename): def create_file_website_hierarchy_chained(previous_result, *args, **kwargs):
""" """
This task deletes the website hierarchy recursively. This task creates the directory hierarchy for a website.
:param str username: the user name :param dict previous_result: a dictionary describing the result of the
:param str sitename: name of the website previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` and a :py:const:`sitename` key
:raises Exception: if the website directory hierarchy directory cannot be :raises Exception: if the website directory hierarchy directory cannot be
deleted created
:return: the directory name :return: a copy of the :py:obj:`previous_result` dictionary with a new
:rtype: str :py:const:`website_directory` key set to the path of the created
website directory
:rtype: dict
"""
username = previous_result['username']
sitename = previous_result['sitename']
retval = deepcopy(previous_result)
retval.update(create_file_website_hierarchy(username, sitename))
return retval
@shared_task
def delete_file_website_hierarchy(username, sitename, *args, **kwargs):
"""
This task deletes a website hierarchy recursively.
:param str username: a username
:param str sitename: a site name
:return: a dictionary with the keys :py:const:`username` and
:py:const:`sitename` set to their original values and a new key
:py:const:`website_directory` set to the path of the deleted website
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use
:py:func:`fileservertasks.tasks.delete_file_website_hierarchy_chained`
at other positions in the task chain
""" """
website_directory = os.path.join( website_directory = os.path.join(
@ -300,20 +549,58 @@ def delete_file_website_hierarchy(username, sitename):
"website %s"), username, sitename) "website %s"), username, sitename)
_LOGGER.info( _LOGGER.info(
'deleted website directory %s of user %s', website_directory, username) 'deleted website directory %s of user %s', website_directory, username)
return website_directory return {
'username': username, 'sitename': sitename,
'website_directory': website_directory,
}
@shared_task @shared_task
def set_file_ssh_authorized_keys(username, ssh_keys): def delete_file_website_hierarchy_chained(previous_result, *args, **kwargs):
""" """
This task sets the authorized keys for ssh logins. This task deletes the website hierarchy recursively.
:param str username: the user name :param dict previous_result: a dictionary describing the result of the
:param list ssh_key: an ssh_key previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` and a :py:const:`sitename` key
:raises Exception: if the website directory hierarchy directory cannot be
deleted
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`website_directory` set to the path of the deleted website
directory
:rtype: dict
"""
username = previous_result['username']
sitename = previous_result['sitename']
retval = deepcopy(previous_result)
retval.update(delete_file_website_hierarchy(username, sitename))
return retval
@shared_task
def set_file_ssh_authorized_keys(username, ssh_keys, *args, **kwargs):
"""
This task sets the authorized keys for ssh logins.
:param str username: a username
:param list ssh_keys: a list of ssh keys
:raises Exception: if the update of the creation or update of ssh :raises Exception: if the update of the creation or update of ssh
authorized_keys failed authorized_keys failed
:return: the name of the authorized_keys file :return: a dictionary with the keys :py:const:`username` and
:rtype: str :py:const:`ssh_keys` set to their original values and a new key
:py:const:`ssh_authorized_keys` set to the path of the SSH
authorized_keys file
:rtype: dict
.. note::
This variant can only be used at the beginning of a Celery task chain
or as a standalone task.
Use
:py:func:`fileservertasks.tasks.set_file_ssh_authorized_keys_chained`
at other positions in the task chain
""" """
ssh_authorized_keys_file = _build_authorized_keys_path(username) ssh_authorized_keys_file = _build_authorized_keys_path(username)
@ -354,4 +641,30 @@ def set_file_ssh_authorized_keys(username, ssh_keys):
log_and_raise( log_and_raise(
cpe, 'could not remove the authorized_keys file of user %s', cpe, 'could not remove the authorized_keys file of user %s',
username) username)
return ssh_authorized_keys_file return {
'username': username, 'ssh_keys': ssh_keys,
'ssh_authorized_keys': ssh_authorized_keys_file,
}
@shared_task
def set_file_ssh_authorized_keys_chained(previous_result, *args, **kwargs):
"""
This task sets the authorized keys for ssh logins.
:param dict previous_result: a dictionary describing the result of the
previous step in the Celery task chain. This dictionary must contain a
:py:const:`username` and a :py:const:`ssh_keys` key
:raises Exception: if the creation or update of the ssh
authorized_keys file failed
:return: a copy of the :py:obj:`previous_result` dictionary with a new
:py:const:`ssh_authorized_keys` set to the path of the SSH
authorized_keys file
:rtype: dict
"""
username = previous_result['username']
ssh_keys = previous_result['ssh_keys']
retval = deepcopy(previous_result)
retval.update(set_file_ssh_authorized_keys(username, ssh_keys))
return retval
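
The new ``*_chained`` task variants all take the result dictionary of the previous task as their first argument, which is what makes the tasks composable in a Celery chain as the docstrings above describe. A minimal, illustrative composition (the username is a placeholder):

.. code-block:: python

    # Illustrative chain: the first task receives the username directly; the
    # following _chained step receives the accumulated result dictionary and
    # adds its own key to a copy of it.
    from celery import chain

    from fileservertasks.tasks import (
        setup_file_sftp_userdir,
        setup_file_mail_userdir_chained,
    )

    workflow = chain(
        setup_file_sftp_userdir.s("exampleuser"),
        setup_file_mail_userdir_chained.s(),
    )
    result = workflow.delay()
    # result.get() returns a dictionary containing username, sftp_directory
    # and mail_directory once both tasks have run on the "file" queue worker.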

View file

@ -1,3 +0,0 @@
.*.swp
*.pyc
.ropeproject/

View file

@ -1,15 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import unicode_literals
class GvaRouter(object): class GvaRouter(object):
def route_for_task(self, task, args=None, kwargs=None): def route_for_task(self, task, args=None, kwargs=None):
for route in ['ldap', 'file', 'mysql', 'pgsql', 'web']: for route in ["ldap", "file", "mysql", "pgsql", "web"]:
if route in task: if route in task:
return { return {"exchange": route, "exchange_type": "direct", "queue": route}
'exchange': route,
'exchange_type': 'direct',
'queue': route,
}
return None return None
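
The reformatted router keeps its original behaviour: a task whose name contains one of the known component keywords is routed to the matching direct exchange and queue, everything else falls through to the default routing. An illustrative check of the return values:

.. code-block:: python

    # Illustrative use of GvaRouter: task names containing "file" end up on the
    # "file" queue, unknown names are left to the default routing.
    from gvacommon.celeryrouters import GvaRouter

    router = GvaRouter()
    assert router.route_for_task(
        "fileservertasks.tasks.setup_file_sftp_userdir"
    ) == {"exchange": "file", "exchange_type": "direct", "queue": "file"}
    assert router.route_for_task("unrelated.task") is None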

View file

@ -1,42 +0,0 @@
"""
This module defines mixins for gnuviechadmin views.
"""
from __future__ import unicode_literals
from django.http import HttpResponseForbidden
from django.utils.translation import ugettext as _
from braces.views import LoginRequiredMixin
class StaffOrSelfLoginRequiredMixin(LoginRequiredMixin):
"""
Mixin that makes sure that a user is logged in and matches the current
customer or is a staff user.
"""
def dispatch(self, request, *args, **kwargs):
if (
request.user.is_staff or
request.user == self.get_customer_object()
):
return super(StaffOrSelfLoginRequiredMixin, self).dispatch(
request, *args, **kwargs
)
return HttpResponseForbidden(
_('You are not allowed to view this page.')
)
def get_customer_object(self):
"""
Views based on this mixin have to implement this method to return
the customer that must be an object of the same class as the
django.contrib.auth user type.
:return: customer
:rtype: settings.AUTH_USER_MODEL
"""
raise NotImplemented("subclass has to implement get_customer_object")

View file

@ -1,3 +0,0 @@
# This file is here because many Platforms as a Service look for
# requirements.txt in the root directory of a project.
-r requirements/production.txt

View file

@ -1,10 +0,0 @@
amqp==1.4.9
bpython==0.13.1
anyjson==0.3.3
argparse==1.2.1
billiard==3.3.0.23
celery==3.1.26.post2
kombu==3.0.37
pytz==2019.2
wsgiref==0.1.2
PyYAML==3.13

View file

@ -1,5 +0,0 @@
# Local development dependencies go here
-r base.txt
coverage==3.7.1
Sphinx==1.2.3
releases==0.7.0

View file

@ -1,3 +0,0 @@
# Pro-tip: Try not to put anything here. There should be no dependency in
# production that isn't in development.
-r base.txt

View file

@ -1,3 +0,0 @@
# Test dependencies go here.
-r base.txt
coverage==3.7.1

salt/grains (new file): 12 changes

@ -0,0 +1,12 @@
gnuviechadmin:
user: vagrant
group: vagrant
checkout: /vagrant
home: /home/vagrant
update_git: False
roles:
- vagrant
- ldapclient
- nfsserver
- gnuviechadmin.gvafile

salt/minion (new file): 11 changes

@ -0,0 +1,11 @@
file_client: local
file_roots:
base:
- /srv/salt/
pillar_roots:
base:
- /srv/pillar
log_file: file:///dev/log