Compare commits
1104 Commits
92ac996952 ... 46d0e78d3c
6 .editorconfig (Normal file)
@@ -0,0 +1,6 @@
+#editorconfig.org
+root = true
+
+[*]
+indent_style = tab
+indent_size = 4
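The `.editorconfig` added above applies to every file (`[*]`) and asks editors to indent with tabs displayed four columns wide. As a rough sketch only, and assuming the third-party `editorconfig-checker` tool is installed (it is not part of this change), the rules could be checked from the repository root:

```text
# Hypothetical compliance check; editorconfig-checker is an external tool.
$ cd spaceport
$ editorconfig-checker
```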
1 .gitignore (vendored, Normal file)
@@ -0,0 +1 @@
+.idea/
3 .prettierrc (Normal file)
@@ -0,0 +1,3 @@
+{
+"singleQuote": true
+}
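The `.prettierrc` added above overrides a single Prettier default so that formatted JavaScript uses single quotes. As a sketch, assuming Prettier is available through the Yarn/Node toolchain described in the README below, and with `src/**/*.js` standing in for whatever paths are actually being formatted:

```text
# Report files that do not match the configured style:
$ npx prettier --check "src/**/*.js"
# Rewrite them in place using the repository's .prettierrc:
$ npx prettier --write "src/**/*.js"
```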
674 LICENSE
@@ -1,21 +1,661 @@
-The MIT License (MIT)
-
-Copyright (c) 2020 Tanner Collin
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+GNU AFFERO GENERAL PUBLIC LICENSE
+Version 3, 19 November 2007
+
+Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+[... the remaining 655 added lines are the standard, unmodified text of the GNU Affero General Public License, version 3 (preamble, sections 0 through 17, and the "How to Apply These Terms to Your New Programs" appendix), ending with a pointer to <https://www.gnu.org/licenses/> ...]
136 README.md
@@ -2,139 +2,19 @@

Spaceport is Calgary Protospace's member portal. It tracks membership, courses, training, access cards, and more.

Demo: https://spaceport.dns.t0.vc
Live: https://my.protospace.ca

## Documentation

https://docs.my.protospace.ca

## Development Setup

Install dependencies:

```text
# Python:
$ sudo apt update
$ sudo apt install python3 python3-pip python-virtualenv python3-virtualenv

# Yarn / nodejs:
# from https://yarnpkg.com/lang/en/docs/install/#debian-stable
$ curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
$ echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
$ sudo apt update
$ sudo apt install yarn
```

Clone this repo:

```text
$ git clone https://github.com/Protospace/spaceport.git
$ cd spaceport
```

### API Server

Create a venv, activate it, and install:

```text
$ cd apiserver
$ virtualenv -p python3 env
$ source env/bin/activate
(env) $ pip install -r requirements.txt
```

Now set up Django and run it:

```text
(env) $ python manage.py migrate --run-syncdb
(env) $ python manage.py createsuperuser --email admin@example.com --username admin
(env) $ DEBUG=true python manage.py runserver 0.0.0.0:8002
```

Django will now be running on port 8002; connect to localhost:8002 to test it.
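For a quick smoke test without opening a browser, a short script like the one below can confirm the dev server is answering (a minimal sketch; the exact status code for the root route may vary):

```python
# Quick smoke test of the local dev API server (assumes it is running on :8002).
# Any HTTP response, even a 404, means Django is up; a connection error means it isn't.
import urllib.request, urllib.error

try:
    with urllib.request.urlopen('http://localhost:8002/') as resp:
        print('Django is up, status', resp.status)
except urllib.error.HTTPError as e:
    print('Django is up, status', e.code)
except urllib.error.URLError as e:
    print('Django is not reachable:', e.reason)
```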
#### Import Old Portal Data

Place `old_portal.sqlite3` in the same directory as `manage.py`.

```text
(env) $ bash gen_old_models.sh
(env) $ time python import_old_portal.py
```

Give it about 5 minutes to run. This imports the old models into the new portal database, ready to be linked to users' emails when they sign up.

### Webclient

```text
# In a different terminal
$ cd webclient
$ yarn install
$ yarn start
```

The webclient will now be running on port 3000. Make changes and refresh to see them.

### Reverse Proxy

It's easiest to point a domain to the server and reverse proxy requests according to subdomain. If you don't set up a reverse proxy, you'll need to change URL settings.

Domains: `example.com`, `api.example.com`, and `static.example.com` should all be reverse proxied.

Configure nginx:

```text
server {
    listen 80;
    root /var/www/html;
    index index.html index.htm;

    server_name example.com;

    location / {
        proxy_pass http://127.0.0.1:3000/;
        proxy_set_header Host $http_host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}

server {
    listen 80;
    root /var/www/html;
    index index.html index.htm;

    server_name api.example.com;

    client_max_body_size 20M;

    location / {
        add_header 'Access-Control-Allow-Origin' '*' always;
        add_header 'Access-Control-Allow-Headers' 'content-type, authorization' always;
        add_header 'Access-Control-Allow-Methods' 'HEAD,GET,POST,PUT,PATCH,DELETE' always;
        add_header 'Access-Control-Max-Age' '600' always;
        proxy_pass http://127.0.0.1:8002/;
        proxy_set_header Host $http_host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}

server {
    listen 80;
    root /home/you/spaceport/apiserver/data/static;
    index index.html;

    server_name static.example.com;

    location / {
        try_files $uri $uri/ =404;
    }
}
```

https://docs.my.protospace.ca/dev_apiserver.html

## License

This program is free and open-source software licensed under the MIT License. Please see the `LICENSE` file for details.
This program is free and open-source software licensed under the GNU Affero General Public License. Please see the `LICENSE` file for details.

That means you have the right to study, change, and distribute the software and source code to anyone and for any purpose. You deserve these rights.

@@ -142,4 +22,6 @@ That means you have the right to study, change, and distribute the software and

Thanks to the Protospace Portal Committee.

Thanks to Emrah for lockout certification code, Pat for LDAP code, Murray for the blank member form PDF, and Kent for a ton of features.

Thanks to all the devs behind Python, Django, DRF, Node, React, Quill, and Bleach.
10
apiserver/.gitignore
vendored
10
apiserver/.gitignore
vendored
|
@ -109,3 +109,13 @@ old_models.py
|
|||
migrations/
|
||||
data/
|
||||
old_photos/
|
||||
old_paypal/
|
||||
missing_paypal/
|
||||
backups/
|
||||
secrets.py
|
||||
old_counts.csv
|
||||
scans.csv
|
||||
output.*
|
||||
out.*
|
||||
*.csv
|
||||
*.txt
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
default_app_config = 'apiserver.api.apps.ApiConfig'
|
|
@ -1,3 +1,22 @@
|
|||
from django.apps import apps
|
||||
from django.contrib import admin
|
||||
from django.contrib.admin.sites import AlreadyRegistered
|
||||
from simple_history.admin import SimpleHistoryAdmin
|
||||
|
||||
# Register your models here.
|
||||
app_models = apps.get_app_config('api').get_models()
|
||||
for model in app_models:
|
||||
if model._meta.model_name.startswith('historical'):
|
||||
continue
|
||||
|
||||
class MyAdmin(SimpleHistoryAdmin):
|
||||
pass
|
||||
|
||||
try:
|
||||
if hasattr(model, 'list_display'):
|
||||
MyAdmin.list_display = model.list_display
|
||||
if hasattr(model, 'search_fields'):
|
||||
MyAdmin.search_fields = model.search_fields
|
||||
|
||||
admin.site.register(model, MyAdmin)
|
||||
except AlreadyRegistered:
|
||||
pass
|
||||
|
|
|
@ -2,4 +2,7 @@ from django.apps import AppConfig
|
|||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
name = 'api'
|
||||
name = 'apiserver.api'
|
||||
|
||||
def ready(self):
|
||||
from . import signals
|
||||
|
|
14
apiserver/apiserver/api/emails/ical.html
Normal file
14
apiserver/apiserver/api/emails/ical.html
Normal file
|
@ -0,0 +1,14 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title></title>
|
||||
<style type="text/css">p.MsoNormal,p.MsoNoSpacing{margin:0}</style>
|
||||
</head>
|
||||
<body>
|
||||
<div>Hi [name],<br></div>
|
||||
<div><br></div>
|
||||
<div>Please find attached the iCalendar file for [class] on [date].<br></div>
|
||||
<div><br></div>
|
||||
<div>Spaceport<br></div>
|
||||
</body>
|
||||
</html>
|
5
apiserver/apiserver/api/emails/ical.txt
Normal file
5
apiserver/apiserver/api/emails/ical.txt
Normal file
|
@ -0,0 +1,5 @@
|
|||
Hi [name],
|
||||
|
||||
Please find attached the iCalendar file for [class] on [date].
|
||||
|
||||
Spaceport
|
19
apiserver/apiserver/api/emails/interest.html
Normal file
19
apiserver/apiserver/api/emails/interest.html
Normal file
|
@ -0,0 +1,19 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title></title>
|
||||
<style type="text/css">p.MsoNormal,p.MsoNoSpacing{margin:0}</style>
|
||||
</head>
|
||||
<body>
|
||||
<div>Hi [name],<br></div>
|
||||
<div><br></div>
|
||||
<div>There's been a class scheduled for [course] that you expressed interest in.<br></div>
|
||||
<div><br></div>
|
||||
<div>You can find the class on its course page here:<br></div>
|
||||
<div><a href="[link]">[link]</a><br></div>
|
||||
<div><br></div>
|
||||
<div>Your "interest" in this course is now removed and you won't receive any more notifications about its classes until you press the "interested" button again.<br></div>
|
||||
<div><br></div>
|
||||
<div>Spaceport<br></div>
|
||||
</body>
|
||||
</html>
|
11
apiserver/apiserver/api/emails/interest.txt
Normal file
11
apiserver/apiserver/api/emails/interest.txt
Normal file
|
@ -0,0 +1,11 @@
|
|||
Hi [name],
|
||||
|
||||
There's been a class scheduled for [course] that you expressed interest in.
|
||||
|
||||
You can find the class on its course page here:
|
||||
[link]
|
||||
|
||||
Your "interest" in this course is now removed and you won't receive any more
|
||||
notifications about its classes until you press the "interested" button again.
|
||||
|
||||
Spaceport
|
26
apiserver/apiserver/api/emails/overdue.html
Normal file
26
apiserver/apiserver/api/emails/overdue.html
Normal file
|
@ -0,0 +1,26 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title></title>
|
||||
<style type="text/css">p.MsoNormal,p.MsoNoSpacing{margin:0}</style>
|
||||
</head>
|
||||
<body>
|
||||
<div>Hi [name],<br></div>
|
||||
<div><br></div>
|
||||
<div>Your Protospace member dues are behind by two months and you are now "overdue".<br></div>
|
||||
<div><br></div>
|
||||
<div>You are paid up until [date]. Please pay your dues to prevent having your card and account deactivated by the system.<br></div>
|
||||
<div><br></div>
|
||||
<div>You can log into the portal and pay here:<br></div>
|
||||
<div><a href="https://my.protospace.ca/paymaster">https://my.protospace.ca/paymaster</a><br></div>
|
||||
<div><br></div>
|
||||
<div>Or send e-Transfer to info@protospace.ca or hand a director cash.<br></div>
|
||||
<div><br></div>
|
||||
<div>If there has been an error or you want to reply to this email, please click "reply-all" since the Spaceport inbox does not exist.<br></div>
|
||||
<div><br></div>
|
||||
<div>You won't receive any other emails about this.<br></div>
|
||||
<div><br></div>
|
||||
<div>Thanks,</div>
|
||||
<div>Spaceport<br></div>
|
||||
</body>
|
||||
</html>
|
19
apiserver/apiserver/api/emails/overdue.txt
Normal file
19
apiserver/apiserver/api/emails/overdue.txt
Normal file
|
@ -0,0 +1,19 @@
|
|||
Hi [name],
|
||||
|
||||
Your Protospace member dues are behind by two months and you are now "overdue".
|
||||
|
||||
You are paid up until [date]. Please pay your dues to prevent having your
|
||||
account and card deactivated by the system.
|
||||
|
||||
You can log into the portal and pay here:
|
||||
https://my.protospace.ca/paymaster
|
||||
|
||||
Or send e-Transfer to info@protospace.ca or hand a director cash.
|
||||
|
||||
If there has been an error or you want to reply to this email, please click
|
||||
"reply-all" since the Spaceport inbox does not exist.
|
||||
|
||||
You won't receive any other emails about this.
|
||||
|
||||
Thanks,
|
||||
Spaceport
|
21
apiserver/apiserver/api/emails/usage_bill.txt
Normal file
21
apiserver/apiserver/api/emails/usage_bill.txt
Normal file
|
@ -0,0 +1,21 @@
|
|||
Hi [name],
|
||||
|
||||
Please find attached a summary of your [device] usage in [month]. You used
|
||||
[overage] minutes over your free allotment, which at $0.50/min comes to $[bill].
|
||||
You can pay this on the portal's Paymaster page under the consumables section,
|
||||
or send an e-Transfer to info@protospace.ca, or hand a director cash.
|
||||
|
||||
INVOICE
|
||||
Device: [device]
|
||||
Month: [month]
|
||||
Usage: [minutes] min
|
||||
Overage: [overage] min
|
||||
----------------------
|
||||
Bill: $[bill]
|
||||
|
||||
If you want to reply to this email, please click "reply-all" since the Spaceport
|
||||
inbox does not exist.
|
||||
|
||||
If you've already paid, ignore this email.
|
||||
|
||||
Spaceport
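The square-bracket placeholders in these templates ([name], [device], [bill], and so on) are filled in before sending. The real rendering code lives in utils_email, which is not part of this diff; the sketch below is only a guess at the simplest substitution that would produce the text above.

```python
# Hypothetical placeholder substitution; the real logic is in utils_email (not shown in this diff).
def render_template(text, context):
    # Replace each [key] token with its value; keys not in the context are left untouched.
    for key, value in context.items():
        text = text.replace('[{}]'.format(key), str(value))
    return text

with open('usage_bill.txt') as f:
    body = render_template(f.read(), {
        'name': 'Tanner', 'device': 'Trotec', 'month': 'February',
        'minutes': 480, 'overage': 120, 'bill': '60.00',
    })
```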
|
40
apiserver/apiserver/api/emails/welcome.html
Normal file
40
apiserver/apiserver/api/emails/welcome.html
Normal file
|
@ -0,0 +1,40 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title></title>
|
||||
<style type="text/css">p.MsoNormal,p.MsoNoSpacing{margin:0}</style>
|
||||
</head>
|
||||
<body>
|
||||
<div>Hi [name],<br></div>
|
||||
<div><br></div>
|
||||
<div>You just signed up to Spaceport with the username: [username]<br></div>
|
||||
<div><br></div>
|
||||
<div>To manage your Protospace membership go to:<br></div>
|
||||
<div><a href="https://my.protospace.ca">https://my.protospace.ca</a><br></div>
|
||||
<div><br></div>
|
||||
<div>You have automatically been added to our forum Spacebar at:<br></div>
|
||||
<div><a href="https://forum.protospace.ca">https://forum.protospace.ca</a><br></div>
|
||||
<div><br></div>
|
||||
<div>Please introduce yourself here:<br></div>
|
||||
<div><a href="https://forum.protospace.ca/c/chattymcchatface/new-user-introductions/31">https://forum.protospace.ca/c/chattymcchatface/new-user-introductions/31</a><br></div>
|
||||
<div><br></div>
|
||||
<div>If you have any questions, you will get the fastest response there.<br></div>
|
||||
<div><br></div>
|
||||
<div>Your next goal is to become vetted after:<br></div>
|
||||
<div>- paying your member dues<br></div>
|
||||
<div>- being a member for four weeks<br></div>
|
||||
<div>- attending a New Member Orientation<br></div>
|
||||
<div>- finding two members to sponsor (vouch for) you<br></div>
|
||||
<div><br></div>
|
||||
<div>You can meet members Tuesday evenings during our open house.<br></div>
|
||||
<div><br></div>
|
||||
<div>Mark [date] on your calendar as the day you can get vetted.<br></div>
|
||||
<div><br></div>
|
||||
<div>Sign up for a New Member Orientation here:<br></div>
|
||||
<div><a href="https://my.protospace.ca/classes">https://my.protospace.ca/classes</a><br></div>
|
||||
<div><br></div>
|
||||
<div>Good luck,<br></div>
|
||||
<div>Spaceport<br></div>
|
||||
<div><br></div>
|
||||
</body>
|
||||
</html>
|
30
apiserver/apiserver/api/emails/welcome.txt
Normal file
30
apiserver/apiserver/api/emails/welcome.txt
Normal file
|
@ -0,0 +1,30 @@
|
|||
Hi [name],
|
||||
|
||||
You just signed up to Spaceport with the username: [username]
|
||||
|
||||
To manage your Protospace membership go to:
|
||||
https://my.protospace.ca
|
||||
|
||||
You have automatically been added to our forum Spacebar at:
|
||||
https://forum.protospace.ca
|
||||
|
||||
Please introduce yourself here:
|
||||
https://forum.protospace.ca/c/chattymcchatface/new-user-introductions/31
|
||||
|
||||
If you have any questions, you will get the fastest response there.
|
||||
|
||||
Your next goal is to become vetted after:
|
||||
- paying your member dues
|
||||
- being a member for four weeks
|
||||
- attending a New Member Orientation
|
||||
- finding two members to sponsor (vouch for) you
|
||||
|
||||
You can meet members Tuesday evenings during our open house.
|
||||
|
||||
Mark [date] on your calendar as the day you can get vetted.
|
||||
|
||||
Sign up for a New Member Orientation here:
|
||||
https://my.protospace.ca/classes
|
||||
|
||||
Good luck,
|
||||
Spaceport
|
14
apiserver/apiserver/api/fields.py
Normal file
14
apiserver/apiserver/api/fields.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
from rest_framework import serializers
|
||||
from . import utils
|
||||
|
||||
class UserEmailField(serializers.ModelField):
|
||||
def to_representation(self, obj):
|
||||
return getattr(obj.user, 'email', obj.old_email)
|
||||
def to_internal_value(self, data):
|
||||
return serializers.EmailField().run_validation(data)
|
||||
|
||||
class HTMLField(serializers.CharField):
|
||||
def to_internal_value(self, data):
|
||||
data = utils.clean(data)
|
||||
return super().to_internal_value(data)
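A sketch of how these custom fields are typically attached to a serializer. This is hypothetical: the project's real serializers.py is suppressed later in this diff, so the serializer name and field choices here are assumptions.

```python
# Hypothetical serializer using the custom fields above; names are illustrative only.
from rest_framework import serializers
from . import fields, models

class MemberSerializer(serializers.ModelSerializer):
    # Reads user.email when the member is linked to a User, otherwise falls
    # back to the imported old_email value (see UserEmailField above).
    email = fields.UserEmailField(
        model_field=models.Member._meta.get_field('old_email'),
        required=False,
    )
    # Runs utils.clean() on incoming markup so stored HTML is sanitized.
    public_bio = fields.HTMLField(required=False)

    class Meta:
        model = models.Member
        fields = ['email', 'public_bio', 'preferred_name', 'last_name']
```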
|
||||
|
101
apiserver/apiserver/api/management/commands/bill_trotec.py
Normal file
101
apiserver/apiserver/api/management/commands/bill_trotec.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.contrib.auth.models import User
|
||||
from django.db.models import Max, F, Count, Q, Sum
|
||||
from django.utils.timezone import now
|
||||
from django.core.cache import cache
|
||||
from django.db import transaction
|
||||
from dateutil import relativedelta
|
||||
import math
|
||||
|
||||
from apiserver import secrets, settings
|
||||
from apiserver.api import models, utils, utils_email
|
||||
|
||||
import time
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Bill Trotec laser usage for last month. Wise to run this on the 2nd of each month to prevent any timezone issues.'
|
||||
|
||||
FREE_MINUTES = 60 * 6
|
||||
DEVICE = 'TROTECS300'
|
||||
DEVICE_NAME = 'Trotec'
|
||||
DOLLARS_PER_MINUTE = 0.50
|
||||
|
||||
def bill_trotec(self):
|
||||
count = 0
|
||||
|
||||
now = utils.now_alberta_tz()
|
||||
current_month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
previous_month_start = current_month_start - relativedelta.relativedelta(months=1)
|
||||
|
||||
self.stdout.write('Billing from {} to {}...'.format(
|
||||
previous_month_start,
|
||||
current_month_start,
|
||||
))
|
||||
|
||||
usages = models.Usage.objects.order_by('id').filter(should_bill=True)
|
||||
month_trotec_usages = usages.filter(
|
||||
started_at__gte=previous_month_start,
|
||||
started_at__lt=current_month_start,
|
||||
device=self.DEVICE,
|
||||
)
|
||||
|
||||
month_trotec_user_ids = month_trotec_usages.values_list('user', flat=True).distinct()
|
||||
month_trotec_users = User.objects.filter(id__in=month_trotec_user_ids)
|
||||
|
||||
self.stdout.write('Found {} usages by {} users.'.format(
|
||||
month_trotec_usages.count(),
|
||||
month_trotec_users.count(),
|
||||
))
|
||||
|
||||
for user in month_trotec_users:
|
||||
if not user:
|
||||
continue
|
||||
|
||||
self.stdout.write('Billing {}:'.format(user.username))
|
||||
|
||||
users_usages = month_trotec_usages.filter(
|
||||
user=user,
|
||||
)
|
||||
|
||||
total_seconds = users_usages.aggregate(Sum('num_seconds'))['num_seconds__sum'] or 0
|
||||
total_minutes = math.ceil(total_seconds / 60.0)
|
||||
billable_minutes = total_minutes - self.FREE_MINUTES
|
||||
|
||||
self.stdout.write(' Total seconds: {}'.format(total_seconds))
|
||||
self.stdout.write(' Total minutes: {}'.format(total_minutes))
|
||||
self.stdout.write(' Billable minutes: {}'.format(billable_minutes))
|
||||
|
||||
if billable_minutes <= 0:
|
||||
self.stdout.write(' Skipping, used free time.')
|
||||
continue
|
||||
|
||||
bill = billable_minutes * self.DOLLARS_PER_MINUTE
|
||||
bill_str = format(bill, '.2f')
|
||||
|
||||
self.stdout.write(' Total bill: ${}'.format(bill_str))
|
||||
|
||||
utils_email.send_usage_bill_email(
|
||||
user,
|
||||
self.DEVICE_NAME,
|
||||
previous_month_start.strftime('%B'),
|
||||
total_minutes,
|
||||
billable_minutes,
|
||||
bill_str,
|
||||
)
|
||||
|
||||
self.stdout.write(' Sent usage bill email.')
|
||||
|
||||
count += 1
|
||||
|
||||
return count
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('{} - Billing Trotec'.format(str(now())))
|
||||
start = time.time()
|
||||
|
||||
count = self.bill_trotec()
|
||||
self.stdout.write('Billed {} members'.format(count))
|
||||
|
||||
self.stdout.write('Completed billing in {} s'.format(
|
||||
str(time.time() - start)[:4]
|
||||
))
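A worked example of the billing rule in this command, using made-up usage numbers:

```python
# Illustrative numbers only: 8 hours of laser time recorded in one month.
import math

total_seconds = 8 * 60 * 60                        # 28800 s of recorded usage
total_minutes = math.ceil(total_seconds / 60.0)    # 480 min
billable_minutes = total_minutes - 360             # FREE_MINUTES = 60 * 6 = 360, so 120 min over
bill = billable_minutes * 0.50                     # DOLLARS_PER_MINUTE = 0.50 -> $60.00
print(format(bill, '.2f'))                         # prints '60.00'
```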
|
|
@ -0,0 +1,50 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.utils.timezone import now
|
||||
|
||||
from apiserver import settings
|
||||
from apiserver.api import models, utils, utils_stats
|
||||
|
||||
import time
|
||||
import os
|
||||
|
||||
if settings.DEBUG:
|
||||
STATIC_FOLDER = './data/static/'
|
||||
else:
|
||||
STATIC_FOLDER = '/opt/spaceport/apiserver/data/static/'
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Delete unused static assets'
|
||||
|
||||
def delete_old_static(self):
|
||||
members = models.Member.objects
|
||||
|
||||
good_files = []
|
||||
for static_field in ['photo_large', 'photo_medium', 'photo_small', 'member_forms']:
|
||||
good_files.extend(members.values_list(static_field, flat=True))
|
||||
|
||||
count = 0
|
||||
for f in os.listdir(STATIC_FOLDER):
|
||||
if len(f) != 40:
|
||||
self.stdout.write('Skipping: ' + f)
|
||||
continue
|
||||
|
||||
if f[-3:] not in ['jpg', 'pdf', 'png']:
|
||||
self.stdout.write('Skipping: ' + f)
|
||||
continue
|
||||
|
||||
if f not in good_files:
|
||||
os.remove(STATIC_FOLDER + f)
|
||||
count += 1
|
||||
|
||||
return count
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('{} - Deleting unused static files'.format(str(now())))
|
||||
start = time.time()
|
||||
|
||||
count = self.delete_old_static()
|
||||
self.stdout.write('Deleted {} files'.format(count))
|
||||
|
||||
self.stdout.write('Completed deletion in {} s'.format(
|
||||
str(time.time() - start)[:4]
|
||||
))
|
|
@ -0,0 +1,92 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.utils.timezone import now
|
||||
from django.core.cache import cache
|
||||
from django.db import transaction
|
||||
|
||||
from apiserver import secrets, settings
|
||||
from apiserver.api import models
|
||||
|
||||
from uuid import uuid4
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
if settings.DEBUG:
|
||||
API_FOLDER = '.'
|
||||
DATA_FOLDER = './data'
|
||||
BACKUP_FOLDER = './backups'
|
||||
else:
|
||||
API_FOLDER = '/opt/spaceport/apiserver'
|
||||
DATA_FOLDER = '/opt/spaceport/apiserver/data'
|
||||
BACKUP_FOLDER = '/opt/spaceport/apiserver/backups'
|
||||
|
||||
backup_id_string = lambda x: '{}\t{}\t{}'.format(
|
||||
str(now()), x['name'], x['backup_id'],
|
||||
)
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Generate backups.'
|
||||
|
||||
@transaction.atomic
|
||||
def generate_backups(self):
|
||||
backup_users = secrets.BACKUP_TOKENS.values()
|
||||
count = 0
|
||||
|
||||
for user in backup_users:
|
||||
models.MetaInfo.objects.update_or_create(
|
||||
id=0,
|
||||
defaults=dict(backup_id=backup_id_string(user)),
|
||||
)
|
||||
with open(DATA_FOLDER + '/backup_user.txt', 'w') as f:
|
||||
f.write(user['name'] + '\n')
|
||||
with open(DATA_FOLDER + '/static/123e4567-e89b-12d3-a456-426655440000.jpg', 'w') as f:
|
||||
f.write(backup_id_string(user) + '\n')
|
||||
|
||||
if user['name'] == 'null': # reset the canaries for data-at-rest
|
||||
continue
|
||||
|
||||
file_name = 'spaceport-backup-{}.tar.gz'.format(
|
||||
str(now().date()),
|
||||
)
|
||||
|
||||
path_name = str(uuid4())
|
||||
|
||||
full_name = '{}/{}/{}'.format(
|
||||
BACKUP_FOLDER,
|
||||
path_name,
|
||||
file_name,
|
||||
)
|
||||
|
||||
mkdir_command = [
|
||||
'mkdir',
|
||||
BACKUP_FOLDER + '/' + path_name,
|
||||
]
|
||||
|
||||
tar_command = [
|
||||
'tar',
|
||||
'-czf',
|
||||
full_name,
|
||||
'--directory',
|
||||
API_FOLDER,
|
||||
'data/',
|
||||
]
|
||||
|
||||
subprocess.run(mkdir_command, check=True)
|
||||
subprocess.run(tar_command, check=True)
|
||||
|
||||
cache.set(user['cache_key'], path_name + '/' + file_name)
|
||||
|
||||
self.stdout.write('Wrote backup for: ' + user['name'])
|
||||
count += 1
|
||||
|
||||
return count
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('{} - Generating backups'.format(str(now())))
|
||||
start = time.time()
|
||||
|
||||
count = self.generate_backups()
|
||||
self.stdout.write('Generated {} backups'.format(count))
|
||||
|
||||
self.stdout.write('Completed backups in {} s'.format(
|
||||
str(time.time() - start)[:4]
|
||||
))
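This command reads secrets.BACKUP_TOKENS, which is not included in the diff. Based on the keys it accesses above (name, backup_id, cache_key), the structure presumably looks something like the sketch below; every token and value here is invented.

```python
# Hypothetical shape of secrets.BACKUP_TOKENS assumed by the command above.
BACKUP_TOKENS = {
    'random-url-token-1': {
        'name': 'tanner',                  # written to backup_user.txt and into the canary file
        'backup_id': 'canary-tanner-01',   # embedded in each archive so a leaked backup can be traced
        'cache_key': 'backup_url_tanner',  # cache key that stores the generated download path
    },
    'random-url-token-2': {
        'name': 'null',                    # the special 'null' user only resets the canaries
        'backup_id': 'canary-null-00',
        'cache_key': 'backup_url_null',
    },
}
```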
|
|
@ -0,0 +1,56 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.contrib.auth.models import User
|
||||
from apiserver.api import models, utils, utils_stats
|
||||
|
||||
import time
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Link old sessions to instructors. Usage example: link_old_sessions "Tanner C" tanner.collin'
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('old_instructor', type=str)
|
||||
parser.add_argument('username', type=str)
|
||||
|
||||
def link_old_sessions(self, old_instructor, username):
|
||||
sessions = models.Session.objects
|
||||
old_sessions = sessions.filter(old_instructor=old_instructor)
|
||||
|
||||
if not old_sessions.exists():
|
||||
self.stdout.write('Old instructor not found. Aborting.')
|
||||
return 0
|
||||
|
||||
user = User.objects.filter(username=username)
|
||||
|
||||
if not user.exists():
|
||||
self.stdout.write('Username not found. Aborting.')
|
||||
return 0
|
||||
|
||||
user = user.first()
|
||||
|
||||
for s in old_sessions:
|
||||
s.instructor = user
|
||||
s.save()
|
||||
self.stdout.write('Linked ' + s.course.name)
|
||||
|
||||
return old_sessions.count()
|
||||
|
||||
def handle(self, *args, **options):
|
||||
old_instructor = options['old_instructor']
|
||||
username = options['username']
|
||||
|
||||
self.stdout.write('Exact old instructor name: ' + old_instructor)
|
||||
self.stdout.write('Exact Spaceport username: ' + username)
|
||||
confirm = input('Is this correct? [y/N]: ')
|
||||
|
||||
if confirm != 'y':
|
||||
self.stdout.write('Aborting.')
|
||||
return
|
||||
|
||||
start = time.time()
|
||||
|
||||
count = self.link_old_sessions(old_instructor, username)
|
||||
self.stdout.write('Linked {} old sessions'.format(str(count)))
|
||||
|
||||
self.stdout.write('Completed in {} s'.format(
|
||||
str(time.time() - start)[:4]
|
||||
))
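Example invocation from a Django shell or script, equivalent to the command-line usage in the help text above (the command still asks for interactive confirmation before linking):

```python
# Equivalent to: python manage.py link_old_sessions "Tanner C" tanner.collin
from django.core.management import call_command

call_command('link_old_sessions', 'Tanner C', 'tanner.collin')
```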
|
88
apiserver/apiserver/api/management/commands/run_daily.py
Normal file
88
apiserver/apiserver/api/management/commands/run_daily.py
Normal file
|
@ -0,0 +1,88 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.utils.timezone import now
|
||||
from django.contrib.auth.models import User
|
||||
from apiserver.api import models, utils, utils_stats, utils_auth
|
||||
|
||||
import time
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Tasks to run on the portal daily. 7am UTC = 12am or 1am Calgary'
|
||||
|
||||
def tally_active_members(self):
|
||||
all_members = models.Member.objects
|
||||
active_members = all_members.filter(paused_date__isnull=True)
|
||||
|
||||
for member in active_members:
|
||||
utils.tally_membership_months(member)
|
||||
|
||||
return active_members.count()
|
||||
|
||||
def update_discourse_groups(self):
|
||||
add_to_group = {
|
||||
'directors_current': [],
|
||||
'protospace_members': [],
|
||||
'protospace_members_former': [],
|
||||
'protospace_instructors': [],
|
||||
}
|
||||
remove_from_group = {
|
||||
'directors_current': [],
|
||||
'protospace_members': [],
|
||||
'protospace_members_former': [],
|
||||
'protospace_members_uber': [],
|
||||
'protospace_instructors': [],
|
||||
}
|
||||
|
||||
for member in models.Member.objects.exclude(discourse_username__exact=''):
|
||||
username = member.discourse_username
|
||||
|
||||
# handle non-member vs. member
|
||||
if member.paused_date:
|
||||
add_to_group['protospace_members_former'].append(username)
|
||||
remove_from_group['directors_current'].append(username)
|
||||
remove_from_group['protospace_members'].append(username)
|
||||
remove_from_group['protospace_members_uber'].append(username)
|
||||
remove_from_group['protospace_instructors'].append(username)
|
||||
|
||||
continue
|
||||
else:
|
||||
add_to_group['protospace_members'].append(username)
|
||||
remove_from_group['protospace_members_former'].append(username)
|
||||
|
||||
# handle directors
|
||||
if member.is_director:
|
||||
add_to_group['directors_current'].append(username)
|
||||
else:
|
||||
remove_from_group['directors_current'].append(username)
|
||||
|
||||
# handle instructors
|
||||
if member.is_instructor:
|
||||
add_to_group['protospace_instructors'].append(username)
|
||||
else:
|
||||
remove_from_group['protospace_instructors'].append(username)
|
||||
|
||||
for group_name, usernames in add_to_group.items():
|
||||
utils_auth.add_discourse_group_members(group_name, usernames)
|
||||
|
||||
for group_name, usernames in remove_from_group.items():
|
||||
utils_auth.remove_discourse_group_members(group_name, usernames)
|
||||
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('{} - Beginning daily tasks'.format(str(now())))
|
||||
start = time.time()
|
||||
|
||||
count = self.tally_active_members()
|
||||
self.stdout.write('Tallied {} active members'.format(count))
|
||||
|
||||
count = utils_stats.calc_retain_counts()
|
||||
self.stdout.write('Tallied {} retained members'.format(count))
|
||||
|
||||
self.update_discourse_groups()
|
||||
self.stdout.write('Updated Discourse group memberships')
|
||||
|
||||
utils_stats.changed_card()
|
||||
self.stdout.write('Updated card change time')
|
||||
|
||||
self.stdout.write('Completed tasks in {} s'.format(
|
||||
str(time.time() - start)[:4]
|
||||
))
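The Discourse sync above boils down to a small per-member decision rule; restated as a standalone function for clarity (illustrative only, not code from this diff):

```python
# Which Discourse groups a member should end up in, per update_discourse_groups() above.
def discourse_groups(paused, is_director, is_instructor):
    if paused:
        return {'protospace_members_former'}
    groups = {'protospace_members'}
    if is_director:
        groups.add('directors_current')
    if is_instructor:
        groups.add('protospace_instructors')
    return groups

assert discourse_groups(paused=True, is_director=True, is_instructor=True) == {'protospace_members_former'}
assert discourse_groups(paused=False, is_director=True, is_instructor=False) == {'protospace_members', 'directors_current'}
```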
|
179
apiserver/apiserver/api/management/commands/run_hourly.py
Normal file
179
apiserver/apiserver/api/management/commands/run_hourly.py
Normal file
|
@ -0,0 +1,179 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.utils.timezone import now
|
||||
from apiserver.api import models, utils, utils_stats, utils_email
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import time
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Tasks to run on the portal hourly.'
|
||||
|
||||
def generate_stats(self):
|
||||
utils_stats.calc_next_events()
|
||||
counts = utils_stats.calc_member_counts()
|
||||
signup_count = utils_stats.calc_signup_counts()
|
||||
|
||||
# do this hourly in case an admin causes a change
|
||||
models.StatsMemberCount.objects.update_or_create(
|
||||
date=utils.today_alberta_tz(),
|
||||
defaults=dict(
|
||||
member_count=counts['member_count'],
|
||||
green_count=counts['green_count'],
|
||||
six_month_plus_count=counts['six_month_plus_count'],
|
||||
vetted_count=counts['vetted_count'],
|
||||
subscriber_count=counts['subscriber_count'],
|
||||
),
|
||||
)
|
||||
|
||||
models.StatsSignupCount.objects.update_or_create(
|
||||
month=utils.today_alberta_tz().replace(day=1),
|
||||
defaults=dict(signup_count=signup_count),
|
||||
)
|
||||
|
||||
utils_stats.calc_card_scans()
|
||||
|
||||
utils.gen_search_strings()
|
||||
|
||||
def send_class_reminders(self):
|
||||
# sends reminders to instructors that they are teaching a class
|
||||
# within 6-7 hours from now
|
||||
count = 0
|
||||
|
||||
now = utils.now_alberta_tz()
|
||||
current_hour_start = now.replace(minute=0, second=0, microsecond=0)
|
||||
|
||||
in_six_hours = current_hour_start + timedelta(hours=6)
|
||||
in_seven_hours = current_hour_start + timedelta(hours=7)
|
||||
|
||||
sessions = models.Session.objects.all()
|
||||
reminder_sessions = sessions.filter(
|
||||
datetime__gte=in_six_hours,
|
||||
datetime__lt=in_seven_hours,
|
||||
)
|
||||
|
||||
if reminder_sessions.count() == 0:
|
||||
self.stdout.write('No classes found within timeframe, returning')
|
||||
return 0
|
||||
|
||||
self.stdout.write('Found {} reminder sessions between {} and {} mountain time.'.format(
|
||||
reminder_sessions.count(),
|
||||
str(in_six_hours),
|
||||
str(in_seven_hours),
|
||||
))
|
||||
|
||||
for session in reminder_sessions:
|
||||
self.stdout.write('Session {} instructor {}:'.format(
|
||||
str(session),
|
||||
session.instructor.username,
|
||||
))
|
||||
|
||||
if session.is_cancelled:
|
||||
self.stdout.write(' Is cancelled, skipping.')
|
||||
continue
|
||||
|
||||
if session.course.id in [317, 273, 413]:
|
||||
self.stdout.write(' Is members meeting or cleanup, skipping.')
|
||||
continue
|
||||
|
||||
if 'Event' in session.course.tags or 'Outing' in session.course.tags:
|
||||
self.stdout.write(' Is partially outing or event, skipping.')
|
||||
continue
|
||||
|
||||
self.stdout.write(' Emailing {} {}:'.format(session.instructor.username, session.instructor.email))
|
||||
|
||||
utils.alert_tanner('Class reminder {} for {} {}'.format(
|
||||
str(session),
|
||||
session.instructor.username,
|
||||
session.instructor.email,
|
||||
))
|
||||
|
||||
self.stdout.write(' Sent class reminder email.')
|
||||
|
||||
count += 1
|
||||
|
||||
return count
|
||||
|
||||
def send_attendance_reminders(self):
|
||||
# sends reminders to instructors to mark attendance for classes
|
||||
# that happened 6-7 hours ago if they haven't already
|
||||
count = 0
|
||||
|
||||
now = utils.now_alberta_tz()
|
||||
current_hour_start = now.replace(minute=0, second=0, microsecond=0)
|
||||
|
||||
six_hours_ago = current_hour_start - timedelta(hours=6)
|
||||
seven_hours_ago = current_hour_start - timedelta(hours=7)
|
||||
|
||||
sessions = models.Session.objects.all()
|
||||
reminder_sessions = sessions.filter(
|
||||
datetime__gte=seven_hours_ago,
|
||||
datetime__lt=six_hours_ago,
|
||||
)
|
||||
|
||||
if reminder_sessions.count() == 0:
|
||||
self.stdout.write('No classes found within timeframe, returning')
|
||||
return 0
|
||||
|
||||
self.stdout.write('Found {} sessions between {} and {} mountain time.'.format(
|
||||
reminder_sessions.count(),
|
||||
str(seven_hours_ago),
|
||||
str(six_hours_ago),
|
||||
))
|
||||
|
||||
for session in reminder_sessions:
|
||||
self.stdout.write('Session {} instructor {}:'.format(
|
||||
str(session),
|
||||
session.instructor.username,
|
||||
))
|
||||
|
||||
if session.is_cancelled:
|
||||
self.stdout.write(' Is cancelled, skipping.')
|
||||
continue
|
||||
|
||||
if session.course.id in [317, 273, 413]:
|
||||
self.stdout.write(' Is members meeting or cleanup, skipping.')
|
||||
continue
|
||||
|
||||
if 'Event' in session.course.tags or 'Outing' in session.course.tags:
|
||||
self.stdout.write(' Is partially outing or event, skipping.')
|
||||
continue
|
||||
|
||||
if session.students.count() == 0:
|
||||
self.stdout.write(' Class is empty, skipping.')
|
||||
continue
|
||||
|
||||
if session.students.filter(attendance_status='Attended').count() > 0:
|
||||
self.stdout.write(' Instructor already marked attendance, skipping.')
|
||||
continue
|
||||
|
||||
self.stdout.write(' Emailing {} {}:'.format(session.instructor.username, session.instructor.email))
|
||||
|
||||
utils.alert_tanner('Attendance reminder {} for {} {}'.format(
|
||||
str(session),
|
||||
session.instructor.username,
|
||||
session.instructor.email,
|
||||
))
|
||||
|
||||
self.stdout.write(' Sent attendance reminder email.')
|
||||
|
||||
count += 1
|
||||
|
||||
return count
|
||||
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('{} - Beginning hourly tasks'.format(str(now())))
|
||||
start = time.time()
|
||||
|
||||
self.generate_stats()
|
||||
self.stdout.write('Generated stats')
|
||||
|
||||
count = self.send_class_reminders()
|
||||
self.stdout.write('Sent {} class reminders'.format(count))
|
||||
|
||||
count = self.send_attendance_reminders()
|
||||
self.stdout.write('Sent {} attendance reminders'.format(count))
|
||||
|
||||
self.stdout.write('Completed tasks in {} s'.format(
|
||||
str(time.time() - start)[:4]
|
||||
))
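Both reminder tasks key off one-hour-wide windows measured from the top of the current hour, so each session is matched by exactly one hourly run. A small sketch of the arithmetic (the run time is made up):

```python
# Window arithmetic used by the reminder tasks above; the timestamp is illustrative.
from datetime import datetime, timedelta

now = datetime(2020, 3, 6, 14, 25)  # hourly task fires at 2:25 pm
current_hour_start = now.replace(minute=0, second=0, microsecond=0)  # 2:00 pm

# Class reminders: sessions starting 6-7 hours from now (8:00 pm - 9:00 pm).
class_window = (current_hour_start + timedelta(hours=6),
                current_hour_start + timedelta(hours=7))

# Attendance reminders: sessions that started 6-7 hours ago (7:00 am - 8:00 am).
attendance_window = (current_hour_start - timedelta(hours=7),
                     current_hour_start - timedelta(hours=6))

print(class_window, attendance_window)
```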
|
22
apiserver/apiserver/api/management/commands/run_minutely.py
Normal file
22
apiserver/apiserver/api/management/commands/run_minutely.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.utils.timezone import now
|
||||
from apiserver.api import models, utils, utils_stats
|
||||
|
||||
import time
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Tasks to run on the portal minutely.'
|
||||
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('{} - Beginning minutely tasks'.format(str(now())))
|
||||
start = time.time()
|
||||
|
||||
players = utils_stats.check_minecraft_server()
|
||||
self.stdout.write('Found Minecraft players: ' + str(players))
|
||||
users = utils_stats.check_mumble_server()
|
||||
self.stdout.write('Found Mumble users: ' + str(users))
|
||||
|
||||
self.stdout.write('Completed tasks in {} s'.format(
|
||||
str(time.time() - start)[:4]
|
||||
))
|
|
@ -1,55 +1,128 @@
|
|||
from datetime import date
|
||||
from datetime import date, datetime
|
||||
from django.db import models
|
||||
from django.contrib.auth.models import User
|
||||
from django.utils.timezone import now
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.utils.timezone import now, pytz
|
||||
from simple_history.models import HistoricalRecords
|
||||
from simple_history import register
|
||||
|
||||
from . import old_models
|
||||
TIMEZONE_CALGARY = pytz.timezone('America/Edmonton')
|
||||
|
||||
register(User)
|
||||
|
||||
IGNORE = '+'
|
||||
|
||||
def today_alberta_tz():
|
||||
return datetime.now(TIMEZONE_CALGARY).date()
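Using the Calgary timezone here, rather than date.today() (which this diff replaces as the field default), matters around midnight: the server's UTC date can already be one day ahead of the local date. A quick illustration with a made-up timestamp:

```python
# Why today_alberta_tz() instead of date.today(): near midnight UTC the two dates differ.
from datetime import datetime
import pytz

TIMEZONE_CALGARY = pytz.timezone('America/Edmonton')
utc_now = pytz.utc.localize(datetime(2020, 3, 2, 3, 0))   # 03:00 UTC on March 2
print(utc_now.date())                                      # 2020-03-02 (server/UTC date)
print(utc_now.astimezone(TIMEZONE_CALGARY).date())         # 2020-03-01 (the Calgary "today")
```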
|
||||
|
||||
class Member(models.Model):
|
||||
user = models.OneToOneField(User, related_name='member', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
signup_helper = models.ForeignKey(User, related_name='signed_up', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
sponsorship = models.ManyToManyField('self', related_name='sponsored_by', symmetrical=False, blank=True)
|
||||
old_email = models.CharField(max_length=254, blank=True, null=True)
|
||||
photo_large = models.CharField(max_length=64, blank=True, null=True)
|
||||
photo_medium = models.CharField(max_length=64, blank=True, null=True)
|
||||
photo_small = models.CharField(max_length=64, blank=True, null=True)
|
||||
member_forms = models.CharField(max_length=64, blank=True, null=True)
|
||||
|
||||
set_details = models.BooleanField(default=False)
|
||||
first_name = models.CharField(max_length=32)
|
||||
last_name = models.CharField(max_length=32)
|
||||
preferred_name = models.CharField(max_length=32)
|
||||
phone = models.CharField(default='', max_length=32, null=True)
|
||||
emergency_contact_name = models.CharField(max_length=64, blank=True)
|
||||
emergency_contact_phone = models.CharField(max_length=32, blank=True)
|
||||
emergency_contact_name = models.CharField(default='', max_length=64, blank=True)
|
||||
emergency_contact_phone = models.CharField(default='', max_length=32, blank=True)
|
||||
birthdate = models.DateField(blank=True, null=True)
|
||||
is_minor = models.BooleanField(default=False)
|
||||
guardian_name = models.CharField(max_length=32, blank=True, null=True)
|
||||
street_address = models.CharField(default='', max_length=32, null=True)
|
||||
city = models.CharField(default='Calgary, AB', max_length=32)
|
||||
postal_code = models.CharField(max_length=16, null=True)
|
||||
public_bio = models.CharField(max_length=512, blank=True)
|
||||
private_notes = models.CharField(max_length=512, blank=True)
|
||||
|
||||
is_director = models.BooleanField(default=False)
|
||||
is_staff = models.BooleanField(default=False)
|
||||
is_instructor = models.BooleanField(default=False)
|
||||
status = models.CharField(max_length=32, blank=True, null=True)
|
||||
expire_date = models.DateField(default=date.today, null=True)
|
||||
current_start_date = models.DateField(default=date.today, null=True)
|
||||
application_date = models.DateField(default=date.today, null=True)
|
||||
expire_date = models.DateField(default=today_alberta_tz, null=True)
|
||||
current_start_date = models.DateField(default=today_alberta_tz, null=True)
|
||||
application_date = models.DateField(default=today_alberta_tz, null=True)
|
||||
vetted_date = models.DateField(blank=True, null=True)
|
||||
orientation_date = models.DateField(blank=True, null=True, default=None)
|
||||
lathe_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
mill_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
wood_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
wood2_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
tormach_cnc_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
precix_cnc_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
rabbit_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
trotec_cert_date = models.DateField(blank=True, null=True, default=None)
|
||||
paused_date = models.DateField(blank=True, null=True)
|
||||
monthly_fees = models.IntegerField(default=55, blank=True, null=True)
|
||||
is_allowed_entry = models.BooleanField(default=True)
|
||||
discourse_username = models.CharField(default='', max_length=40, blank=True, null=True)
|
||||
mediawiki_username = models.CharField(default='', max_length=40, blank=True, null=True)
|
||||
allow_last_scanned = models.BooleanField(default=True)
|
||||
|
||||
history = HistoricalRecords(excluded_fields=['member_forms'])
|
||||
|
||||
list_display = ['user', 'preferred_name', 'last_name', 'status']
|
||||
search_fields = ['user__username', 'preferred_name', 'last_name', 'status']
|
||||
def __str__(self):
|
||||
return getattr(self.user, 'username', 'None')
|
||||
|
||||
class Transaction(models.Model):
|
||||
user = models.ForeignKey(User, related_name='transactions', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
recorder = models.ForeignKey(User, related_name='+', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
recorder = models.ForeignKey(User, related_name=IGNORE, blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
member_id = models.IntegerField(blank=True, null=True)
|
||||
date = models.DateField(default=date.today)
|
||||
date = models.DateField(default=today_alberta_tz)
|
||||
amount = models.DecimalField(max_digits=7, decimal_places=2)
|
||||
reference_number = models.CharField(max_length=32, blank=True, null=True)
|
||||
reference_number = models.CharField(max_length=64, blank=True, null=True)
|
||||
memo = models.TextField(blank=True, null=True)
|
||||
number_of_membership_months = models.TextField(blank=True, null=True)
|
||||
number_of_membership_months = models.IntegerField(blank=True, null=True)
|
||||
payment_method = models.TextField(blank=True, null=True)
|
||||
category = models.TextField(blank=True, null=True)
|
||||
account_type = models.TextField(blank=True, null=True)
|
||||
info_source = models.TextField(blank=True, null=True)
|
||||
paypal_txn_id = models.CharField(max_length=17, blank=True, null=True, unique=True)
|
||||
paypal_txn_type = models.CharField(max_length=64, blank=True, null=True)
|
||||
paypal_payer_id = models.CharField(max_length=13, blank=True, null=True)
|
||||
protocoin = models.DecimalField(max_digits=7, decimal_places=2, default=0)
|
||||
|
||||
report_type = models.TextField(blank=True, null=True)
|
||||
report_memo = models.TextField(blank=True, null=True)
|
||||
|
||||
history = HistoricalRecords()
|
||||
|
||||
list_display = ['date', 'user', 'amount', 'protocoin', 'account_type', 'category']
|
||||
search_fields = ['date', 'user__username', 'account_type', 'category']
|
||||
def __str__(self):
|
||||
return '%s tx %s' % (self.user.username, self.date)
|
||||
|
||||
class PayPalHint(models.Model):
|
||||
user = models.ForeignKey(User, related_name='paypal_hints', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
account = models.CharField(unique=True, max_length=13)
|
||||
member_id = models.IntegerField(null=True)
|
||||
|
||||
history = HistoricalRecords()
|
||||
|
||||
list_display = ['account', 'user']
|
||||
search_fields = ['account', 'user__username']
|
||||
def __str__(self):
|
||||
return self.account
|
||||
|
||||
class IPN(models.Model):
|
||||
datetime = models.DateTimeField(auto_now_add=True)
|
||||
data = models.TextField()
|
||||
status = models.CharField(max_length=32)
|
||||
|
||||
history = HistoricalRecords()
|
||||
|
||||
list_display = ['datetime', 'status']
|
||||
search_fields = ['datetime', 'status']
|
||||
def __str__(self):
|
||||
return str(self.datetime)
|
||||
|
||||
class Card(models.Model):
|
||||
user = models.ForeignKey(User, related_name='cards', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
@ -57,13 +130,29 @@ class Card(models.Model):
|
|||
member_id = models.IntegerField(blank=True, null=True)
|
||||
card_number = models.CharField(unique=True, max_length=16, blank=True, null=True)
|
||||
notes = models.TextField(blank=True, null=True)
|
||||
last_seen_at = models.DateField(default=date.today, blank=True, null=True)
|
||||
last_seen_at = models.DateField(blank=True, null=True)
|
||||
last_seen = models.DateTimeField(blank=True, null=True)
|
||||
active_status = models.CharField(max_length=32, blank=True, null=True)
|
||||
|
||||
history = HistoricalRecords(excluded_fields=['last_seen_at', 'last_seen'])
|
||||
|
||||
list_display = ['card_number', 'user', 'last_seen']
|
||||
search_fields = ['card_number', 'user__username', 'last_seen']
|
||||
def __str__(self):
|
||||
return self.card_number
|
||||
|
||||
class Course(models.Model):
|
||||
name = models.TextField(blank=True, null=True)
|
||||
description = models.TextField(blank=True, null=True)
|
||||
is_old = models.BooleanField(default=False)
|
||||
tags = models.CharField(max_length=128, blank=True)
|
||||
|
||||
history = HistoricalRecords()
|
||||
|
||||
list_display = ['name', 'id']
|
||||
search_fields = ['name', 'id']
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Session(models.Model):
|
||||
instructor = models.ForeignKey(User, related_name='teaching', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
@ -75,11 +164,172 @@ class Session(models.Model):
|
|||
cost = models.DecimalField(max_digits=5, decimal_places=2)
|
||||
max_students = models.IntegerField(blank=True, null=True)
|
||||
|
||||
history = HistoricalRecords()
|
||||
|
||||
list_display = ['datetime', 'course', 'instructor']
|
||||
search_fields = ['datetime', 'course__name', 'instructor__username']
|
||||
def __str__(self):
|
||||
return '%s @ %s' % (self.course.name, self.datetime.astimezone(TIMEZONE_CALGARY).strftime('%Y-%m-%d %-I:%M %p'))
|
||||
|
||||
class Training(models.Model):
|
||||
user = models.ForeignKey(User, related_name='training', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
session = models.ForeignKey(Session, related_name='students', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
member_id = models.IntegerField(blank=True, null=True)
|
||||
attendance_status = models.TextField(blank=True, null=True)
|
||||
sign_up_date = models.DateField(default=date.today, blank=True, null=True)
|
||||
sign_up_date = models.DateField(default=today_alberta_tz, blank=True, null=True)
|
||||
paid_date = models.DateField(blank=True, null=True)
|
||||
|
||||
history = HistoricalRecords()
|
||||
|
||||
list_display = ['session', 'user']
|
||||
search_fields = ['session__course__name', 'user__username']
|
||||
def __str__(self):
|
||||
return '%s taking %s @ %s' % (self.user, self.session.course.name, self.session.datetime)
|
||||
|
||||
class Interest(models.Model):
|
||||
user = models.ForeignKey(User, related_name='interests', null=True, on_delete=models.SET_NULL)
|
||||
course = models.ForeignKey(Course, related_name='interests', null=True, on_delete=models.SET_NULL)
|
||||
|
||||
satisfied_by = models.ForeignKey(Session, related_name='satisfies', null=True, on_delete=models.SET_NULL)
|
||||
|
||||
list_display = ['user', 'course', 'satisfied_by']
|
||||
search_fields = ['user__username', 'course__name']
|
||||
def __str__(self):
|
||||
return '%s interested in %s' % (self.user, self.course)
|
||||
|
||||
|
||||
class MetaInfo(models.Model):
|
||||
backup_id = models.TextField()
|
||||
|
||||
class StatsMemberCount(models.Model):
|
||||
date = models.DateField(default=today_alberta_tz)
|
||||
member_count = models.IntegerField()
|
||||
green_count = models.IntegerField()
|
||||
six_month_plus_count = models.IntegerField()
|
||||
vetted_count = models.IntegerField()
|
||||
subscriber_count = models.IntegerField()
|
||||
|
||||
list_display = ['date', 'member_count', 'green_count', 'six_month_plus_count', 'vetted_count', 'subscriber_count']
|
||||
search_fields = ['date', 'member_count', 'green_count', 'six_month_plus_count', 'vetted_count', 'subscriber_count']
|
||||
|
||||
class StatsSignupCount(models.Model):
|
||||
month = models.DateField()
|
||||
signup_count = models.IntegerField()
|
||||
retain_count = models.IntegerField(default=0)
|
||||
vetted_count = models.IntegerField(default=0)
|
||||
|
||||
list_display = ['month', 'signup_count', 'retain_count', 'vetted_count']
|
||||
search_fields = ['month', 'signup_count', 'retain_count', 'vetted_count']
|
||||
|
||||
class StatsSpaceActivity(models.Model):
|
||||
date = models.DateField(default=today_alberta_tz)
|
||||
card_scans = models.IntegerField()
|
||||
|
||||
list_display = ['date', 'card_scans']
|
||||
search_fields = ['date', 'card_scans']
|
||||
|
||||
class Usage(models.Model):
|
||||
user = models.ForeignKey(User, related_name='usages', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
username = models.CharField(max_length=64, blank=True) # in case of LDAP-Spaceport mismatch
|
||||
|
||||
device = models.CharField(max_length=64)
|
||||
started_at = models.DateTimeField(auto_now_add=True)
|
||||
finished_at = models.DateTimeField(null=True)
|
||||
deleted_at = models.DateTimeField(null=True, blank=True)
|
||||
|
||||
num_seconds = models.IntegerField()
|
||||
num_reports = models.IntegerField()
|
||||
memo = models.TextField(blank=True)
|
||||
should_bill = models.BooleanField(default=True)
|
||||
|
||||
history = HistoricalRecords(excluded_fields=['num_reports'])
|
||||
|
||||
list_display = ['started_at', 'finished_at', 'user', 'num_seconds', 'should_bill']
|
||||
search_fields = ['started_at', 'finished_at', 'user__username']
|
||||
def __str__(self):
|
||||
return str(self.started_at)
|
||||
|
||||
class PinballScore(models.Model):
|
||||
user = models.ForeignKey(User, related_name='scores', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
started_at = models.DateTimeField(auto_now_add=True)
|
||||
finished_at = models.DateTimeField(null=True)
|
||||
|
||||
game_id = models.IntegerField()
|
||||
player = models.IntegerField()
|
||||
score = models.IntegerField()
|
||||
|
||||
# no history
|
||||
|
||||
list_display = ['started_at', 'game_id', 'player', 'score', 'user']
|
||||
search_fields = ['started_at', 'game_id', 'player', 'score', 'user__username']
|
||||
def __str__(self):
|
||||
return str(self.started_at)
|
||||
|
||||
class Hosting(models.Model):
|
||||
user = models.ForeignKey(User, related_name='hosting', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
started_at = models.DateTimeField(auto_now_add=True)
|
||||
finished_at = models.DateTimeField()
|
||||
hours = models.DecimalField(max_digits=5, decimal_places=2)
|
||||
|
||||
# no history
|
||||
|
||||
list_display = ['started_at', 'hours', 'finished_at', 'user']
|
||||
search_fields = ['started_at', 'hours', 'finished_at', 'user__username']
|
||||
def __str__(self):
|
||||
return str(self.started_at)
|
||||
|
||||
class StorageSpace(models.Model):
|
||||
user = models.ForeignKey(User, related_name='storage', blank=True, null=True, on_delete=models.SET_NULL)
|
||||
|
||||
shelf_id = models.TextField(unique=True)
|
||||
location = models.TextField(choices=[
|
||||
('member_shelves', 'Member Shelves'),
|
||||
('lockers', 'Lockers'),
|
||||
('large_project_storage', 'Large Project Storage'),
|
||||
])
|
||||
memo = models.TextField(blank=True)
|
||||
|
||||
history = HistoricalRecords()
|
||||
|
||||
list_display = ['shelf_id', 'location', 'user', 'id']
|
||||
search_fields = ['shelf_id', 'location', 'user__username', 'id']
|
||||
def __str__(self):
|
||||
return self.shelf_id
|
||||
|
||||
|
||||
class HistoryIndex(models.Model):
|
||||
content_type = models.ForeignKey(ContentType, null=True, on_delete=models.SET_NULL)
|
||||
object_id = models.PositiveIntegerField()
|
||||
history = GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
owner_id = models.PositiveIntegerField()
|
||||
owner_name = models.TextField()
|
||||
object_name = models.TextField()
|
||||
history_user = models.ForeignKey(User, null=True, on_delete=models.SET_NULL)
|
||||
history_date = models.DateTimeField()
|
||||
history_type = models.TextField()
|
||||
revert_url = models.TextField()
|
||||
|
||||
is_system = models.BooleanField()
|
||||
is_admin = models.BooleanField()
|
||||
|
||||
list_display = ['history_date', 'history_user', 'history_type', 'owner_name', 'object_name']
|
||||
search_fields = ['history_date', 'history_user__username', 'history_type', 'owner_name', 'object_name']
|
||||
def __str__(self):
|
||||
return '%s changed %s\'s %s' % (self.history_user, self.owner_name, self.object_name)
|
||||
|
||||
class HistoryChange(models.Model):
|
||||
index = models.ForeignKey(HistoryIndex, related_name='changes', null=True, on_delete=models.SET_NULL)
|
||||
|
||||
field = models.TextField()
|
||||
old = models.TextField()
|
||||
new = models.TextField()
|
||||
|
||||
list_display = ['field', 'old', 'new', 'index']
|
||||
search_fields = ['field', 'old', 'new', 'index__history_user__username']
|
||||
def __str__(self):
|
||||
return self.field
|
apiserver/apiserver/api/permissions.py (new file, 65 lines)
@@ -0,0 +1,65 @@
from rest_framework.permissions import BasePermission, IsAuthenticated, SAFE_METHODS

class AllowMetadata(BasePermission):
    def has_permission(self, request, view):
        return request.method in ['OPTIONS', 'HEAD']

def is_admin_director(user):
    if not user:
        return False

    if user.is_staff:
        return True

    if hasattr(user, 'member'):
        if user.member.is_director:
            return True
        if user.member.is_staff:
            return True

    return False

class IsObjOwnerOrAdmin(BasePermission):
    def has_object_permission(self, request, view, obj):
        return bool(request.user
            and (obj.user == request.user
                or is_admin_director(request.user)
            )
        )

class IsSessionInstructorOrAdmin(BasePermission):
    def has_object_permission(self, request, view, obj):
        return bool(request.user
            and (obj.session.instructor == request.user
                or is_admin_director(request.user)
            )
        )

class ReadOnly(BasePermission):
    def has_permission(self, request, view):
        return bool(request.method in SAFE_METHODS)
    def has_object_permission(self, request, view, obj):
        return bool(request.method in SAFE_METHODS)

class IsAdmin(BasePermission):
    def has_permission(self, request, view):
        return bool(
            request.user
            and is_admin_director(request.user)
        )

class IsAdminOrReadOnly(BasePermission):
    def has_permission(self, request, view):
        return bool(
            request.method in SAFE_METHODS
            or request.user
            and is_admin_director(request.user)
        )

class IsInstructorOrReadOnly(BasePermission):
    def has_permission(self, request, view):
        return bool(
            request.method in SAFE_METHODS
            or request.user
            and request.user.member.is_instructor
        )
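For orientation, DRF permission classes like these are composed per-view, and BasePermission supports the `|` / `&` operators for that. A minimal sketch of how a viewset might wire them up (the viewset, queryset, and serializer names are illustrative, not taken from this diff):

from rest_framework import viewsets
from . import models, serializers
from .permissions import AllowMetadata, IsObjOwnerOrAdmin

class StorageSpaceViewSet(viewsets.ModelViewSet):
    # OPTIONS/HEAD pass via AllowMetadata; other requests need the object's
    # owner or an admin/director through IsObjOwnerOrAdmin.has_object_permission
    permission_classes = [AllowMetadata | IsObjOwnerOrAdmin]
    queryset = models.StorageSpace.objects.all()
    serializer_class = serializers.StorageSpaceSerializer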
File diff suppressed because it is too large.

apiserver/apiserver/api/signals.py (new file, 98 lines)
@@ -0,0 +1,98 @@
import logging
logger = logging.getLogger(__name__)

from django.dispatch import receiver
from simple_history.signals import (
    pre_create_historical_record,
    post_create_historical_record
)

from . import models
from .permissions import is_admin_director

def get_object_owner(obj):
    full_name = lambda member: member.preferred_name + ' ' + member.last_name

    if obj.__class__.__name__ == 'Member':
        return full_name(obj), obj.id

    if getattr(obj, 'user', False):
        return full_name(obj.user.member), obj.user.member.id

    if getattr(obj, 'instructor', False):
        return full_name(obj.instructor.member), obj.instructor.member.id

    return 'Protospace', 0

@receiver(post_create_historical_record, dispatch_uid='create_hist')
def post_create_historical_record_callback(
        sender,
        instance,
        history_instance,
        history_change_reason,
        history_user,
        using,
        **kwargs):

    try:
        history_type = history_instance.get_history_type_display()
        object_name = instance.__class__.__name__

        if object_name in ['User', 'IPN']: return

        if history_type == 'Changed':
            changes = history_instance.diff_against(history_instance.prev_record).changes
        else:
            changes = []

        # it's possible for changes to be empty if model saved with no diff
        if len(changes) or history_type in ['Created', 'Deleted']:
            owner = get_object_owner(instance)

            index = models.HistoryIndex.objects.create(
                history=history_instance,
                owner_id=owner[1],
                owner_name=owner[0],
                object_name=object_name,
                history_user=history_user,
                history_date=history_instance.history_date,
                history_type=history_type,
                revert_url=history_instance.revert_url(),
                is_system=bool(history_user == None),
                is_admin=is_admin_director(history_user),
            )

            for num, change in enumerate(changes):
                change_old = str(change.old)
                change_new = str(change.new)

                if len(change_old) > 200:
                    change_old = change_old[:200] + '... [truncated]'
                if len(change_new) > 200:
                    change_new = change_new[:200] + '... [truncated]'

                models.HistoryChange.objects.create(
                    index=index,
                    field=change.field,
                    old=change_old,
                    new=change_new,
                )

                logger.info('History - {} changed {}\'s {} {}/{}: {} "{}" --> "{}"'.format(
                    history_user or 'System',
                    owner[0],
                    object_name,
                    num+1,
                    len(changes),
                    change.field,
                    change_old,
                    change_new,
                ))

    except BaseException as e:
        logger.error('History Signal - {} - {}'.format(e.__class__.__name__, e))
        logger.info(str(sender))
        logger.info(str(instance))
        logger.info(str(history_instance))
        logger.info(str(history_change_reason))
        logger.info(str(history_user))
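Because HistoryIndex points at the concrete history row through a GenericForeignKey, audit lookups can be made uniformly across models. A rough sketch of the kind of query this enables (an illustrative helper, not part of the diff):

from apiserver.api import models

def recent_changes_for_member(member_id, limit=50):
    # newest audit entries for objects owned by this member, with the
    # per-field old/new values reachable through the 'changes' related name
    return (models.HistoryIndex.objects
            .filter(owner_id=member_id)
            .order_by('-history_date')
            .prefetch_related('changes')[:limit])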
apiserver/apiserver/api/test_api.py (new file, 63 lines)
@@ -0,0 +1,63 @@
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from apiserver.api.models import Member, User
import json
from parameterized import parameterized

data = {
    "username": "registrationtc",
    "email": "unittest@email.com",
    "password1": "unittest",
    "password2": "unittest",
    "first_name": "John",
    "last_name": "Doe",

    # need to fake this for updating progress
    "request_id": "lol"
}


class RegistrationTests(APITestCase):
    def setUp(self):
        self.url = reverse('rest_name_register')
        # TODO: expose data to be used for E2E testing from a webclient
        self.data = data
        # TODO: match with config
        self.allowed_ip = '24.66.110.96'

    def test_success(self):
        """Ensure we can create a new account object."""
        response = self.client.post(
            self.url,
            self.data,
            format='json',
            HTTP_X_REAL_IP=self.allowed_ip
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        user = User.objects.get(username=self.data['username'])
        assert user is not None
        assert Member.objects.get(user=user) is not None

    def test_allowed_ip_wrong(self):
        """Ensure creation is only allowed when the HTTP_X_REAL_IP header matches an IP in the whitelist"""
        response = self.client.post(
            self.url,
            self.data,
            format='json',
            HTTP_X_REAL_IP="0.0.0.0"
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @parameterized.expand([(f"{key} is missing", key, status.HTTP_400_BAD_REQUEST) for key in data.keys() if key != 'request_id'])
    def test_malformed_data(self, name, inp, expected):
        """Delete specific properties from data and confirm it is not accepted by the API"""
        copy = self.data.copy()
        del copy[inp]
        response = self.client.post(
            self.url,
            copy,
            format='json',
            HTTP_X_REAL_IP=self.allowed_ip
        )
        self.assertEqual(response.status_code, expected)
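These are plain Django/DRF test cases, so with this repo's manage.py they should run under the standard test runner; something like the following (the app path is assumed from the imports above):

python manage.py test apiserver.api -v 2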
@@ -1,3 +1,363 @@
from django.test import TestCase
|
||||
import datetime
|
||||
from dateutil import relativedelta
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
# Create your tests here.
|
||||
from apiserver.api import utils, utils_paypal, models
|
||||
|
||||
class TestMonthsSpanned(TestCase):
|
||||
def test_num_months_spanned_one_month(self):
|
||||
date2 = datetime.date(2020, 1, 10)
|
||||
date1 = datetime.date(2020, 2, 10)
|
||||
|
||||
spanned = utils.num_months_spanned(date1, date2)
|
||||
|
||||
self.assertEqual(spanned, 1)
|
||||
|
||||
def test_num_months_spanned_one_week(self):
|
||||
date1 = datetime.date(2020, 2, 5)
|
||||
date2 = datetime.date(2020, 1, 28)
|
||||
|
||||
spanned = utils.num_months_spanned(date1, date2)
|
||||
|
||||
self.assertEqual(spanned, 1)
|
||||
|
||||
def test_num_months_spanned_two_days(self):
|
||||
date1 = datetime.date(2020, 2, 1)
|
||||
date2 = datetime.date(2020, 1, 31)
|
||||
|
||||
spanned = utils.num_months_spanned(date1, date2)
|
||||
|
||||
self.assertEqual(spanned, 1)
|
||||
|
||||
def test_num_months_spanned_two_years(self):
|
||||
date1 = datetime.date(2022, 1, 18)
|
||||
date2 = datetime.date(2020, 1, 18)
|
||||
|
||||
spanned = utils.num_months_spanned(date1, date2)
|
||||
|
||||
self.assertEqual(spanned, 24)
|
||||
|
||||
def test_num_months_spanned_same_month(self):
|
||||
date1 = datetime.date(2020, 1, 31)
|
||||
date2 = datetime.date(2020, 1, 1)
|
||||
|
||||
spanned = utils.num_months_spanned(date1, date2)
|
||||
|
||||
self.assertEqual(spanned, 0)
|
||||
|
||||
|
||||
class TestMonthsDifference(TestCase):
|
||||
def test_num_months_difference_one_month(self):
|
||||
date2 = datetime.date(2020, 1, 10)
|
||||
date1 = datetime.date(2020, 2, 10)
|
||||
|
||||
difference = utils.num_months_difference(date1, date2)
|
||||
|
||||
self.assertEqual(difference, 1)
|
||||
|
||||
def test_num_months_difference_one_week(self):
|
||||
date1 = datetime.date(2020, 2, 5)
|
||||
date2 = datetime.date(2020, 1, 28)
|
||||
|
||||
difference = utils.num_months_difference(date1, date2)
|
||||
|
||||
self.assertEqual(difference, 0)
|
||||
|
||||
def test_num_months_difference_two_days(self):
|
||||
date1 = datetime.date(2020, 2, 1)
|
||||
date2 = datetime.date(2020, 1, 31)
|
||||
|
||||
difference = utils.num_months_difference(date1, date2)
|
||||
|
||||
self.assertEqual(difference, 0)
|
||||
|
||||
def test_num_months_difference_two_years(self):
|
||||
date1 = datetime.date(2022, 1, 18)
|
||||
date2 = datetime.date(2020, 1, 18)
|
||||
|
||||
difference = utils.num_months_difference(date1, date2)
|
||||
|
||||
self.assertEqual(difference, 24)
|
||||
|
||||
def test_num_months_difference_same_month(self):
|
||||
date1 = datetime.date(2020, 1, 31)
|
||||
date2 = datetime.date(2020, 1, 1)
|
||||
|
||||
difference = utils.num_months_difference(date1, date2)
|
||||
|
||||
self.assertEqual(difference, 0)
|
||||
|
||||
|
||||
class TestAddMonths(TestCase):
|
||||
def test_add_months_one_month(self):
|
||||
date = datetime.date(2020, 1, 18)
|
||||
num_months = 1
|
||||
|
||||
new_date = utils.add_months(date, num_months)
|
||||
|
||||
self.assertEqual(new_date, datetime.date(2020, 2, 18))
|
||||
|
||||
def test_add_months_february(self):
|
||||
date = datetime.date(2020, 1, 31)
|
||||
num_months = 1
|
||||
|
||||
new_date = utils.add_months(date, num_months)
|
||||
|
||||
self.assertEqual(new_date, datetime.date(2020, 2, 29))
|
||||
|
||||
def test_add_months_february_leap(self):
|
||||
date = datetime.date(2020, 2, 29)
|
||||
num_months = 12
|
||||
|
||||
new_date = utils.add_months(date, num_months)
|
||||
|
||||
self.assertEqual(new_date, datetime.date(2021, 2, 28))
|
||||
|
||||
def test_add_months_hundred_years(self):
|
||||
date = datetime.date(2020, 1, 31)
|
||||
num_months = 1200
|
||||
|
||||
new_date = utils.add_months(date, num_months)
|
||||
|
||||
self.assertEqual(new_date, datetime.date(2120, 1, 31))
|
||||
|
||||
|
||||
class TestCalcStatus(TestCase):
|
||||
def test_calc_member_status_14_days(self):
|
||||
expire_date = utils.today_alberta_tz() + datetime.timedelta(days=14)
|
||||
|
||||
status = utils.calc_member_status(expire_date)
|
||||
|
||||
self.assertEqual(status, 'Current')
|
||||
|
||||
def test_calc_member_status_1_month(self):
|
||||
today = datetime.date(2019, 2, 10)
|
||||
expire_date = datetime.date(2019, 3, 10)
|
||||
|
||||
status = utils.calc_member_status(expire_date, today)
|
||||
|
||||
self.assertEqual(status, 'Current')
|
||||
|
||||
def test_calc_member_status_90_days(self):
|
||||
expire_date = utils.today_alberta_tz() + datetime.timedelta(days=90)
|
||||
|
||||
status = utils.calc_member_status(expire_date)
|
||||
|
||||
self.assertEqual(status, 'Prepaid')
|
||||
|
||||
def test_calc_member_status_tomorrow(self):
|
||||
expire_date = utils.today_alberta_tz() + datetime.timedelta(days=1)
|
||||
|
||||
status = utils.calc_member_status(expire_date)
|
||||
|
||||
self.assertEqual(status, 'Current')
|
||||
|
||||
def test_calc_member_status_today(self):
|
||||
expire_date = utils.today_alberta_tz()
|
||||
|
||||
status = utils.calc_member_status(expire_date)
|
||||
|
||||
self.assertEqual(status, 'Due')
|
||||
|
||||
def test_calc_member_status_yesterday(self):
|
||||
expire_date = utils.today_alberta_tz() - datetime.timedelta(days=1)
|
||||
|
||||
status = utils.calc_member_status(expire_date)
|
||||
|
||||
self.assertEqual(status, 'Due')
|
||||
|
||||
def test_calc_member_status_1_month_ago(self):
|
||||
today = datetime.date(2019, 4, 10)
|
||||
expire_date = datetime.date(2019, 3, 10)
|
||||
|
||||
status = utils.calc_member_status(expire_date, today)
|
||||
|
||||
self.assertEqual(status, 'Overdue')
|
||||
|
||||
def test_calc_member_status_85_days_ago(self):
|
||||
expire_date = utils.today_alberta_tz() - datetime.timedelta(days=85)
|
||||
|
||||
status = utils.calc_member_status(expire_date)
|
||||
|
||||
self.assertEqual(status, 'Overdue')
|
||||
|
||||
def test_calc_member_status_95_days_ago(self):
|
||||
expire_date = utils.today_alberta_tz() - datetime.timedelta(days=95)
|
||||
|
||||
status = utils.calc_member_status(expire_date)
|
||||
|
||||
self.assertEqual(status, 'Former Member')
|
||||
|
||||
|
||||
class TestTallyMembership(TestCase):
|
||||
def get_user(self):
|
||||
testing_user, _ = models.User.objects.get_or_create(
|
||||
first_name='unittest',
|
||||
username='unittest',
|
||||
last_name='tester',
|
||||
email='unittest@unittest.com'
|
||||
)
|
||||
return testing_user
|
||||
|
||||
def get_member_clear_transactions(self):
|
||||
testing_user = self.get_user()
|
||||
|
||||
member, _ = models.Member.objects.get_or_create(
|
||||
first_name=testing_user.first_name,
|
||||
preferred_name=testing_user.first_name,
|
||||
last_name=testing_user.last_name,
|
||||
user=testing_user,
|
||||
paused_date=None,
|
||||
expire_date=None
|
||||
)
|
||||
|
||||
return member
|
||||
|
||||
def test_tally_membership_months_prepaid(self):
|
||||
member = self.get_member_clear_transactions()
|
||||
test_num_months = 8
|
||||
start_date = utils.today_alberta_tz() - relativedelta.relativedelta(months=6, days=14)
|
||||
end_date = start_date + relativedelta.relativedelta(months=test_num_months)
|
||||
|
||||
member.current_start_date = start_date
|
||||
member.save()
|
||||
|
||||
for i in range(test_num_months):
|
||||
models.Transaction.objects.create(
|
||||
amount=0,
|
||||
member_id=member.id,
|
||||
user=member.user,
|
||||
number_of_membership_months=1,
|
||||
)
|
||||
|
||||
result = utils.tally_membership_months(member)
|
||||
|
||||
self.assertEqual(member.expire_date, end_date)
|
||||
self.assertEqual(member.status, 'Prepaid')
|
||||
|
||||
def test_tally_membership_months_current(self):
|
||||
member = self.get_member_clear_transactions()
|
||||
test_num_months = 7
|
||||
start_date = utils.today_alberta_tz() - relativedelta.relativedelta(months=6, days=14)
|
||||
end_date = start_date + relativedelta.relativedelta(months=test_num_months)
|
||||
|
||||
member.current_start_date = start_date
|
||||
member.save()
|
||||
|
||||
for i in range(test_num_months):
|
||||
models.Transaction.objects.create(
|
||||
amount=0,
|
||||
member_id=member.id,
|
||||
user=member.user,
|
||||
number_of_membership_months=1,
|
||||
)
|
||||
|
||||
result = utils.tally_membership_months(member)
|
||||
|
||||
self.assertEqual(member.expire_date, end_date)
|
||||
self.assertEqual(member.status, 'Current')
|
||||
|
||||
def test_tally_membership_months_due(self):
|
||||
member = self.get_member_clear_transactions()
|
||||
test_num_months = 6
|
||||
start_date = utils.today_alberta_tz() - relativedelta.relativedelta(months=6, days=14)
|
||||
end_date = start_date + relativedelta.relativedelta(months=test_num_months)
|
||||
|
||||
member.current_start_date = start_date
|
||||
member.save()
|
||||
|
||||
for i in range(test_num_months):
|
||||
models.Transaction.objects.create(
|
||||
amount=0,
|
||||
member_id=member.id,
|
||||
user=member.user,
|
||||
number_of_membership_months=1,
|
||||
)
|
||||
|
||||
result = utils.tally_membership_months(member)
|
||||
|
||||
self.assertEqual(member.expire_date, end_date)
|
||||
self.assertEqual(member.status, 'Due')
|
||||
|
||||
def test_tally_membership_months_overdue(self):
|
||||
member = self.get_member_clear_transactions()
|
||||
test_num_months = 5
|
||||
start_date = utils.today_alberta_tz() - relativedelta.relativedelta(months=6, days=14)
|
||||
end_date = start_date + relativedelta.relativedelta(months=test_num_months)
|
||||
|
||||
member.current_start_date = start_date
|
||||
member.save()
|
||||
|
||||
for i in range(test_num_months):
|
||||
models.Transaction.objects.create(
|
||||
amount=0,
|
||||
member_id=member.id,
|
||||
user=member.user,
|
||||
number_of_membership_months=1,
|
||||
)
|
||||
|
||||
result = utils.tally_membership_months(member)
|
||||
|
||||
self.assertEqual(member.expire_date, end_date)
|
||||
self.assertEqual(member.status, 'Overdue')
|
||||
|
||||
def test_tally_membership_months_overdue_pause(self):
|
||||
member = self.get_member_clear_transactions()
|
||||
test_num_months = 1
|
||||
start_date = utils.today_alberta_tz() - relativedelta.relativedelta(months=6, days=14)
|
||||
end_date = start_date + relativedelta.relativedelta(months=test_num_months)
|
||||
|
||||
member.current_start_date = start_date
|
||||
member.save()
|
||||
|
||||
for i in range(test_num_months):
|
||||
models.Transaction.objects.create(
|
||||
amount=0,
|
||||
member_id=member.id,
|
||||
user=member.user,
|
||||
number_of_membership_months=1,
|
||||
)
|
||||
|
||||
result = utils.tally_membership_months(member)
|
||||
|
||||
self.assertEqual(member.expire_date, end_date)
|
||||
self.assertEqual(member.paused_date, end_date)
|
||||
self.assertEqual(member.status, 'Former Member')
|
||||
|
||||
def test_tally_membership_months_dont_run(self):
|
||||
member = self.get_member_clear_transactions()
|
||||
start_date = utils.today_alberta_tz()
|
||||
|
||||
member.current_start_date = start_date
|
||||
member.paused_date = start_date
|
||||
member.save()
|
||||
|
||||
result = utils.tally_membership_months(member)
|
||||
|
||||
self.assertEqual(result, False)
|
||||
|
||||
class TestParsePayPalDate(TestCase):
|
||||
def test_parse(self):
|
||||
string = '20:12:59 Jan 13, 2009 PST'
|
||||
|
||||
result = utils_paypal.parse_paypal_date(string)
|
||||
|
||||
self.assertEqual(str(result), '2009-01-14 04:12:59+00:00')
|
||||
|
||||
def test_parse_dst(self):
|
||||
string = '20:12:59 Jul 13, 2009 PDT'
|
||||
|
||||
result = utils_paypal.parse_paypal_date(string)
|
||||
|
||||
self.assertEqual(str(result), '2009-07-14 03:12:59+00:00')
|
||||
|
||||
def test_parse_bad_tz(self):
|
||||
string = '20:12:59 Jul 13, 2009 QOT'
|
||||
|
||||
self.assertRaises(ValidationError, utils_paypal.parse_paypal_date, string)
|
||||
|
||||
def test_parse_bad_string(self):
|
||||
string = 'ave satanas'
|
||||
|
||||
self.assertRaises(ValidationError, utils_paypal.parse_paypal_date, string)
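The spanned/difference distinction exercised by the tests above is easiest to see side by side; an illustrative check using the same values as the two-day test cases:

import datetime
from apiserver.api import utils

d1 = datetime.date(2020, 2, 1)
d2 = datetime.date(2020, 1, 31)
utils.num_months_spanned(d1, d2)     # 1 -- the pair crosses the Jan/Feb boundary
utils.num_months_difference(d1, d2)  # 0 -- less than one whole month apart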
apiserver/apiserver/api/throttles.py (new file, 50 lines)
@@ -0,0 +1,50 @@
import logging
logger = logging.getLogger(__name__)

from rest_framework import throttling

class LoggingThrottle(throttling.BaseThrottle):
    def allow_request(self, request, view):
        if request.user.id:
            user = '{} ({})'.format(request.user, request.user.member.id)
        else:
            user = None

        method = request._request.method
        path = request._request.path

        if method == 'OPTIONS':
            return True

        if path.startswith('/lockout/'):
            return True
        elif path == '/stats/sign/':
            pass  # log this one
        elif path.startswith('/stats/'):
            return True
        elif path == '/sessions/' and user == None:
            return True
        elif path in [
            '/pinball/high_scores/',
            '/pinball/monthly_high_scores/',
            '/protocoin/printer_balance/',
            '/hosting/high_scores/',
            '/hosting/monthly_high_scores/',
            '/stats/ord2/printer3d/',
            '/stats/ord3/printer3d/'
        ]:
            return True

        if request.data:
            if type(request.data) is not dict:
                data = request.data.dict()
            else:
                data = request.data
            for key in ['password', 'password1', 'password2', 'old_password', 'new_password1', 'new_password2']:
                if key in data:
                    data[key] = '[CENSORED]'
        else:
            data = None

        logging.info('%s %s | User: %s | Data: %s', method, path, user, data)
        return True
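LoggingThrottle never rejects anything; it only decides what to log before returning True, so it presumably gets installed globally. In DRF that is the DEFAULT_THROTTLE_CLASSES setting; a sketch of the entry (the settings module layout is assumed, not shown in this diff):

REST_FRAMEWORK = {
    'DEFAULT_THROTTLE_CLASSES': [
        'apiserver.api.throttles.LoggingThrottle',
    ],
}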
apiserver/apiserver/api/utils.py (new file, 527 lines)
@@ -0,0 +1,527 @@
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
import io
|
||||
import json
|
||||
import requests
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.views import exception_handler
|
||||
from dateutil import relativedelta
|
||||
from uuid import uuid4
|
||||
from PIL import Image, ImageDraw, ImageFont, ImageOps, JpegImagePlugin
|
||||
JpegImagePlugin._getmp = lambda x: None
|
||||
from bleach.sanitizer import Cleaner
|
||||
from PyPDF2 import PdfFileWriter, PdfFileReader
|
||||
from reportlab.pdfgen import canvas
|
||||
from reportlab.lib.pagesizes import letter
|
||||
|
||||
from django.db.models import Sum
|
||||
from django.core.cache import cache
|
||||
from django.utils.timezone import now, pytz
|
||||
|
||||
from . import models, serializers, utils_ldap, utils_stats, utils_auth, utils, utils_email
|
||||
from .. import settings, secrets
|
||||
|
||||
STATIC_FOLDER = 'data/static/'
|
||||
|
||||
TIMEZONE_CALGARY = pytz.timezone('America/Edmonton')
|
||||
|
||||
def today_alberta_tz():
|
||||
return datetime.now(TIMEZONE_CALGARY).date()
|
||||
|
||||
def now_alberta_tz():
|
||||
return datetime.now(TIMEZONE_CALGARY)
|
||||
|
||||
def alert_tanner(message):
|
||||
try:
|
||||
logger.info('Alerting Tanner: ' + message)
|
||||
params = dict(spaceport=message)
|
||||
requests.get('https://tbot.tannercollin.com/message', params=params, timeout=4)
|
||||
except BaseException as e:
|
||||
logger.error('Problem alerting Tanner: ' + str(e))
|
||||
|
||||
def spaceporter_host(message):
|
||||
logger.info('Spaceporter bot sending to host chat: ' + message)
|
||||
|
||||
if secrets.SPACEPORTER_HOST_TOKEN:
|
||||
url = 'https://forum.protospace.ca/chat/hooks/{}.json'.format(
|
||||
secrets.SPACEPORTER_HOST_TOKEN,
|
||||
)
|
||||
else:
|
||||
logger.info('Aborting Spaceporter bot message, no token.')
|
||||
return
|
||||
|
||||
try:
|
||||
data = dict(text=message)
|
||||
requests.post(url, json=data, timeout=4)
|
||||
except BaseException as e:
|
||||
logger.error('Problem with bot: ' + str(e))
|
||||
|
||||
def num_months_spanned(d1, d2):
|
||||
'''
|
||||
Return number of month thresholds two dates span.
|
||||
Order of arguments is same as subtraction
|
||||
ie. Feb 2, Jan 29 returns 1
|
||||
'''
|
||||
return (d1.year - d2.year) * 12 + d1.month - d2.month
|
||||
|
||||
def num_months_difference(d1, d2):
|
||||
'''
|
||||
Return number of whole months between two dates.
|
||||
Order of arguments is same as subtraction
|
||||
ie. Feb 2, Jan 29 returns 0
|
||||
'''
|
||||
r = relativedelta.relativedelta(d1, d2)
|
||||
return r.months + 12 * r.years
|
||||
|
||||
def calc_member_status(expire_date, fake_date=None):
|
||||
'''
|
||||
Return: member status
|
||||
'''
|
||||
today = fake_date or today_alberta_tz()
|
||||
|
||||
difference = num_months_difference(expire_date, today)
|
||||
|
||||
if today + timedelta(days=29) < expire_date:
|
||||
return 'Prepaid'
|
||||
elif difference <= -3:
|
||||
return 'Expired Member'
|
||||
elif today - timedelta(days=29) >= expire_date:
|
||||
return 'Overdue'
|
||||
elif today < expire_date:
|
||||
return 'Current'
|
||||
elif today >= expire_date:
|
||||
return 'Due'
|
||||
else:
|
||||
raise Exception('unreachable: unhandled expire_date comparison')
|
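Concretely, the thresholds above fall out roughly like this (illustrative calls, not from the repo, with a pinned fake_date so the results are stable):

import datetime
from apiserver.api import utils

today = datetime.date(2020, 6, 15)
utils.calc_member_status(datetime.date(2020, 9, 1), today)   # 'Prepaid'  (more than 29 days ahead)
utils.calc_member_status(datetime.date(2020, 7, 1), today)   # 'Current'  (expires within 29 days)
utils.calc_member_status(datetime.date(2020, 6, 1), today)   # 'Due'      (recently lapsed)
utils.calc_member_status(datetime.date(2020, 5, 1), today)   # 'Overdue'  (about a month past)
utils.calc_member_status(datetime.date(2020, 3, 1), today)   # 'Expired Member' (3+ whole months past)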
||||
|
||||
def add_months(date, num_months):
|
||||
return date + relativedelta.relativedelta(months=num_months)
|
||||
|
||||
def tally_membership_months(member, fake_date=None):
|
||||
'''
|
||||
Sum together member's dues and calculate their new expire date and status
|
||||
Doesn't work if member is paused.
|
||||
'''
|
||||
if member.paused_date: return False
|
||||
|
||||
start_date = member.current_start_date
|
||||
if not start_date: return False
|
||||
|
||||
txs = models.Transaction.objects.filter(
|
||||
user__member=member,
|
||||
date__gte=start_date,
|
||||
)
|
||||
total_months_agg = txs.aggregate(Sum('number_of_membership_months'))
|
||||
total_months = total_months_agg['number_of_membership_months__sum'] or 0
|
||||
|
||||
expire_date = add_months(start_date, total_months)
|
||||
status = calc_member_status(expire_date, fake_date)
|
||||
|
||||
if member.expire_date != expire_date or member.status != status:
|
||||
previous_status = member.status
|
||||
|
||||
member.expire_date = expire_date
|
||||
member.status = status
|
||||
|
||||
if status == 'Expired Member':
|
||||
member.paused_date = today_alberta_tz()
|
||||
msg = 'Member has expired: {} {}'.format(member.preferred_name, member.last_name)
|
||||
alert_tanner(msg)
|
||||
logger.info(msg)
|
||||
|
||||
if status == 'Overdue':
|
||||
if previous_status == 'Due':
|
||||
msg = 'Member has become Overdue: {} {}'.format(member.preferred_name, member.last_name)
|
||||
alert_tanner(msg)
|
||||
logger.info(msg)
|
||||
|
||||
utils_email.send_overdue_email(member)
|
||||
else:
|
||||
logger.info('Skipping email because member wasn\'t due before.')
|
||||
|
||||
member.save()
|
||||
logging.debug('Tallied %s membership months: updated.', member)
|
||||
else:
|
||||
logging.debug('Tallied %s membership months: no changes.', member)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def gen_search_strings():
|
||||
'''
|
||||
Generate a cache dict of names to member ids for rapid string matching
|
||||
'''
|
||||
start = time.time()
|
||||
|
||||
search_strings = {}
|
||||
for m in models.Member.objects.order_by('-expire_date').prefetch_related('user__storage'):
|
||||
string = '{} {} | {} {}'.format(
|
||||
m.preferred_name,
|
||||
m.last_name,
|
||||
m.first_name,
|
||||
m.last_name,
|
||||
)
|
||||
|
||||
string += ' | ' + m.user.email
|
||||
|
||||
if m.discourse_username:
|
||||
string += ' | ' + m.discourse_username
|
||||
|
||||
string += ' | ' + str(m.id)
|
||||
|
||||
for s in m.user.storage.all():
|
||||
string += ' | ' + s.shelf_id
|
||||
|
||||
string = string.lower()
|
||||
search_strings[string] = m.id
|
||||
|
||||
cache.set('search_strings', search_strings)
|
||||
|
||||
logger.info('Generated search strings in %s s.', time.time() - start)
|
||||
|
||||
|
||||
LARGE_SIZE = 1080
|
||||
MEDIUM_SIZE = 220
|
||||
SMALL_SIZE = 110
|
||||
|
||||
def process_image_upload(upload, crop):
|
||||
'''
|
||||
Save an image upload in small, medium, large sizes and return filenames
|
||||
'''
|
||||
try:
|
||||
pic = Image.open(upload)
|
||||
except OSError:
|
||||
raise serializers.ValidationError(dict(non_field_errors='Invalid image file.'))
|
||||
|
||||
logging.info('Detected format: %s', pic.format)
|
||||
|
||||
if pic.format == 'PNG':
|
||||
ext = '.png'
|
||||
elif pic.format == 'JPEG':
|
||||
ext = '.jpg'
|
||||
else:
|
||||
raise serializers.ValidationError(dict(non_field_errors='Image must be a jpg or png.'))
|
||||
|
||||
pic = ImageOps.exif_transpose(pic)
|
||||
|
||||
if crop:
|
||||
crop = json.loads(crop)
|
||||
pic_x, pic_y = pic.size
|
||||
left = pic_x * crop['x']/100.0
|
||||
top = pic_y * crop['y']/100.0
|
||||
right = left + pic_x * crop['width']/100.0
|
||||
bottom = top + pic_y * crop['height']/100.0
|
||||
pic = pic.crop((left, top, right, bottom))
|
||||
|
||||
large = str(uuid4()) + ext
|
||||
pic.thumbnail([LARGE_SIZE, LARGE_SIZE], Image.ANTIALIAS)
|
||||
pic.save(STATIC_FOLDER + large)
|
||||
|
||||
medium = str(uuid4()) + ext
|
||||
pic.thumbnail([MEDIUM_SIZE, MEDIUM_SIZE], Image.ANTIALIAS)
|
||||
pic.save(STATIC_FOLDER + medium)
|
||||
|
||||
small = str(uuid4()) + ext
|
||||
pic.thumbnail([SMALL_SIZE, SMALL_SIZE], Image.ANTIALIAS)
|
||||
pic.save(STATIC_FOLDER + small)
|
||||
|
||||
return small, medium, large
|
||||
|
||||
|
||||
GARDEN_MEDIUM_SIZE = 500
|
||||
|
||||
def process_garden_image(upload):
|
||||
try:
|
||||
pic = Image.open(upload)
|
||||
except OSError:
|
||||
raise serializers.ValidationError(dict(non_field_errors='Invalid image file.'))
|
||||
|
||||
logging.debug('Detected format: %s', pic.format)
|
||||
|
||||
if pic.format == 'PNG':
|
||||
ext = '.png'
|
||||
elif pic.format == 'JPEG':
|
||||
ext = '.jpg'
|
||||
else:
|
||||
raise serializers.ValidationError(dict(non_field_errors='Image must be a jpg or png.'))
|
||||
|
||||
pic = ImageOps.exif_transpose(pic)
|
||||
|
||||
draw = ImageDraw.Draw(pic)
|
||||
|
||||
timestamp = now_alberta_tz().strftime('%a %b %-d, %Y %-I:%M %p')
|
||||
|
||||
font = ImageFont.truetype('DejaVuSans.ttf', 60)
|
||||
draw.text((10, 10), timestamp, (0,0,0), font=font)
|
||||
|
||||
large = 'garden-large' + ext
|
||||
pic.save(STATIC_FOLDER + large)
|
||||
|
||||
medium = 'garden-medium' + ext
|
||||
pic.thumbnail([GARDEN_MEDIUM_SIZE, GARDEN_MEDIUM_SIZE], Image.ANTIALIAS)
|
||||
pic.save(STATIC_FOLDER + medium)
|
||||
|
||||
return medium, large
|
||||
|
||||
|
||||
CARD_TEMPLATE_FILE = 'misc/member_card_template.jpg'
|
||||
CARD_PHOTO_SIZE = 425
|
||||
CARD_PHOTO_MARGIN_TOP = 75
|
||||
CARD_PHOTO_MARGIN_SIDE = 30
|
||||
CARD_TEXT_SIZE_LIMIT = 550
|
||||
|
||||
def gen_card_photo(member):
|
||||
card_template = Image.open(CARD_TEMPLATE_FILE)
|
||||
|
||||
member_photo = Image.open(STATIC_FOLDER + member.photo_large)
|
||||
member_photo.thumbnail([CARD_PHOTO_SIZE, CARD_PHOTO_SIZE], Image.ANTIALIAS)
|
||||
member_photo = ImageOps.expand(member_photo, border=10)
|
||||
mx, my = member_photo.size
|
||||
|
||||
x = CARD_PHOTO_MARGIN_SIDE
|
||||
y = CARD_PHOTO_MARGIN_TOP
|
||||
card_template.paste(member_photo, (x, y))
|
||||
|
||||
draw = ImageDraw.Draw(card_template)
|
||||
|
||||
# check font size
|
||||
font_sizes = (60, 72)
|
||||
font = ImageFont.truetype('DejaVuSans-Bold.ttf', font_sizes[1])
|
||||
size = draw.textsize(str(member.last_name), font=font)
|
||||
if size[0] > CARD_TEXT_SIZE_LIMIT:
|
||||
font_sizes = (36, 48)
|
||||
|
||||
font = ImageFont.truetype('DejaVuSans.ttf', font_sizes[0])
|
||||
x = CARD_PHOTO_MARGIN_SIDE
|
||||
y = my + CARD_PHOTO_MARGIN_TOP + CARD_PHOTO_MARGIN_SIDE
|
||||
draw.text((x, y), str(member.preferred_name), (0,0,0), font=font)
|
||||
|
||||
font = ImageFont.truetype('DejaVuSans-Bold.ttf', font_sizes[1])
|
||||
y = my + CARD_PHOTO_MARGIN_TOP + CARD_PHOTO_MARGIN_SIDE + font_sizes[1]
|
||||
draw.text((x, y), str(member.last_name), (0,0,0), font=font)
|
||||
|
||||
font = ImageFont.truetype('DejaVuSans.ttf', 36)
|
||||
draw.text((x, 800), 'Joined: ' + str(member.application_date or 'Unknown'), (0,0,0), font=font)
|
||||
y = CARD_PHOTO_MARGIN_SIDE
|
||||
draw.text((475, y), str(member.id), (0,0,0), font=font)
|
||||
|
||||
bio = io.BytesIO()
|
||||
card_template.save(bio, 'JPEG', quality=95)
|
||||
bio.seek(0)
|
||||
|
||||
return bio
|
||||
|
||||
|
||||
ALLOWED_TAGS = [
|
||||
'h3',
|
||||
'p',
|
||||
'br',
|
||||
'strong',
|
||||
'em',
|
||||
'u',
|
||||
'code',
|
||||
'ol',
|
||||
'li',
|
||||
'ul',
|
||||
'a',
|
||||
]
|
||||
|
||||
clean = Cleaner(tags=ALLOWED_TAGS).clean
|
||||
|
||||
|
||||
def is_request_from_protospace(request):
|
||||
# TODO: pull to config
|
||||
whitelist = ['24.66.110.96', '205.233.15.76', '205.233.15.69', '70.75.142.145']
|
||||
|
||||
if settings.DEBUG:
|
||||
return True
|
||||
|
||||
# set (not appended) directly by nginx so we can trust it
|
||||
real_ip = request.META.get('HTTP_X_REAL_IP', False)
|
||||
|
||||
return real_ip in whitelist
|
||||
|
||||
def create_new_member(data, user):
|
||||
members = models.Member.objects
|
||||
if members.filter(old_email__iexact=data['email']).exists():
|
||||
msg = 'Account was found in old portal.'
|
||||
logger.info(msg)
|
||||
raise ValidationError(dict(email=msg))
|
||||
|
||||
if utils_ldap.is_configured():
|
||||
if data['request_id']: utils_stats.set_progress(data['request_id'], 'Creating LDAP account...')
|
||||
result = utils_ldap.find_user(user.username)
|
||||
if result == 200:
|
||||
msg = 'Username was found in old portal.'
|
||||
logger.info(msg)
|
||||
raise ValidationError(dict(username=msg))
|
||||
elif result == 404:
|
||||
pass
|
||||
else:
|
||||
msg = 'Problem connecting to LDAP server.'
|
||||
alert_tanner(msg)
|
||||
logger.info(msg)
|
||||
raise ValidationError(dict(non_field_errors=msg))
|
||||
|
||||
if utils_ldap.create_user(data) != 200:
|
||||
msg = 'Problem connecting to LDAP server: create.'
|
||||
alert_tanner(msg)
|
||||
logger.info(msg)
|
||||
raise ValidationError(dict(non_field_errors=msg))
|
||||
|
||||
if data['request_id']: utils_stats.set_progress(data['request_id'], 'Creating new member...')
|
||||
|
||||
models.Member.objects.create(
|
||||
user=user,
|
||||
first_name=data['first_name'],
|
||||
last_name=data['last_name'],
|
||||
preferred_name=data['preferred_name'],
|
||||
)
|
||||
|
||||
def register_user(data, user):
|
||||
data = data.copy()
|
||||
data['first_name'] = data['first_name'].title().strip()
|
||||
data['last_name'] = data['last_name'].title().strip()
|
||||
data['preferred_name'] = data['preferred_name'].title().strip()
|
||||
|
||||
# Sometimes during demos, a user makes a fake account that then has to be cleaned out
|
||||
# Notify me that this has happened so I can go clean out the database
|
||||
if 'test' in data['username']:
|
||||
msg = 'Someone created a test account: {} {} {} {}'.format(
|
||||
data['username'],
|
||||
data['first_name'],
|
||||
data['last_name'],
|
||||
data['email'],
|
||||
)
|
||||
logger.info(msg)
|
||||
alert_tanner(msg)
|
||||
|
||||
try:
|
||||
logger.info('Creating new member...')
|
||||
create_new_member(data, user)
|
||||
except:
|
||||
user.delete()
|
||||
raise
|
||||
|
||||
auth_data = dict(
|
||||
username=data['username'],
|
||||
password=data['password1'],
|
||||
email=data['email'],
|
||||
first_name=data['preferred_name'],
|
||||
)
|
||||
|
||||
if utils_auth.wiki_is_configured():
|
||||
if data['request_id']: utils_stats.set_progress(data['request_id'], 'Creating Wiki account...')
|
||||
if utils_auth.set_wiki_password(auth_data) != 200:
|
||||
msg = 'Problem connecting to Wiki Auth server: set.'
|
||||
utils.alert_tanner(msg)
|
||||
logger.info(msg)
|
||||
|
||||
if utils_auth.discourse_is_configured():
|
||||
if data['request_id']: utils_stats.set_progress(data['request_id'], 'Creating Discourse account...')
|
||||
if utils_auth.set_discourse_password(auth_data) != 200:
|
||||
msg = 'Problem connecting to Discourse Auth server: set.'
|
||||
utils.alert_tanner(msg)
|
||||
logger.info(msg)
|
||||
if not user.member.discourse_username:
|
||||
user.member.discourse_username = user.username
|
||||
user.member.save()
|
||||
|
||||
if utils_auth.discourse_is_configured():
|
||||
if data['request_id']: utils_stats.set_progress(data['request_id'], 'Adding to Discourse group...')
|
||||
if utils_auth.add_discourse_group_members('protospace_members', [data['username']]) != 200:
|
||||
msg = 'Problem connecting to Discourse Auth server: add.'
|
||||
utils.alert_tanner(msg)
|
||||
logger.info(msg)
|
||||
|
||||
if data['request_id']: utils_stats.set_progress(data['request_id'], 'Sending welcome email...')
|
||||
try:
|
||||
utils_email.send_welcome_email(user.member)
|
||||
except BaseException as e:
|
||||
msg = 'Problem sending welcome email: ' + str(e)
|
||||
logger.exception(msg)
|
||||
alert_tanner(msg)
|
||||
|
||||
|
||||
if data['request_id']: utils_stats.set_progress(data['request_id'], 'Done!')
|
||||
|
||||
gen_search_strings()
|
||||
|
||||
cache.set('sign', 'Welcome to Protospace, {}!'.format(data['preferred_name']))
|
||||
|
||||
|
||||
BLANK_FORM = 'misc/blank_member_form.pdf'
|
||||
def gen_member_forms(member):
|
||||
serializer = serializers.MemberSerializer(member)
|
||||
data = serializer.data
|
||||
|
||||
packet = io.BytesIO()
|
||||
can = canvas.Canvas(packet, pagesize=letter)
|
||||
can.drawString(34, 683, data['first_name'])
|
||||
can.drawString(218, 683, data['last_name'])
|
||||
can.drawString(403, 683, data['preferred_name'])
|
||||
can.drawString(34, 626, data['email'])
|
||||
can.drawString(332, 626, data['phone'])
|
||||
can.drawString(34, 570, data['emergency_contact_name'])
|
||||
can.drawString(332, 570, data['emergency_contact_phone'])
|
||||
can.save()
|
||||
packet.seek(0)
|
||||
info_pdf = PdfFileReader(packet)
|
||||
|
||||
packet = io.BytesIO()
|
||||
can = canvas.Canvas(packet, pagesize=letter)
|
||||
can.drawRightString(600, 770, '{} {} ({})'.format(
|
||||
data['preferred_name'],
|
||||
data['last_name'],
|
||||
data['id'],
|
||||
))
|
||||
can.save()
|
||||
packet.seek(0)
|
||||
topright_pdf = PdfFileReader(packet)
|
||||
|
||||
existing_pdf = PdfFileReader(open(BLANK_FORM, 'rb'))
|
||||
output = PdfFileWriter()
|
||||
page = existing_pdf.getPage(0)
|
||||
page.mergePage(info_pdf.getPage(0))
|
||||
page.mergePage(topright_pdf.getPage(0))
|
||||
output.addPage(page)
|
||||
page = existing_pdf.getPage(1)
|
||||
page.mergePage(topright_pdf.getPage(0))
|
||||
output.addPage(page)
|
||||
page = existing_pdf.getPage(2)
|
||||
page.mergePage(topright_pdf.getPage(0))
|
||||
output.addPage(page)
|
||||
|
||||
file_name = str(uuid4()) + '.pdf'
|
||||
outputStream = open(STATIC_FOLDER + file_name, 'wb')
|
||||
output.write(outputStream)
|
||||
|
||||
member.member_forms = file_name
|
||||
member.save()
|
||||
|
||||
def custom_exception_handler(exc, context):
|
||||
response = exception_handler(exc, context)
|
||||
if response is not None:
|
||||
if hasattr(exc, 'detail'):
|
||||
logging.warning('Response: %s', json.dumps(exc.detail))
|
||||
else:
|
||||
logging.warning('Response: %s', exc)
|
||||
return response
|
||||
|
||||
def log_transaction(tx):
|
||||
msg = 'Transaction log | {} | {} | {} | {} | {} | {} | {} | {} | {}'.format(
|
||||
tx.id,
|
||||
tx.user.username,
|
||||
tx.user.member.id,
|
||||
tx.account_type,
|
||||
tx.amount,
|
||||
tx.protocoin,
|
||||
tx.category,
|
||||
tx.reference_number,
|
||||
tx.memo,
|
||||
)
|
||||
|
||||
logging.info(msg)
|
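custom_exception_handler above only takes effect once DRF is pointed at it via the EXCEPTION_HANDLER key; a sketch of the settings entry (the settings layout is assumed, not shown in this diff):

REST_FRAMEWORK = {
    'EXCEPTION_HANDLER': 'apiserver.api.utils.custom_exception_handler',
}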
apiserver/apiserver/api/utils_auth.py (new file, 67 lines)
@@ -0,0 +1,67 @@
import logging
logger = logging.getLogger(__name__)

import requests
from requests.exceptions import Timeout

from apiserver import secrets
from apiserver.api import utils

def wiki_is_configured():
    return bool(secrets.WIKI_AUTH_API_URL and secrets.AUTH_API_KEY)

def discourse_is_configured():
    return bool(secrets.DISCOURSE_AUTH_API_URL and secrets.AUTH_API_KEY)


def auth_api(url, data=None, json=None):
    try:
        headers = {'Authorization': 'Token ' + secrets.AUTH_API_KEY}
        r = requests.post(url, data=data, json=json, headers=headers, timeout=6)
        return r.status_code
    except Timeout as e:
        logger.info('Auth {} - {} - {}'.format(url, e.__class__.__name__, str(e)))
        logger.info('Auth timeout occurred, assuming it worked and returning 200.')
        return 200
    except KeyboardInterrupt:
        raise
    except BaseException as e:
        logger.error('Auth {} - {} - {}'.format(url, e.__class__.__name__, str(e)))
        return None

def set_wiki_password(data):
    auth_data = dict(
        username=data['username'].lower(),
        password=data['password'],
    )
    return auth_api(secrets.WIKI_AUTH_API_URL + 'set-wiki-password', data=auth_data)

def set_discourse_password(data):
    auth_data = dict(
        username=data['username'].lower(),
        password=data['password'],
        first_name=data['first_name'],
        email=data['email'],
    )
    return auth_api(secrets.DISCOURSE_AUTH_API_URL + 'set-discourse-password', data=auth_data)

def add_discourse_group_members(group_name, usernames):
    json = dict(
        group_name=group_name,
        usernames=usernames,
    )
    return auth_api(secrets.DISCOURSE_AUTH_API_URL + 'add-discourse-group-members', json=json)

def remove_discourse_group_members(group_name, usernames):
    json = dict(
        group_name=group_name,
        usernames=usernames,
    )
    return auth_api(secrets.DISCOURSE_AUTH_API_URL + 'remove-discourse-group-members', json=json)

def change_discourse_username(username, new_username):
    data = dict(
        username=username,
        new_username=new_username,
    )
    return auth_api(secrets.DISCOURSE_AUTH_API_URL + 'change-discourse-username', data=data)
apiserver/apiserver/api/utils_email.py (new file, 154 lines)
@@ -0,0 +1,154 @@
import logging
logger = logging.getLogger(__name__)

import os
import smtplib
import time
from datetime import datetime, timedelta

from django.core.mail import send_mail, EmailMultiAlternatives

from . import utils
from .. import settings

EMAIL_DIR = os.path.join(settings.BASE_DIR, 'apiserver/api/emails/')

def send_welcome_email(member):
    vetting_date = member.application_date + timedelta(days=28)

    def replace_fields(text):
        return text.replace(
            '[name]', member.preferred_name,
        ).replace(
            '[username]', member.user.username,
        ).replace(
            '[date]', vetting_date.strftime('%A, %B %d'),
        )

    with open(EMAIL_DIR + 'welcome.txt', 'r') as f:
        email_text = replace_fields(f.read())

    with open(EMAIL_DIR + 'welcome.html', 'r') as f:
        email_html = replace_fields(f.read())

    send_mail(
        subject='Welcome to Protospace!',
        message=email_text,
        from_email=None,  # defaults to DEFAULT_FROM_EMAIL
        recipient_list=[member.user.email],
        html_message=email_html,
    )

    logger.info('Sent welcome email:\n' + email_text)

def send_ical_email(member, session, ical_file):
    def replace_fields(text):
        date = session.datetime.astimezone(utils.TIMEZONE_CALGARY).strftime('%A, %B %d')

        return text.replace(
            '[name]', member.preferred_name,
        ).replace(
            '[class]', session.course.name,
        ).replace(
            '[date]', date
        )

    with open(EMAIL_DIR + 'ical.txt', 'r') as f:
        email_text = replace_fields(f.read())

    with open(EMAIL_DIR + 'ical.html', 'r') as f:
        email_html = replace_fields(f.read())

    subject = 'Protospace ' + session.course.name
    from_email = None  # defaults to DEFAULT_FROM_EMAIL
    to = member.user.email
    msg = EmailMultiAlternatives(subject, email_text, from_email, [to])
    msg.attach_alternative(email_html, "text/html")
    msg.attach('event.ics', ical_file, 'text/calendar')
    msg.send()

    logger.info('Sent ical email:\n' + email_text)

def send_interest_email(interest):
    def replace_fields(text):
        return text.replace(
            '[name]', interest.user.member.preferred_name,
        ).replace(
            '[course]', interest.course.name,
        ).replace(
            '[link]', 'https://my.protospace.ca/courses/' + str(interest.course.id),
        )

    with open(EMAIL_DIR + 'interest.txt', 'r') as f:
        email_text = replace_fields(f.read())

    with open(EMAIL_DIR + 'interest.html', 'r') as f:
        email_html = replace_fields(f.read())

    send_mail(
        subject='Protospace class scheduled',
        message=email_text,
        from_email=None,  # defaults to DEFAULT_FROM_EMAIL
        recipient_list=[interest.user.email],
        html_message=email_html,
    )

    if not settings.EMAIL_HOST:
        time.sleep(0.5)  # simulate slowly sending emails when logging to console

    logger.info('Sent interest email:\n' + email_text)

def send_usage_bill_email(user, device, month, minutes, overage, bill):
    def replace_fields(text):
        return text.replace(
            '[name]', user.member.preferred_name,
        ).replace(
            '[device]', device,
        ).replace(
            '[month]', month,
        ).replace(
            '[minutes]', str(minutes),
        ).replace(
            '[overage]', str(overage),
        ).replace(
            '[bill]', bill,
        )

    with open(EMAIL_DIR + 'usage_bill.txt', 'r') as f:
        email_text = replace_fields(f.read())

    send_mail(
        subject='{} {} Usage Bill'.format(month, device),
        message=email_text,
        from_email=None,  # defaults to DEFAULT_FROM_EMAIL
        recipient_list=[user.email, 'directors@protospace.ca', 'spaceport@tannercollin.com'],
    )

    if not settings.EMAIL_HOST:
        time.sleep(0.5)  # simulate slowly sending emails when logging to console

    logger.info('Sent usage bill email:\n' + email_text)

def send_overdue_email(member):
    def replace_fields(text):
        return text.replace(
            '[name]', member.preferred_name,
        ).replace(
            '[date]', member.expire_date.strftime('%B %d, %Y'),
        )

    with open(EMAIL_DIR + 'overdue.txt', 'r') as f:
        email_text = replace_fields(f.read())

    with open(EMAIL_DIR + 'overdue.html', 'r') as f:
        email_html = replace_fields(f.read())

    send_mail(
        subject='Protospace member dues overdue',
        message=email_text,
        from_email=None,  # defaults to DEFAULT_FROM_EMAIL
        recipient_list=[member.user.email, 'directors@protospace.ca', 'spaceport@tannercollin.com'],
        html_message=email_html,
    )

    logger.info('Sent overdue email:\n' + email_text)
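All of these senders go through Django's configured email backend; the EMAIL_HOST checks above match running locally with the console backend, where mail is just printed. A sketch of the relevant settings (example values, not taken from this diff):

EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'  # local dev: print emails to the console
# EMAIL_HOST = 'smtp.example.com'   # set in production so real mail goes out
DEFAULT_FROM_EMAIL = 'portal@example.com'  # used when from_email=None above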
apiserver/apiserver/api/utils_ldap.py (new file, 72 lines)
@@ -0,0 +1,72 @@
import logging
logger = logging.getLogger(__name__)

import requests

from apiserver import secrets
from apiserver.api import utils

def is_configured():
    return bool(secrets.LDAP_API_URL and secrets.LDAP_API_KEY)


def ldap_api(route, data):
    try:
        headers = {'Authorization': 'Token ' + secrets.LDAP_API_KEY}
        url = secrets.LDAP_API_URL + route
        r = requests.post(url, data=data, headers=headers, timeout=10)
        return r.status_code
    except KeyboardInterrupt:
        raise
    except BaseException as e:
        logger.error('LDAP {} - {} - {}'.format(url, e.__class__.__name__, str(e)))
        return None

def find_user(username):
    ldap_data = dict(username=username)
    return ldap_api('find-user', ldap_data)

def create_user(data):
    ldap_data = dict(
        first=data['preferred_name'],
        last=data['last_name'],
        username=data['username'],
        email=data['email'],
        password=data['password1'],
    )
    return ldap_api('create-user', ldap_data)

def set_password(data):
    ldap_data = dict(
        username=data['username'],
        password=data['password1'],
    )
    return ldap_api('set-password', ldap_data)

def add_to_group(member, group):
    try:
        ldap_data = dict(group=group)

        ldap_data['username'] = member.user.username

        if ldap_api('add-to-group', ldap_data) != 200: raise
    except BaseException as e:
        logger.error('LDAP Group - {} - {}'.format(e.__class__.__name__, str(e)))
        m = '{} {} ({})'.format(member.preferred_name, member.last_name, member.id)
        msg = 'Problem adding {} to group {}!'.format(m, group)
        utils.alert_tanner(msg)
        logger.info(msg)

def remove_from_group(member, group):
    try:
        ldap_data = dict(group=group)

        ldap_data['username'] = member.user.username

        if ldap_api('remove-from-group', ldap_data) != 200: raise
    except BaseException as e:
        logger.error('LDAP Group - {} - {}'.format(e.__class__.__name__, str(e)))
        m = '{} {} ({})'.format(member.preferred_name, member.last_name, member.id)
        msg = 'Problem removing {} from group {}!'.format(m, group)
        utils.alert_tanner(msg)
        logger.info(msg)
apiserver/apiserver/api/utils_paypal.py (new file, 403 lines)
@@ -0,0 +1,403 @@
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import requests
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from uuid import uuid4
|
||||
|
||||
from django.db.models import Sum
|
||||
from django.utils import timezone
|
||||
from django.utils.timezone import now
|
||||
|
||||
from . import models, serializers, utils
|
||||
from .. import settings
|
||||
|
||||
SANDBOX = False
|
||||
if SANDBOX:
|
||||
VERIFY_URL = 'https://ipnpb.sandbox.paypal.com/cgi-bin/webscr'
|
||||
OUR_EMAIL = 'seller@paypalsandbox.com'
|
||||
OUR_CURRENCY = 'USD'
|
||||
else:
|
||||
VERIFY_URL = 'https://ipnpb.paypal.com/cgi-bin/webscr'
|
||||
OUR_EMAIL = 'paypal@protospace.ca'
|
||||
OUR_CURRENCY = 'CAD'
|
||||
|
||||
def parse_paypal_date(string):
|
||||
'''
|
||||
Convert paypal date string into python datetime. PayPal's a bunch of idiots.
|
||||
Their API returns dates in some custom format, so we have to parse it.
|
||||
|
||||
Stolen from:
|
||||
https://github.com/spookylukey/django-paypal/blob/master/paypal/standard/forms.py
|
||||
|
||||
Return the UTC python datetime.
|
||||
'''
|
||||
MONTHS = [
|
||||
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug',
|
||||
'Sep', 'Oct', 'Nov', 'Dec',
|
||||
]
|
||||
|
||||
if not string: return now()
|
||||
|
||||
value = string.strip()
|
||||
try:
|
||||
time_part, month_part, day_part, year_part, zone_part = value.split()
|
||||
month_part = month_part.strip('.')
|
||||
day_part = day_part.strip(',')
|
||||
month = MONTHS.index(month_part) + 1
|
||||
day = int(day_part)
|
||||
year = int(year_part)
|
||||
hour, minute, second = map(int, time_part.split(':'))
|
||||
dt = datetime.datetime(year, month, day, hour, minute, second)
|
||||
except ValueError as e:
|
||||
raise ValidationError('Invalid date format {} {}'.format(
|
||||
value, str(e)
|
||||
))
|
||||
|
||||
if zone_part in ['PDT', 'PST']:
|
||||
# PST/PDT is 'US/Pacific' and ignored, localize only cares about date
|
||||
dt = timezone.pytz.timezone('US/Pacific').localize(dt)
|
||||
dt = dt.astimezone(timezone.pytz.UTC)
|
||||
else:
|
||||
raise ValidationError('Bad timezone: ' + zone_part)
|
||||
return dt
|
||||
|
||||
def record_ipn(data):
|
||||
'''
|
||||
Record each individual IPN (even dupes) for logging and debugging
|
||||
'''
|
||||
return models.IPN.objects.create(
|
||||
data=data.urlencode(),
|
||||
status='New',
|
||||
)
|
||||
|
||||
def update_ipn(ipn, status):
|
||||
ipn.status = status
|
||||
ipn.save()
|
||||
|
||||
def verify_paypal_ipn(data):
|
||||
if settings.DEBUG:
|
||||
return True
|
||||
|
||||
params = data.copy()
|
||||
params['cmd'] = '_notify-validate'
|
||||
headers = {
|
||||
'content-type': 'application/x-www-form-urlencoded',
|
||||
'user-agent': 'spaceport',
|
||||
}
|
||||
|
||||
try:
|
||||
r = requests.post(VERIFY_URL, params=params, headers=headers, timeout=4)
|
||||
r.raise_for_status()
|
||||
logger.info('Result: ' + r.text)
|
||||
if r.text == 'VERIFIED':
|
||||
return True
|
||||
except BaseException as e:
|
||||
logger.error('IPN verify - {} - {}'.format(e.__class__.__name__, str(e)))
|
||||
|
||||
logger.info('IPN - verification failed, retrying...')
|
||||
|
||||
try:
|
||||
r = requests.post(VERIFY_URL, params=params, headers=headers, timeout=4)
|
||||
r.raise_for_status()
|
||||
logger.info('Result: ' + r.text)
|
||||
if r.text == 'VERIFIED':
|
||||
return True
|
||||
except BaseException as e:
|
||||
logger.error('IPN verify - {} - {}'.format(e.__class__.__name__, str(e)))
|
||||
|
||||
utils.alert_tanner('IPN failed to verify:\n\n' + str(data.dict()))
|
||||
|
||||
return False
|
||||
|
||||
def build_tx(data):
|
||||
amount = float(data.get('mc_gross', 0))
|
||||
return dict(
|
||||
account_type='PayPal',
|
||||
amount=amount,
|
||||
date=parse_paypal_date(data.get('payment_date', '')),
|
||||
info_source='PayPal IPN',
|
||||
payment_method=data.get('payment_type', 'unknown'),
|
||||
paypal_payer_id=data.get('payer_id', 'unknown'),
|
||||
paypal_txn_id=data.get('txn_id', 'unknown'),
|
||||
paypal_txn_type=data.get('txn_type', 'unknown'),
|
||||
reference_number=data.get('txn_id', 'unknown'),
|
||||
)
|
||||
|
||||
def create_unmatched_member_tx(data):
|
||||
transactions = models.Transaction.objects
|
||||
|
||||
report_memo = 'Cant link sender name, {} {}, email: {}, note: {} - {}'.format(
|
||||
data.get('first_name', 'unknown'),
|
||||
data.get('last_name', 'unknown'),
|
||||
data.get('payer_email', 'unknown'),
|
||||
data.get('custom', 'none'),
|
||||
data.get('memo', 'none'),
|
||||
)
|
||||
|
||||
tx = transactions.create(
|
||||
**build_tx(data),
|
||||
report_memo=report_memo,
|
||||
report_type='Unmatched Member',
|
||||
)
|
||||
|
||||
utils.log_transaction(tx)
|
||||
return tx
|
||||
|
||||
def create_member_dues_tx(data, member, num_months, deal):
|
||||
transactions = models.Transaction.objects
|
||||
|
||||
# new member 3 for 2 will have to be manual anyway
|
||||
if deal == 12 and num_months == 11:
|
||||
num_months = 12
|
||||
deal_str = '12 for 11, '
|
||||
elif deal == 3 and num_months == 2:
|
||||
num_months = 3
|
||||
deal_str = '3 for 2, '
|
||||
elif num_months == 11: # handle pre-Spaceport yearly subs
|
||||
num_months = 12
|
||||
deal_str = '12 for 11 (legacy), '
|
||||
else:
|
||||
deal_str = ''
|
||||
|
||||
user = getattr(member, 'user', None)
|
||||
memo = '{}{} {} - Protospace Membership, {}'.format(
|
||||
deal_str,
|
||||
data.get('first_name', 'unknown'),
|
||||
data.get('last_name', 'unknown'),
|
||||
data.get('payer_email', 'unknown'),
|
||||
)
|
||||
|
||||
tx = transactions.create(
|
||||
**build_tx(data),
|
||||
memo=memo,
|
||||
category='Membership',
|
||||
number_of_membership_months=num_months,
|
||||
user=user,
|
||||
)
|
||||
utils.tally_membership_months(member)
|
||||
utils.log_transaction(tx)
|
||||
return tx
|
||||
|
||||
def create_unmatched_purchase_tx(data, member):
|
||||
transactions = models.Transaction.objects
|
||||
|
||||
user = getattr(member, 'user', None)
|
||||
report_memo = 'Unknown payment reason, {} {}, email: {}, note: {} - {}'.format(
|
||||
data.get('first_name', 'unknown'),
|
||||
data.get('last_name', 'unknown'),
|
||||
data.get('payer_email', 'unknown'),
|
||||
data.get('custom', 'none'),
|
||||
data.get('memo', 'none'),
|
||||
)
|
||||
|
||||
tx = transactions.create(
|
||||
**build_tx(data),
|
||||
report_memo=report_memo,
|
||||
report_type='Unmatched Purchase',
|
||||
user=user,
|
||||
)
|
||||
|
||||
utils.log_transaction(tx)
|
||||
return tx
|
||||
|
||||
def create_member_training_tx(data, member, training):
|
||||
transactions = models.Transaction.objects
|
||||
|
||||
user = getattr(member, 'user', None)
|
||||
memo = '{} {} - {} Course, email: {}, session: {}, training: {}'.format(
|
||||
data.get('first_name', 'unknown'),
|
||||
data.get('last_name', 'unknown'),
|
||||
training.session.course.name,
|
||||
data.get('payer_email', 'unknown'),
|
||||
str(training.session.id),
|
||||
str(training.id),
|
||||
)
|
||||
|
||||
tx = transactions.create(
|
||||
**build_tx(data),
|
||||
category='OnAcct',
|
||||
memo=memo,
|
||||
user=user,
|
||||
)
|
||||
|
||||
utils.log_transaction(tx)
|
||||
return tx
|
||||
|
||||
def check_training(data, training_id, amount):
|
||||
trainings = models.Training.objects
|
||||
|
||||
if not trainings.filter(id=training_id).exists():
|
||||
return False
|
||||
|
||||
training = trainings.get(id=training_id)
|
||||
|
||||
#if training.attendance_status != 'Waiting for payment':
|
||||
# return False
|
||||
|
||||
if not training.session:
|
||||
return False
|
||||
|
||||
if training.session.is_cancelled:
|
||||
return False
|
||||
|
||||
if training.session.cost != amount:
|
||||
return False
|
||||
|
||||
member = training.user.member
|
||||
|
||||
if training.attendance_status == 'Waiting for payment':
|
||||
training.attendance_status = 'Confirmed'
|
||||
training.paid_date = utils.today_alberta_tz()
|
||||
training.save()
|
||||
|
||||
logger.info('IPN - Amount valid for training cost, id: ' + str(training.id))
|
||||
return create_member_training_tx(data, member, training)
|
||||
|
||||
def create_category_tx(data, member, custom_json, amount):
|
||||
transactions = models.Transaction.objects
|
||||
|
||||
user = getattr(member, 'user', None)
|
||||
category = custom_json['category']
|
||||
|
||||
if category == 'Exchange':
|
||||
protocoin = amount
|
||||
note = '{} Protocoin Purchase'.format(amount)
|
||||
else:
|
||||
protocoin = 0
|
||||
note = custom_json.get('memo', 'none')
|
||||
|
||||
memo = '{} {} - {}, email: {}, note: {}'.format(
|
||||
data.get('first_name', 'unknown'),
|
||||
data.get('last_name', 'unknown'),
|
||||
category,
|
||||
data.get('payer_email', 'unknown'),
|
||||
note,
|
||||
)
|
||||
|
||||
tx = transactions.create(
|
||||
**build_tx(data),
|
||||
category=category,
|
||||
memo=memo,
|
||||
user=user,
|
||||
protocoin=protocoin,
|
||||
)
|
||||
|
||||
utils.log_transaction(tx)
|
||||
return tx
|
||||
|
||||
|
||||
def process_paypal_ipn(data):
|
||||
'''
|
||||
Receive IPN from PayPal, then verify it. If it's good, try to associate it
|
||||
with a member. If the value is a multiple of member dues, credit that many
|
||||
months of membership. Ignore if payment incomplete or duplicate IPN.
|
||||
|
||||
Blocks the IPN POST response, so keep it quick.
|
||||
'''
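# Worked example (illustrative numbers only, not actual dues): with
# monthly_fees of 55.00, an IPN amount of 165.00 is a whole number that
# divides evenly, so num_months below works out to 3 and three months of
# membership are credited. An amount of 160.00 doesn't divide evenly and
# would instead be reported as an unmatched purchase.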
|
||||
ipn = record_ipn(data)
|
||||
|
||||
if verify_paypal_ipn(data):
|
||||
logger.info('IPN - verified')
|
||||
else:
|
||||
logger.error('IPN - verification failed')
|
||||
update_ipn(ipn, 'Verification Failed')
|
||||
return False
|
||||
|
||||
amount = float(data.get('mc_gross', '0'))
|
||||
|
||||
if data.get('payment_status', 'unknown') != 'Completed':
|
||||
logger.info('IPN - Payment not yet completed, ignoring')
|
||||
update_ipn(ipn, 'Payment Incomplete')
|
||||
return False
|
||||
|
||||
if data.get('receiver_email', 'unknown') != OUR_EMAIL:
|
||||
logger.info('IPN - Payment not for us, ignoring')
|
||||
update_ipn(ipn, 'Invalid Receiver')
|
||||
return False
|
||||
|
||||
if data.get('mc_currency', 'unknown') != OUR_CURRENCY:
|
||||
logger.info('IPN - Payment currency invalid, ignoring')
|
||||
update_ipn(ipn, 'Invalid Currency')
|
||||
return False
|
||||
|
||||
transactions = models.Transaction.objects
|
||||
members = models.Member.objects
|
||||
hints = models.PayPalHint.objects
|
||||
|
||||
if 'txn_id' not in data:
|
||||
logger.info('IPN - Missing transaction ID, ignoring')
|
||||
update_ipn(ipn, 'Missing ID')
|
||||
return False
|
||||
|
||||
# TODO: index txn_id?
|
||||
if transactions.filter(paypal_txn_id=data['txn_id']).exists():
|
||||
logger.info('IPN - Duplicate transaction, ignoring')
|
||||
update_ipn(ipn, 'Duplicate')
|
||||
return False
|
||||
|
||||
try:
|
||||
custom_json = json.loads(data.get('custom', '').replace('`', '"'))
|
||||
except (KeyError, ValueError):
|
||||
custom_json = {}
|
||||
|
||||
if 'training' in custom_json:
|
||||
tx = check_training(data, custom_json['training'], amount)
|
||||
if tx:
|
||||
logger.info('IPN - Training matched, adding hint and returning')
|
||||
update_ipn(ipn, 'Accepted, training')
|
||||
hints.update_or_create(
|
||||
account=data.get('payer_id', 'unknown'),
|
||||
defaults=dict(user=tx.user),
|
||||
)
|
||||
return tx
|
||||
|
||||
user = False
|
||||
|
||||
try:
|
||||
user = hints.get(account=data['payer_id']).user
|
||||
except models.PayPalHint.DoesNotExist:
|
||||
logger.info('IPN - No PayPalHint found for %s', data['payer_id'])
|
||||
|
||||
if not user and 'member' in custom_json:
|
||||
member_id = custom_json['member']
|
||||
try:
|
||||
user = members.get(id=member_id).user
|
||||
except models.Member.DoesNotExist:
|
||||
pass
|
||||
|
||||
if not user:
|
||||
logger.info('IPN - Unable to associate with member, reporting')
|
||||
update_ipn(ipn, 'Accepted, Unmatched Member')
|
||||
return create_unmatched_member_tx(data)
|
||||
|
||||
member = user.member
|
||||
|
||||
hints.update_or_create(
|
||||
account=data.get('payer_id', 'unknown'),
|
||||
defaults=dict(user=user),
|
||||
)
|
||||
|
||||
if custom_json.get('category', False) in ['Snacks', 'OnAcct', 'Donation', 'Consumables', 'Purchases', 'Exchange']:
|
||||
logger.info('IPN - Category matched')
|
||||
update_ipn(ipn, 'Accepted, category')
|
||||
return create_category_tx(data, member, custom_json, amount)
|
||||
|
||||
monthly_fees = member.monthly_fees
|
||||
|
||||
if amount.is_integer() and monthly_fees and amount % monthly_fees == 0:
|
||||
num_months = int(amount // monthly_fees)
|
||||
else:
|
||||
num_months = 0
|
||||
|
||||
if num_months:
|
||||
logger.info('IPN - Amount valid for membership dues, adding months')
|
||||
update_ipn(ipn, 'Accepted, Member Dues')
|
||||
deal = custom_json.get('deal', False)
|
||||
return create_member_dues_tx(data, member, num_months, deal)
|
||||
|
||||
logger.info('IPN - Unable to find a reason for payment, reporting')
|
||||
update_ipn(ipn, 'Accepted, Unmatched Purchase')
|
||||
return create_unmatched_purchase_tx(data, member)
|
apiserver/apiserver/api/utils_stats.py (new file, 210 lines)
|
@ -0,0 +1,210 @@
|
|||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
import time
|
||||
from datetime import date, datetime, timedelta
|
||||
import requests
|
||||
from django.db.models import Prefetch
|
||||
from django.core.cache import cache
|
||||
from django.utils.timezone import now, pytz
|
||||
from apiserver.api import models, utils
|
||||
from apiserver import secrets
|
||||
|
||||
DEFAULTS = {
|
||||
'last_card_change': time.time(),
|
||||
'next_meeting': None,
|
||||
'next_clean': None,
|
||||
'next_class': None,
|
||||
'prev_class': None,
|
||||
'member_count': None,
|
||||
'paused_count': None,
|
||||
'green_count': None,
|
||||
'bay_108_temp': None,
|
||||
'bay_110_temp': None,
|
||||
'minecraft_players': [],
|
||||
'card_scans': 0,
|
||||
'track': {},
|
||||
'alarm': {},
|
||||
'sign': '',
|
||||
'link': '',
|
||||
'autoscan': '',
|
||||
'last_scan': {},
|
||||
'closing': {},
|
||||
'printer3d': {},
|
||||
}
|
||||
|
||||
if secrets.MUMBLE:
|
||||
DEFAULTS['mumble_users'] = []
|
||||
|
||||
def changed_card():
|
||||
'''
|
||||
Called whenever the card list could change, i.e. cards are added or modified,
or a user's status becomes overdue by 3 months.
|
||||
'''
|
||||
cache.set('last_card_change', time.time())
|
||||
|
||||
def calc_next_events():
|
||||
sessions = models.Session.objects
|
||||
|
||||
# TODO, go by tag?
|
||||
member_meeting = sessions.filter(is_cancelled=False, course__in=[317, 413], datetime__gte=now()).first()
|
||||
monthly_clean = sessions.filter(is_cancelled=False, course=273, datetime__gte=now()).first()
|
||||
next_class = sessions.exclude(course__in=[317, 413, 273]).filter(is_cancelled=False, datetime__gte=now()).order_by('datetime').first()
|
||||
prev_class = sessions.exclude(course__in=[317, 413, 273]).filter(is_cancelled=False, datetime__lte=now()).order_by('datetime').last()
|
||||
|
||||
if member_meeting:
|
||||
cache.set('next_meeting', member_meeting.datetime)
|
||||
else:
|
||||
cache.set('next_meeting', None)
|
||||
|
||||
if monthly_clean:
|
||||
cache.set('next_clean', monthly_clean.datetime)
|
||||
else:
|
||||
cache.set('next_clean', None)
|
||||
|
||||
if next_class:
|
||||
cache.set('next_class', dict(datetime=next_class.datetime, id=next_class.id, name=next_class.course.name))
|
||||
else:
|
||||
cache.set('next_class', None)
|
||||
|
||||
if prev_class:
|
||||
cache.set('prev_class', dict(datetime=prev_class.datetime, id=prev_class.id, name=prev_class.course.name))
|
||||
else:
|
||||
cache.set('prev_class', None)
|
||||
|
||||
|
||||
def calc_member_counts():
|
||||
members = models.Member.objects
|
||||
not_paused = members.filter(paused_date__isnull=True)
|
||||
|
||||
num_current = not_paused.filter(status='Current').count()
|
||||
num_prepaid = not_paused.filter(status='Prepaid').count()
|
||||
num_due = not_paused.filter(status='Due').count()
|
||||
num_overdue = not_paused.filter(status='Overdue').count()
|
||||
|
||||
member_count = num_current + num_prepaid + num_due + num_overdue
|
||||
paused_count = members.count() - member_count
|
||||
green_count = num_current + num_prepaid
|
||||
|
||||
six_months_ago = utils.today_alberta_tz() - timedelta(days=183)
|
||||
six_month_plus_count = not_paused.filter(application_date__lte=six_months_ago).count()
|
||||
|
||||
vetted_count = not_paused.filter(vetted_date__isnull=False).count()
|
||||
|
||||
related_membership_tx = Prefetch(
|
||||
'user__transactions',
|
||||
queryset=models.Transaction.objects.exclude(
|
||||
number_of_membership_months=0,
|
||||
).exclude(
|
||||
number_of_membership_months__isnull=True,
|
||||
),
|
||||
)
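# A member counts as a subscriber when their latest recorded transaction came
# in as a PayPal 'subscr_payment' (see the loop below).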
|
||||
|
||||
subscriber_count = 0
|
||||
for member in not_paused.prefetch_related(related_membership_tx):
|
||||
if not member.user.transactions.count():
|
||||
continue
|
||||
if member.user.transactions.latest('date').paypal_txn_type == 'subscr_payment':
|
||||
subscriber_count += 1
|
||||
|
||||
cache.set('member_count', member_count)
|
||||
cache.set('paused_count', paused_count)
|
||||
cache.set('green_count', green_count)
|
||||
|
||||
return dict(
|
||||
member_count=member_count,
|
||||
green_count=green_count,
|
||||
six_month_plus_count=six_month_plus_count,
|
||||
vetted_count=vetted_count,
|
||||
subscriber_count=subscriber_count,
|
||||
)
|
||||
|
||||
def calc_signup_counts():
|
||||
month_beginning = utils.today_alberta_tz().replace(day=1)
|
||||
|
||||
members = models.Member.objects
|
||||
new_members = members.filter(application_date__gte=month_beginning)
|
||||
num_new_members = new_members.count()
|
||||
|
||||
return num_new_members
|
||||
|
||||
def calc_retain_counts():
|
||||
signup_counts = models.StatsSignupCount.objects.all()
|
||||
|
||||
all_members = models.Member.objects
|
||||
active_members = all_members.filter(paused_date__isnull=True)
|
||||
vetted_members = all_members.filter(vetted_date__isnull=False)
|
||||
|
||||
for entry in signup_counts:
|
||||
date = entry.month
|
||||
active_new_members = active_members.filter(
|
||||
application_date__month=date.month, application_date__year=date.year
|
||||
)
|
||||
vetted_new_members = vetted_members.filter(
|
||||
application_date__month=date.month, application_date__year=date.year
|
||||
)
|
||||
|
||||
entry.retain_count = active_new_members.count()
|
||||
entry.vetted_count = vetted_new_members.count()
|
||||
entry.save()
|
||||
|
||||
return active_members.count()
|
||||
|
||||
def check_minecraft_server():
|
||||
if secrets.MINECRAFT:
|
||||
url = 'https://api.minetools.eu/ping/' + secrets.MINECRAFT
|
||||
|
||||
try:
|
||||
r = requests.get(url, timeout=5)
|
||||
r.raise_for_status()
|
||||
players = [x['name'] for x in r.json()['players']['sample']]
|
||||
cache.set('minecraft_players', players)
|
||||
return players
|
||||
except BaseException as e:
|
||||
logger.error('Problem checking Minecraft: {} - {}'.format(e.__class__.__name__, str(e)))
|
||||
|
||||
return []
|
||||
|
||||
def check_mumble_server():
|
||||
if secrets.MUMBLE:
|
||||
url = secrets.MUMBLE
|
||||
|
||||
try:
|
||||
r = requests.get(url, timeout=5)
|
||||
r.raise_for_status()
|
||||
users = r.text.split()
|
||||
cache.set('mumble_users', users)
|
||||
return users
|
||||
except BaseException as e:
|
||||
logger.error('Problem checking Mumble: {} - {}'.format(e.__class__.__name__, str(e)))
|
||||
|
||||
return []
|
||||
|
||||
def calc_card_scans():
|
||||
date = utils.today_alberta_tz()
|
||||
dt = datetime.combine(date, datetime.min.time())
|
||||
midnight = utils.TIMEZONE_CALGARY.localize(dt)
|
||||
|
||||
cards = models.Card.objects
|
||||
count = cards.filter(last_seen__gte=midnight).count()
|
||||
|
||||
cache.set('card_scans', count)
|
||||
|
||||
models.StatsSpaceActivity.objects.update_or_create(
|
||||
date=date,
|
||||
defaults=dict(card_scans=count),
|
||||
)
|
||||
|
||||
def get_progress(request_id):
|
||||
return cache.get('request-progress-' + request_id, [])
|
||||
|
||||
def set_progress(request_id, data, replace=False):
|
||||
logger.info('Progress - ID: %s | Status: %s', request_id, data)
|
||||
progress = get_progress(request_id)
|
||||
|
||||
if replace and len(progress):
|
||||
progress[-1] = data
|
||||
else:
|
||||
progress.append(data)
|
||||
|
||||
cache.set('request-progress-' + request_id, progress)
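# Illustrative usage (hypothetical request ID):
#   set_progress('abc123', 'Generating backup...')
#   set_progress('abc123', 'Done', replace=True)
#   get_progress('abc123')  # -> ['Done']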
|
(File diff suppressed because it is too large)
apiserver/apiserver/filters.py (new file, 17 lines)
|
@ -0,0 +1,17 @@
|
|||
import logging
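# These filters are attached to the console handler in settings.py's LOGGING
# config to drop noisy access-log lines for the frequently polled /stats/
# and /lockout/ routes.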
|
||||
|
||||
class IgnoreStats(logging.Filter):
|
||||
def filter(self, record):
|
||||
if 'GET /stats/' in record.msg:
|
||||
return False
|
||||
elif 'POST /stats/' in record.msg:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
class IgnoreLockout(logging.Filter):
|
||||
def filter(self, record):
|
||||
if 'GET /lockout/' in record.msg:
|
||||
return False
|
||||
else:
|
||||
return True
|
apiserver/apiserver/secrets.py.example (new file, 123 lines)
|
@ -0,0 +1,123 @@
|
|||
# Spaceport secrets file, don't commit to version control!
|
||||
#
|
||||
# Note: all values are optional, features are excluded if left blank
|
||||
|
||||
# /admin/ route obfuscation
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base32 | head -c 16
|
||||
ADMIN_RANDOM = ''
|
||||
|
||||
# /ipn/ route obfuscation
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base32 | head -c 16
|
||||
IPN_RANDOM = ''
|
||||
|
||||
# Django secret key
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base64 | head -c 50
|
||||
DJANGO_SECRET_KEY = ''
|
||||
# Warning
|
||||
# Keep this value secret.
|
||||
# Running Django with a known SECRET_KEY defeats many of Django’s security
|
||||
# protections, and can lead to privilege escalation and remote code execution
|
||||
# vulnerabilities.
|
||||
|
||||
# LDAP API url
|
||||
# should contain the IP and port of the script and machine connected over VPN
|
||||
# with trailing slash
|
||||
LDAP_API_URL = ''
|
||||
|
||||
# LDAP API key
|
||||
# should be equal to the auth token value set in
|
||||
# spaceport/ldapserver/secrets.py
|
||||
LDAP_API_KEY = ''
|
||||
|
||||
# Wiki Auth API url
|
||||
# should contain the IP and port of the script and machine connected over VPN
|
||||
# with trailing slash
|
||||
WIKI_AUTH_API_URL = ''
|
||||
|
||||
# Discourse Auth API url
|
||||
# should contain the IP and port of the script and machine connected over VPN
|
||||
# with trailing slash
|
||||
DISCOURSE_AUTH_API_URL = ''
|
||||
|
||||
# Auth API key
|
||||
# should be equal to the auth token value set in
|
||||
# spaceport/authserver/secrets.py
|
||||
AUTH_API_KEY = ''
|
||||
|
||||
# Door cards API token
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base32 | head -c 40
|
||||
DOOR_API_TOKEN = ''
|
||||
|
||||
# Vending machine cards API token
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base32 | head -c 40
|
||||
VEND_API_TOKEN = ''
|
||||
|
||||
# Printer report API token
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base32 | head -c 40
|
||||
PRINTER_API_TOKEN = ''
|
||||
|
||||
# Pinball machine score API token
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base32 | head -c 40
|
||||
PINBALL_API_TOKEN = ''
|
||||
|
||||
# Alarm integration API token
|
||||
# Set this to random characters
|
||||
# For example, use the output of this:
|
||||
# head /dev/urandom | base32 | head -c 40
|
||||
ALARM_API_TOKEN = ''
|
||||
|
||||
# Spaceporter bot webhook URL token
|
||||
# Posts to the "find a host" chat
|
||||
# Found in /admin/plugins/chat URL section
|
||||
# should look like '37f5e8d9c64064c86109b7ea'
|
||||
SPACEPORTER_HOST_TOKEN = ''
|
||||
|
||||
# Protospace general info
|
||||
DOOR_CODE = ''
|
||||
WIFI_PASS = ''
|
||||
MINECRAFT = ''
|
||||
MUMBLE = ''
|
||||
|
||||
# Portal Email Credentials
|
||||
# For sending password resets, etc.
|
||||
EMAIL_HOST = ''
|
||||
EMAIL_USER = ''
|
||||
EMAIL_PASS = ''
|
||||
|
||||
|
||||
# Backup API tokens
|
||||
# These tokens allow each user to download a backup of member data.
|
||||
# Don't mess up the data structure!
|
||||
# Tokens must be random and unique, use the output of:
|
||||
# head /dev/urandom | base32 | head -c 40
|
||||
BACKUP_TOKENS = {
|
||||
'<token>': {
|
||||
'name': 'firstname.lastname',
|
||||
'backup_id': '<token>',
|
||||
'cache_key': '<token>',
|
||||
},
|
||||
'<token>': {
|
||||
'name': 'firstname.lastname',
|
||||
'backup_id': '<token>',
|
||||
'cache_key': '<token>',
|
||||
},
|
||||
'<token>': { # reset the canaries for data-at-rest
|
||||
'name': 'null',
|
||||
'backup_id': '<token>',
|
||||
'cache_key': '<token>',
|
||||
},
|
||||
}
|
|
@ -11,7 +11,11 @@ https://docs.djangoproject.com/en/3.0/ref/settings/
|
|||
"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
import logging.config
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
from . import secrets
|
||||
|
||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
@ -21,24 +25,39 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
|||
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
SECRET_KEY = 'tm2h!9@=+cqy#n^&2en9(dhyfc@n--9*$s*#b9&%rdai)jrj&f'
|
||||
SECRET_KEY = secrets.DJANGO_SECRET_KEY or 'OaOBN2E+brpoRyDMlTD9eTE5PgBtkkl+L7Bzt6pQ5Qr3GS82SH'
|
||||
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG_ENV = os.environ.get('DEBUG', False)
|
||||
BINDALL_ENV = os.environ.get('BINDALL', False)
|
||||
DEBUG = DEBUG_ENV or False
|
||||
if DEBUG: print('Debug mode ON')
|
||||
|
||||
|
||||
PRODUCTION_HOST = 'my.protospace.ca'
|
||||
|
||||
|
||||
# production hosts
|
||||
ALLOWED_HOSTS = []
|
||||
ALLOWED_HOSTS = [
|
||||
'api.' + PRODUCTION_HOST,
|
||||
]
|
||||
|
||||
if DEBUG:
|
||||
ALLOWED_HOSTS += [
|
||||
'localhost',
|
||||
'127.0.0.1',
|
||||
'spaceport-api.dns.t0.vc',
|
||||
'api.spaceport.dns.t0.vc',
|
||||
]
|
||||
|
||||
if BINDALL_ENV:
|
||||
ALLOWED_HOSTS = ['*']
|
||||
SESSION_COOKIE_SECURE = False
|
||||
CSRF_COOKIE_SECURE = False
|
||||
else:
|
||||
SESSION_COOKIE_SECURE = True
|
||||
CSRF_COOKIE_SECURE = True
|
||||
|
||||
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
||||
SECURE_REFERRER_POLICY = 'same-origin'
|
||||
|
||||
# Application definition
|
||||
|
||||
|
@ -50,6 +69,7 @@ INSTALLED_APPS = [
|
|||
'django.contrib.messages',
|
||||
'django.contrib.staticfiles',
|
||||
'django.contrib.sites',
|
||||
'django_extensions',
|
||||
'rest_framework',
|
||||
'rest_framework.authtoken',
|
||||
'apiserver.api',
|
||||
|
@ -58,6 +78,7 @@ INSTALLED_APPS = [
|
|||
'allauth.account',
|
||||
'allauth.socialaccount', # to support user deletion
|
||||
'rest_auth.registration',
|
||||
'simple_history',
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
|
@ -68,8 +89,18 @@ MIDDLEWARE = [
|
|||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'django.contrib.messages.middleware.MessageMiddleware',
|
||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||
'simple_history.middleware.HistoryRequestMiddleware',
|
||||
]
|
||||
|
||||
if BINDALL_ENV:
|
||||
INSTALLED_APPS += [
|
||||
'corsheaders',
|
||||
]
|
||||
MIDDLEWARE += [
|
||||
'corsheaders.middleware.CorsMiddleware',
|
||||
]
|
||||
CORS_ORIGIN_ALLOW_ALL = True
|
||||
|
||||
ROOT_URLCONF = 'apiserver.urls'
|
||||
|
||||
TEMPLATES = [
|
||||
|
@ -98,10 +129,17 @@ DATABASES = {
|
|||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3',
|
||||
'NAME': os.path.join(BASE_DIR, 'data/db.sqlite3'),
|
||||
'OPTIONS': {
|
||||
'timeout': 20, # increased because generate_backups.py blocks
|
||||
},
|
||||
},
|
||||
'old_portal': {
|
||||
'ENGINE': 'django.db.backends.sqlite3',
|
||||
'NAME': os.path.join(BASE_DIR, 'old_portal.sqlite3'),
|
||||
}
|
||||
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
|
||||
'LOCATION': '127.0.0.1:11211',
|
||||
'TIMEOUT': None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -150,7 +188,14 @@ USE_TZ = True
|
|||
# Static files (CSS, JavaScript, Images)
|
||||
# https://docs.djangoproject.com/en/3.0/howto/static-files/
|
||||
|
||||
STATIC_URL = '/static/'
|
||||
if DEBUG:
|
||||
STATIC_URL = 'devstatic/'
|
||||
MEDIA_URL = 'static/'
|
||||
MEDIA_ROOT = os.path.join(BASE_DIR, 'data/static')
|
||||
else:
|
||||
STATIC_URL = 'https://static.{}/'.format(PRODUCTION_HOST)
|
||||
STATIC_ROOT = os.path.join(BASE_DIR, 'data/static')
|
||||
|
||||
|
||||
DEFAULT_RENDERER_CLASSES = (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
|
@ -172,49 +217,61 @@ if DEBUG:
|
|||
|
||||
REST_FRAMEWORK = {
|
||||
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
|
||||
'PAGE_SIZE': 100,
|
||||
'PAGE_SIZE': 500,
|
||||
'DEFAULT_RENDERER_CLASSES': DEFAULT_RENDERER_CLASSES,
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': DEFAULT_AUTHENTICATION_CLASSES,
|
||||
'DEFAULT_THROTTLE_CLASSES': ['apiserver.api.throttles.LoggingThrottle'],
|
||||
'EXCEPTION_HANDLER': 'apiserver.api.utils.custom_exception_handler'
|
||||
}
|
||||
|
||||
#DEFAULT_LOGGING = None
|
||||
LOGGING = {
|
||||
'version': 1,
|
||||
'disable_existing_loggers': False,
|
||||
'formatters': {
|
||||
'verbose': {
|
||||
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
|
||||
},
|
||||
'medium': {
|
||||
'format': '[%(asctime)s] [%(levelname)s] %(message)s'
|
||||
'format': '[%(asctime)s] [%(process)d] [%(levelname)7s] %(message)s'
|
||||
},
|
||||
'simple': {
|
||||
'format': '%(levelname)s %(message)s'
|
||||
},
|
||||
'filters': {
|
||||
'ignore_stats': {
|
||||
'()': 'apiserver.filters.IgnoreStats',
|
||||
},
|
||||
'ignore_lockout': {
|
||||
'()': 'apiserver.filters.IgnoreLockout',
|
||||
},
|
||||
},
|
||||
'handlers': {
|
||||
'console': {
|
||||
'level': 'INFO',
|
||||
'level': 'DEBUG',
|
||||
'filters': ['ignore_stats', 'ignore_lockout'],
|
||||
'class': 'logging.StreamHandler',
|
||||
'formatter': 'medium'
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
#'django.db.backends': {
|
||||
# 'handlers': ['console'],
|
||||
# 'level': 'DEBUG',
|
||||
# 'propagate': False,
|
||||
# },
|
||||
'gunicorn': {
|
||||
'handlers': ['console'],
|
||||
'level': 'ERROR',
|
||||
'propagate': True,
|
||||
},
|
||||
'django': {
|
||||
'handlers': ['console'],
|
||||
'level': 'INFO',
|
||||
'propagate': True,
|
||||
'level': 'DEBUG' if DEBUG else 'INFO',
|
||||
'propagate': False,
|
||||
},
|
||||
'': {
|
||||
'handlers': ['console'],
|
||||
'level': 'INFO',
|
||||
'level': 'DEBUG',
|
||||
'propagate': True,
|
||||
},
|
||||
}
|
||||
},
|
||||
'root': {
|
||||
'level': 'DEBUG' if DEBUG else 'INFO',
|
||||
'handlers': ['console'],
|
||||
},
|
||||
}
|
||||
logging.config.dictConfig(LOGGING)
|
||||
|
||||
SITE_ID = 1
|
||||
ACCOUNT_EMAIL_REQUIRED = True
|
||||
|
@ -223,3 +280,28 @@ ACCOUNT_USERNAME_MIN_LENGTH = 3
|
|||
ACCOUNT_AUTHENTICATION_METHOD = 'username'
|
||||
OLD_PASSWORD_FIELD_ENABLED = True
|
||||
LOGOUT_ON_PASSWORD_CHANGE = False
|
||||
ACCOUNT_PRESERVE_USERNAME_CASING = False
|
||||
|
||||
if not secrets.EMAIL_USER or not secrets.EMAIL_PASS:
|
||||
logger.info('Logging outgoing emails to console')
|
||||
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
||||
else:
|
||||
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
|
||||
|
||||
EMAIL_HOST = secrets.EMAIL_HOST
|
||||
EMAIL_PORT = '587'
|
||||
EMAIL_HOST_USER = secrets.EMAIL_USER
|
||||
EMAIL_HOST_PASSWORD = secrets.EMAIL_PASS
|
||||
EMAIL_USE_TLS = True
|
||||
EMAIL_USE_SSL = False
|
||||
DEFAULT_FROM_EMAIL = 'Protospace Portal <portal@mg.protospace.ca>'
|
||||
|
||||
if DEBUG: logger.info('Debug mode ON')
|
||||
logger.info('Test logging for each thread')
|
||||
|
||||
APP_VERSION = 6 # TODO: automate this
|
||||
|
||||
SHELL_PLUS = 'ipython'
|
||||
|
||||
#import logging_tree
|
||||
#logging_tree.printout()
|
||||
|
|
|
@ -1,27 +1,66 @@
|
|||
from django.conf.urls import url
|
||||
from django.conf.urls.static import static
|
||||
from django.contrib import admin
|
||||
from django.urls import include, path
|
||||
from rest_framework import routers
|
||||
from rest_auth.views import LoginView, LogoutView
|
||||
|
||||
from .api import views
|
||||
from . import secrets, settings
|
||||
|
||||
router = routers.DefaultRouter()
|
||||
#router.register(r'users', views.UserViewSet)
|
||||
router.register(r'door', views.DoorViewSet, basename='door')
|
||||
router.register(r'lockout', views.LockoutViewSet, basename='lockout')
|
||||
router.register(r'cards', views.CardViewSet, basename='card')
|
||||
router.register(r'stats', views.StatsViewSet, basename='stats')
|
||||
router.register(r'usage', views.UsageViewSet, basename='usage')
|
||||
router.register(r'search', views.SearchViewSet, basename='search')
|
||||
router.register(r'members', views.MemberViewSet, basename='members')
|
||||
router.register(r'courses', views.CourseViewSet, basename='course')
|
||||
router.register(r'history', views.HistoryViewSet, basename='history')
|
||||
router.register(r'vetting', views.VettingViewSet, basename='vetting')
|
||||
router.register(r'sponsorship', views.SponsorshipViewSet, basename='sponsorship')
|
||||
router.register(r'pinball', views.PinballViewSet, basename='pinball')
|
||||
router.register(r'storage', views.StorageSpaceViewSet, basename='storage')
|
||||
router.register(r'hosting', views.HostingViewSet, basename='hosting')
|
||||
router.register(r'sessions', views.SessionViewSet, basename='session')
|
||||
router.register(r'training', views.TrainingViewSet, basename='training')
|
||||
router.register(r'interest', views.InterestViewSet, basename='interest')
|
||||
router.register(r'protocoin', views.ProtocoinViewSet, basename='protocoin')
|
||||
router.register(r'transactions', views.TransactionViewSet, basename='transaction')
|
||||
router.register(r'charts/membercount', views.MemberCountViewSet, basename='membercount')
|
||||
router.register(r'charts/signupcount', views.SignupCountViewSet, basename='signupcount')
|
||||
router.register(r'charts/spaceactivity', views.SpaceActivityViewSet, basename='spaceactivity')
|
||||
#router.register(r'me', views.FullMemberView, basename='fullmember')
|
||||
#router.register(r'registration', views.RegistrationViewSet, basename='register')
|
||||
|
||||
urlpatterns = [
|
||||
path('', include(router.urls)),
|
||||
path('admin/', admin.site.urls),
|
||||
path('api-auth/', include('rest_framework.urls')),
|
||||
url(r'^rest-auth/', include('rest_auth.urls')),
|
||||
url(r'^registration/', views.RegistrationView.as_view(), name='rest_name_register'),
|
||||
url(r'^rest-auth/login/$', views.MyLoginView.as_view(), name='rest_login'),
|
||||
url(r'^spaceport-auth/login/$', views.SpaceportAuthView.as_view(), name='spaceport_auth'),
|
||||
url(r'^rest-auth/logout/$', LogoutView.as_view(), name='rest_logout'),
|
||||
url(r'^password/reset/$', views.PasswordResetView.as_view(), name='rest_password_reset'),
|
||||
url(r'^password/reset/confirm/$', views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
|
||||
url(r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,32})/$', views.null_view, name='password_reset_confirm'),
|
||||
url(r'^password/change/', views.PasswordChangeView.as_view(), name='rest_password_change'),
|
||||
url(r'^registration/', views.RegistrationView.as_view(), name='rest_name_register'),
|
||||
url(r'^user/', views.UserView.as_view(), name='user'),
|
||||
url(r'^ping/', views.PingView.as_view(), name='ping'),
|
||||
url(r'^paste/', views.PasteView.as_view(), name='paste'),
|
||||
url(r'^backup/', views.BackupView.as_view(), name='backup'),
|
||||
]
|
||||
|
||||
if secrets.IPN_RANDOM:
|
||||
IPN_ROUTE = r'^ipn/{}/'.format(secrets.IPN_RANDOM)
|
||||
urlpatterns.append(url(IPN_ROUTE, views.IpnView.as_view(), name='ipn'))
|
||||
|
||||
if secrets.ADMIN_RANDOM:
|
||||
ADMIN_ROUTE = '{}/admin/'.format(secrets.ADMIN_RANDOM)
|
||||
else:
|
||||
ADMIN_ROUTE = 'admin/'
|
||||
urlpatterns.append(path(ADMIN_ROUTE, admin.site.urls))
|
||||
|
||||
if settings.DEBUG:
|
||||
urlpatterns += [
|
||||
path('api-auth/', include('rest_framework.urls')),
|
||||
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
apiserver/backups/index.html (new file, 1 line)
|
@ -0,0 +1 @@
|
|||
<i>DO YOU HAVE A COMRADE?</i>
|
apiserver/data/static/index.html (new file, 1 line)
|
@ -0,0 +1 @@
|
|||
<i>SEE YOU SPACE COWBOY...</i>
|
apiserver/docs/Makefile (new file, 20 lines)
|
@ -0,0 +1,20 @@
|
|||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = source
|
||||
BUILDDIR = build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
|
||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
apiserver/docs/make.bat (new file, 35 lines)
|
@ -0,0 +1,35 @@
|
|||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=source
|
||||
set BUILDDIR=build
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
|
||||
:end
|
||||
popd
|
apiserver/docs/source/_static/favicon.ico (new binary file, 766 B)
apiserver/docs/source/api.md (new file, 50 lines)
|
@ -0,0 +1,50 @@
|
|||
# Spaceport API
|
||||
|
||||
The current API URL is: [https://api.my.protospace.ca/](https://api.my.protospace.ca/).
|
||||
|
||||
JSON is returned by all API responses, including errors, and HTTP response
status codes designate success and failure.
|
||||
|
||||
Request bodies can be JSON or form data.
|
||||
|
||||
All API routes require a trailing slash. This is a Django default and you'll get
|
||||
a 301 redirect if you forget it.
|
||||
|
||||
## Authentication
|
||||
|
||||
Most API routes require authentication with a token. The token is returned on
|
||||
registration and login. The token needs to be placed in the `Authorization`
|
||||
request header like this: `Token <token>`.
|
||||
|
||||
**Example**
|
||||
|
||||
Login request:
|
||||
|
||||
```
|
||||
$ curl -d 'username=tanner.collin' -d 'password=supersecret' 'https://my.protospace.ca/rest-auth/login/'
|
||||
```
|
||||
|
||||
Login response:
|
||||
|
||||
```
|
||||
{"key":"1fb8ef73f118c5de1f9ba4939a76b3f3b0bc7444"}
|
||||
```
|
||||
|
||||
Add the following header to requests:
|
||||
|
||||
```
|
||||
Authorization: Token 1fb8ef73f118c5de1f9ba4939a76b3f3b0bc7444
|
||||
```
|
||||
|
||||
/user/ request:
|
||||
|
||||
```
|
||||
curl -H 'Authorization: Token 1fb8ef73f118c5de1f9ba4939a76b3f3b0bc7444' 'https://my.protospace.ca/user/'
|
||||
```
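
The same flow in Python, as a minimal sketch using the `requests` library and
reusing the example credentials and URLs from the curl commands above:

```
import requests

BASE = 'https://my.protospace.ca'

# log in to get a token
r = requests.post(BASE + '/rest-auth/login/', data={
    'username': 'tanner.collin',
    'password': 'supersecret',
})
token = r.json()['key']

# call an authenticated route with the token in the Authorization header
r = requests.get(BASE + '/user/', headers={'Authorization': 'Token ' + token})
print(r.json())
```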
|
||||
|
||||
## API Routes
|
||||
|
||||
API routes are not individually documented. They used to be, but keeping the
documentation up to date took more effort than it was worth.
|
||||
|
||||
Use your browser's network inspector to learn how the API works.
|
apiserver/docs/source/conf.py (new file, 63 lines)
|
@ -0,0 +1,63 @@
|
|||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# This file only contains a selection of the most common options. For a full
|
||||
# list see the documentation:
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
|
||||
# -- Path setup --------------------------------------------------------------
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
# import os
|
||||
# import sys
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
|
||||
project = 'Spaceport'
|
||||
copyright = '2020, Tanner Collin'
|
||||
author = 'Tanner Collin'
|
||||
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'myst_parser',
|
||||
]
|
||||
|
||||
source_suffix = ['.rst', '.md']
|
||||
|
||||
source_parsers = {
|
||||
'.md': 'markdown',
|
||||
}
|
||||
|
||||
known_url_schemes = ['http', 'https']
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = []
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_favicon = '_static/favicon.ico'
|
||||
collapse_navigation = False
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
html4_writer = True
|
apiserver/docs/source/dev_apiserver.md (new file, 67 lines)
|
@ -0,0 +1,67 @@
|
|||
# API Server Development Setup
|
||||
|
||||
This guide assumes you are using [Debian GNU/Linux 11](https://cdimage.debian.org/cdimage/unofficial/non-free/images-including-firmware/archive/11.2.0+nonfree/amd64/iso-cd/firmware-11.2.0-amd64-netinst.iso) or [Ubuntu 20.04 LTS](https://releases.ubuntu.com/20.04/). If you
|
||||
aren't, just spin up a VM with the correct version. Things break if you don't.
|
||||
|
||||
## Install Dependencies
|
||||
|
||||
```
|
||||
$ sudo apt update
|
||||
$ sudo apt install build-essential python3 python3-dev libffi-dev python3-pip python3-virtualenv memcached git
|
||||
```
|
||||
|
||||
Clone the repo. Skip this step if you already have it:
|
||||
|
||||
```
|
||||
$ git clone https://github.com/Protospace/spaceport.git
|
||||
```
|
||||
|
||||
Set up Python:
|
||||
|
||||
```
|
||||
$ cd spaceport/apiserver/
|
||||
$ python3 -m virtualenv -p python3 env
|
||||
$ source env/bin/activate
|
||||
(env) $ pip install -r requirements.txt
|
||||
```
|
||||
|
||||
You need to make sure the Python virtual environment `(env)` is enabled whenever
|
||||
you run the API server.
|
||||
|
||||
Copy the secrets file and optionally fill out values depending on which
|
||||
[[integrations]] you wish to enable. It runs fine by default.
|
||||
|
||||
```
|
||||
(env) $ cp apiserver/secrets.py.example apiserver/secrets.py
|
||||
(env) $ sensible-editor apiserver/secrets.py # optional
|
||||
```
|
||||
|
||||
## Initialize Database
|
||||
|
||||
Set up the database:
|
||||
|
||||
```
|
||||
(env) $ python manage.py makemigrations
|
||||
(env) $ python manage.py makemigrations api
|
||||
(env) $ python manage.py migrate
|
||||
```
|
||||
|
||||
Create a super user so you can manage who's a director or staff. This is a special
|
||||
account and is not treated as a member.
|
||||
|
||||
```
|
||||
(env) $ python manage.py createsuperuser --email admin@example.com --username admin
|
||||
```
|
||||
|
||||
## Running
|
||||
|
||||
Run the development server:
|
||||
|
||||
```
|
||||
$ source env/bin/activate
|
||||
(env) $ DEBUG=true BINDALL=true python manage.py runserver 0.0.0.0:8000
|
||||
```
|
||||
|
||||
The development server is now listening on port 8000. You can connect to it by
|
||||
opening `http://<ip address>:8000/` in your web browser. If it's running
|
||||
locally, that would be [http://127.0.0.1:8000/](http://127.0.0.1:8000/).
|
apiserver/docs/source/dev_running.md (new file, 55 lines)
|
@ -0,0 +1,55 @@
|
|||
# Running the Development Setup
|
||||
|
||||
This document explains how to actually use Spaceport after you have the API
|
||||
server and web client set up.
|
||||
|
||||
It assumes the API server is running on [http://127.0.0.1:8000/](http://127.0.0.1:8000/) and the web
client on [http://127.0.0.1:3000/](http://127.0.0.1:3000/). You can replace `127.0.0.1` with the IP
address of your virtual machine or server, if you're using one.
|
||||
|
||||
## Register the First Member
|
||||
|
||||
Open the web client [http://127.0.0.1:3000/](http://127.0.0.1:3000/) in your browser.
|
||||
|
||||
Fill out the "Sign Up to Spaceport" form. If you see a "Please Visit Protospace"
|
||||
warning, this means the web client can't talk to the API server properly. Hit
|
||||
Sign Up and finish the registration.
|
||||
|
||||
Navigate to [http://127.0.0.1:8000/admin/api/member/1/change/](http://127.0.0.1:8000/admin/api/member/1/change/) and log in with the
|
||||
super user credentials you created during the API server setup.
|
||||
|
||||
Scroll down, check "Is staff", and click "Today" next to "Vetted date".
|
||||
|
||||
Scroll to the bottom and click "Save".
|
||||
|
||||
Go back and refresh Spaceport. You should now be Staff, which grants you the
|
||||
same powers as a director. Navigate to [http://127.0.0.1:3000/admin](http://127.0.0.1:3000/admin) to confirm.
|
||||
|
||||
## Running Cron Jobs
|
||||
|
||||
Spaceport runs commands periodically to manage information that changes with
time and to generate the stats. Running them is optional, and you can run them
manually like so:
|
||||
|
||||
```
|
||||
$ source env/bin/activate
|
||||
(env) $ DEBUG=true python manage.py run_minutely
|
||||
(env) $ DEBUG=true python manage.py run_hourly
|
||||
(env) $ DEBUG=true python manage.py run_daily
|
||||
```
|
||||
|
||||
Or automatically:
|
||||
|
||||
```
|
||||
$ crontab -e
|
||||
```
|
||||
|
||||
Add to the bottom of the file:
|
||||
|
||||
```
|
||||
10 10 * * * /whatever/spaceport/apiserver/env/bin/python /whatever/spaceport/apiserver/manage.py run_daily
|
||||
58 * * * * /whatever/spaceport/apiserver/env/bin/python /whatever/spaceport/apiserver/manage.py run_hourly
|
||||
* * * * * /whatever/spaceport/apiserver/env/bin/python /whatever/spaceport/apiserver/manage.py run_minutely
|
||||
```
|
||||
|
||||
Replace `whatever` with the path to Spaceport.
|
apiserver/docs/source/dev_webclient.md (new file, 40 lines)
|
@ -0,0 +1,40 @@
|
|||
# Web Client Development Setup
|
||||
|
||||
This guide assumes you are using [Debian GNU/Linux 11](https://cdimage.debian.org/cdimage/unofficial/non-free/images-including-firmware/archive/11.2.0+nonfree/amd64/iso-cd/firmware-11.2.0-amd64-netinst.iso) or [Ubuntu 20.04 LTS](https://releases.ubuntu.com/20.04/). If you
|
||||
aren't, just spin up a VM with the correct version. Things break if you don't.
|
||||
|
||||
## Install Dependencies
|
||||
|
||||
```
|
||||
$ sudo apt update
|
||||
$ sudo apt install nodejs npm
|
||||
$ sudo npm install --global yarn
|
||||
```
|
||||
|
||||
Clone the repo. Skip this step if you already have it:
|
||||
|
||||
```
|
||||
$ git clone https://github.com/Protospace/spaceport.git
|
||||
```
|
||||
|
||||
Set up nodejs:
|
||||
|
||||
```
|
||||
$ cd spaceport/webclient/
|
||||
$ yarn install
|
||||
```
|
||||
|
||||
## Running
|
||||
|
||||
Run the development server:
|
||||
|
||||
```
|
||||
$ export NODE_OPTIONS=--openssl-legacy-provider
|
||||
$ HOST=0.0.0.0 yarn start
|
||||
```
|
||||
|
||||
You'll see about 500 warnings, which you can safely ignore or help get rid of.
|
||||
|
||||
The development server is now listening on port 3000. You can connect to it by
|
||||
opening `http://<ip address>:3000/` in your web browser. If it's running
|
||||
locally, that would be [http://127.0.0.1:3000/](http://127.0.0.1:3000/).
|
apiserver/docs/source/index.rst (new file, 17 lines)
|
@ -0,0 +1,17 @@
|
|||
.. Spaceport documentation master file, created by
|
||||
sphinx-quickstart on Tue Jan 21 06:12:17 2020.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Spaceport Documentation
|
||||
=======================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
api
|
||||
ldap
|
||||
dev_apiserver
|
||||
dev_webclient
|
||||
dev_running
|
apiserver/docs/source/ldap.rst (new file, 68 lines)
|
@ -0,0 +1,68 @@
|
|||
LDAP Server Setup
|
||||
=================
|
||||
|
||||
.. contents:: :depth: 3
|
||||
|
||||
This guide assumes you are on a Debian-based distro.
|
||||
|
||||
Install dependencies:
|
||||
|
||||
.. sourcecode:: bash
|
||||
|
||||
$ sudo apt update
|
||||
$ sudo apt install build-essential python3 python3-dev python3-pip python-virtualenv python3-virtualenv supervisor libsasl2-dev libldap2-dev libssl-dev
|
||||
|
||||
Clone the repo:
|
||||
|
||||
.. sourcecode:: bash
|
||||
|
||||
$ git clone https://github.com/Protospace/spaceport.git
|
||||
$ cd spaceport
|
||||
|
||||
Main Script
|
||||
-----------
|
||||
|
||||
Create a venv, activate it, and install:
|
||||
|
||||
.. sourcecode:: bash
|
||||
|
||||
$ cd ldapserver
|
||||
$ virtualenv -p python3 env
|
||||
$ source env/bin/activate
|
||||
(env) $ pip install -r requirements.txt
|
||||
|
||||
Edit ``ldapserver/secrets.py.example`` and save it as ``ldapserver/secrets.py``.
|
||||
|
||||
Securely move the auth token to ``apiserver/secrets.py`` on the server running Spaceport.
|
||||
|
||||
Now you can run the script to test:
|
||||
|
||||
.. sourcecode:: bash
|
||||
|
||||
(env) $ python main.py
|
||||
|
||||
Flask will now be running on port 5000; connect to localhost:5000 to test it.
|
||||
|
||||
Process Management
|
||||
------------------
|
||||
|
||||
The script is kept alive with `supervisor <https://pypi.org/project/supervisor/>`_.
|
||||
|
||||
Configure ``/etc/supervisor/conf.d/ldapserver.conf``:
|
||||
|
||||
.. sourcecode:: text
|
||||
|
||||
[program:ldapserver]
|
||||
user=ldapserver
|
||||
directory=/opt/spaceport/ldapserver
|
||||
command=/opt/spaceport/ldapserver/env/bin/gunicorn -w 2 --bind 0.0.0.0:5000 server:app
|
||||
stopasgroup=true
|
||||
stopsignal=INT
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stderr_logfile=/var/log/ldapserver.log
|
||||
stderr_logfile_maxbytes=10MB
|
||||
stdout_logfile=/var/log/ldapserver.log
|
||||
stdout_logfile_maxbytes=10MB
|
||||
|
||||
Script logs to ``/var/log/ldapserver.log``.
|
apiserver/generate_storage_spaces.py (new executable file, 249 lines)
|
@ -0,0 +1,249 @@
|
|||
import django, sys, os
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
|
||||
django.setup()
|
||||
|
||||
from apiserver.api import models
|
||||
|
||||
SHELVES = [
|
||||
('A1A', 0),
|
||||
('A1B', 0),
|
||||
('A1C', 0),
|
||||
('A1D', 0),
|
||||
('A1E', 0),
|
||||
('A2A', 0),
|
||||
('A2B', 0),
|
||||
('A2C', 0),
|
||||
('A2D', 0),
|
||||
('A2E', 0),
|
||||
('A3A', 0),
|
||||
('A3B', 0),
|
||||
('A3C', 0),
|
||||
('A3D', 0),
|
||||
('A3E', 0),
|
||||
('A4A', 0),
|
||||
('A4B', 0),
|
||||
('A4C', 0),
|
||||
('A4D', 0),
|
||||
('A4E', 0),
|
||||
('A5A', 0),
|
||||
('A5B', 0),
|
||||
('A5C', 0),
|
||||
('A5D', 0),
|
||||
('A5E', 0),
|
||||
('A6A', 0),
|
||||
('A6B', 0),
|
||||
('A6C', 0),
|
||||
('A6D', 0),
|
||||
('A6E', 0),
|
||||
('A7A', 0),
|
||||
('A7B', 0),
|
||||
('A7C', 0),
|
||||
('A7D', 0),
|
||||
('A7E', 0),
|
||||
('B1A', 0),
|
||||
('B1B', 0),
|
||||
('B1C', 0),
|
||||
('B1D', 0),
|
||||
('B1E', 0),
|
||||
('B2A', 0),
|
||||
('B2B', 0),
|
||||
('B2C', 0),
|
||||
('B2D', 0),
|
||||
('B2E', 0),
|
||||
('B3A', 0),
|
||||
('B3B', 0),
|
||||
('B3C', 0),
|
||||
('B3D', 0),
|
||||
('B3E', 0),
|
||||
('B4A', 0),
|
||||
('B4B', 0),
|
||||
('B4C', 0),
|
||||
('B4D', 0),
|
||||
('B4E', 0),
|
||||
('B5A', 0),
|
||||
('B5B', 0),
|
||||
('B5C', 0),
|
||||
('B5D', 0),
|
||||
('B5E', 0),
|
||||
('B6A', 0),
|
||||
('B6B', 0),
|
||||
('B6C', 0),
|
||||
('B6D', 0),
|
||||
('B6E', 0),
|
||||
('C1A', 0),
|
||||
('C1B', 0),
|
||||
('C1C', 0),
|
||||
('C1D', 0),
|
||||
('C1E', 0),
|
||||
('C2A', 0),
|
||||
('C2B', 0),
|
||||
('C2C', 0),
|
||||
('C2D', 0),
|
||||
('C2E', 0),
|
||||
('C3A', 0),
|
||||
('C3B', 0),
|
||||
('C3C', 0),
|
||||
('C3D', 0),
|
||||
('C3E', 0),
|
||||
('C4A', 0),
|
||||
('C4B', 0),
|
||||
('C4C', 0),
|
||||
('C4D', 0),
|
||||
('C4E', 0),
|
||||
('C5A', 0),
|
||||
('C5B', 0),
|
||||
('C5C', 0),
|
||||
('C5D', 0),
|
||||
('C5E', 0),
|
||||
('C6A', 0),
|
||||
('C6B', 0),
|
||||
('C6C', 0),
|
||||
('C6D', 0),
|
||||
('C6E', 0),
|
||||
('C6F', 6),
|
||||
('C6G', 7),
|
||||
('D1A', 0),
|
||||
('D1B', 0),
|
||||
('D1C', 0),
|
||||
('D1D', 0),
|
||||
('D1E', 0),
|
||||
('D2A', 0),
|
||||
('D2B', 0),
|
||||
('D2C', 0),
|
||||
('D2D', 0),
|
||||
('D2E', 0),
|
||||
('D3A', 0),
|
||||
('D3B', 0),
|
||||
('D3C', 0),
|
||||
('D3D', 0),
|
||||
('D3E', 0),
|
||||
('D4A', 0),
|
||||
('D4B', 0),
|
||||
('D4C', 0),
|
||||
('D4D', 0),
|
||||
('D4E', 0),
|
||||
('D5A', 0),
|
||||
('D5B', 0),
|
||||
('D5C', 0),
|
||||
('D5D', 0),
|
||||
('D5E', 0),
|
||||
('D6A', 0),
|
||||
('D6B', 0),
|
||||
('D6C', 0),
|
||||
('D6D', 0),
|
||||
('D6E', 0),
|
||||
('D7A', 33),
|
||||
('D7B', 34),
|
||||
('D7C', 35),
|
||||
('D7D', 36),
|
||||
('E1A', 0),
|
||||
('E1B', 0),
|
||||
('E1C', 0),
|
||||
('E1D', 0),
|
||||
('E1E', 0),
|
||||
('E2A', 0),
|
||||
('E2B', 0),
|
||||
('E2C', 0),
|
||||
('E2D', 0),
|
||||
('E2E', 0),
|
||||
('E3A', 0),
|
||||
('E3B', 0),
|
||||
('E3C', 0),
|
||||
('E3D', 0),
|
||||
('E3E', 0),
|
||||
('E4A', 0),
|
||||
('E4B', 0),
|
||||
('E4C', 0),
|
||||
('E4D', 0),
|
||||
('E4E', 0),
|
||||
('E5A', 0),
|
||||
('E5B', 0),
|
||||
('E5C', 0),
|
||||
('E5D', 0),
|
||||
('E5E', 0),
|
||||
('E6A', 0),
|
||||
('E6B', 0),
|
||||
('E6C', 0),
|
||||
('E6D', 0),
|
||||
('E6E', 0),
|
||||
('E6F', 69),
|
||||
('E6G', 70),
|
||||
('E6H', 71),
|
||||
('E6I', 72),
|
||||
('E6J', 291),
|
||||
('E6K', 292),
|
||||
('E6L', 293),
|
||||
('E6M', 294),
|
||||
('E6N', 331),
|
||||
('E6O', 332),
|
||||
('E6P', 333),
|
||||
('E6Q', 334),
|
||||
('F1A', 0),
|
||||
('F1B', 0),
|
||||
('F1C', 0),
|
||||
('F2A', 0),
|
||||
('F2B', 0),
|
||||
('F2C', 0),
|
||||
('F3A', 0),
|
||||
('F3B', 0),
|
||||
('F3C', 0),
|
||||
('F4A', 0),
|
||||
('F4B', 0),
|
||||
('F4C', 0),
|
||||
('F5A', 0),
|
||||
('F5B', 0),
|
||||
('F5C', 0),
|
||||
('F6A', 0),
|
||||
('F6B', 0),
|
||||
('F6C', 0),
|
||||
('F6D', 0),
|
||||
('F6E', 0),
|
||||
('G1A', 0),
|
||||
('G1B', 0),
|
||||
('G1C', 0),
|
||||
('G2A', 0),
|
||||
('G2B', 0),
|
||||
('G2C', 0),
|
||||
('G3A', 0),
|
||||
('G3B', 0),
|
||||
('G3C', 0),
|
||||
('G4A', 0),
|
||||
('G4B', 0),
|
||||
('G4C', 0),
|
||||
('G5A', 0),
|
||||
('G5B', 0),
|
||||
('G5C', 0),
|
||||
('G6A', 0),
|
||||
('G6B', 0),
|
||||
('G6C', 0),
|
||||
('G6D', 0),
|
||||
('G6E', 0),
|
||||
('H1A', 1),
|
||||
('H2A', 0),
|
||||
('H2B', 0),
|
||||
('H2C', 0),
|
||||
('H2D', 0),
|
||||
('H3A', 0),
|
||||
('H3B', 0),
|
||||
('H3C', 0),
|
||||
('H3D', 0),
|
||||
('H4A', 0),
|
||||
('H4B', 0),
|
||||
('H4C', 0),
|
||||
('H4D', 0),
|
||||
('H5A', 0),
|
||||
('H5B', 0),
|
||||
('H5C', 0),
|
||||
('H5D', 0),
|
||||
('H6A', 0),
|
||||
('H6B', 0),
|
||||
('H6C', 0),
|
||||
('H6D', 0),
|
||||
]
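# Each entry above is (shelf_id, n): a non-zero n (presumably a locker number)
# marks the space as a locker; zero marks a regular member shelf. See the
# loop below.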
|
||||
|
||||
for shelf in SHELVES:
|
||||
models.StorageSpace.objects.create(
|
||||
shelf_id=shelf[0],
|
||||
location='lockers' if shelf[1] else 'member_shelves',
|
||||
)
|
||||
|
apiserver/misc/blank_member_form.odg (new binary file)
apiserver/misc/blank_member_form.pdf (new binary file)
apiserver/misc/member_card_template.jpg (new binary file, 56 KiB)
|
@ -1,26 +1,77 @@
|
|||
alabaster==0.7.12
|
||||
argon2-cffi==19.2.0
|
||||
asgiref==3.2.3
|
||||
bleach==3.1.0
|
||||
asgiref==3.5.0
|
||||
Babel==2.9.1
|
||||
backcall==0.2.0
|
||||
bleach==3.3.0
|
||||
certifi==2019.11.28
|
||||
cffi==1.13.2
|
||||
cffi==1.15.1
|
||||
chardet==3.0.4
|
||||
commonmark==0.9.1
|
||||
decorator==5.1.1
|
||||
defusedxml==0.6.0
|
||||
Django==3.0.2
|
||||
Django==3.1.14
|
||||
django-allauth==0.41.0
|
||||
django-cors-headers==3.11.0
|
||||
django-extensions==3.1.5
|
||||
django-rest-auth==0.9.5
|
||||
djangorestframework==3.11.0
|
||||
django-simple-history==2.8.0
|
||||
djangorestframework==3.11.2
|
||||
docutils==0.16
|
||||
fuzzywuzzy==0.17.0
|
||||
gunicorn==20.0.4
|
||||
icalendar==4.0.9
|
||||
idna==2.8
|
||||
imagesize==1.2.0
|
||||
importlib-metadata==4.12.0
|
||||
ipython==7.33.0
|
||||
jedi==0.18.1
|
||||
Jinja2==2.11.3
|
||||
logging-tree==1.8.1
|
||||
markdown-it-py==2.1.0
|
||||
MarkupSafe==1.1.1
|
||||
matplotlib-inline==0.1.3
|
||||
mdit-py-plugins==0.3.0
|
||||
mdurl==0.1.1
|
||||
myst-parser==0.18.0
|
||||
oauthlib==3.1.0
|
||||
Pillow==7.0.0
|
||||
pkg-resources==0.0.0
|
||||
packaging==20.0
|
||||
parso==0.8.3
|
||||
pexpect==4.8.0
|
||||
pickleshare==0.7.5
|
||||
Pillow==9.5.0
|
||||
prompt-toolkit==3.0.29
|
||||
ptyprocess==0.7.0
|
||||
pycparser==2.19
|
||||
Pygments==2.7.4
|
||||
pyparsing==2.4.6
|
||||
PyPDF2==1.26.0
|
||||
python-dateutil==2.8.1
|
||||
python-Levenshtein==0.12.0
|
||||
python-memcached==1.59
|
||||
python3-openid==3.1.0
|
||||
pytz==2019.3
|
||||
PyYAML==6.0
|
||||
recommonmark==0.7.1
|
||||
reportlab==4.0.4
|
||||
requests==2.22.0
|
||||
requests-oauthlib==1.3.0
|
||||
six==1.13.0
|
||||
snowballstemmer==2.0.0
|
||||
Sphinx==5.0.2
|
||||
sphinx-rtd-theme==0.4.3
|
||||
sphinxcontrib-applehelp==1.0.1
|
||||
sphinxcontrib-devhelp==1.0.1
|
||||
sphinxcontrib-htmlhelp==2.0.0
|
||||
sphinxcontrib-httpdomain==1.7.0
|
||||
sphinxcontrib-jsmath==1.0.1
|
||||
sphinxcontrib-qthelp==1.0.2
|
||||
sphinxcontrib-serializinghtml==1.1.5
|
||||
sqlparse==0.3.0
|
||||
urllib3==1.25.7
|
||||
traitlets==5.1.1
|
||||
typing-extensions==4.0.1
|
||||
urllib3==1.25.11
|
||||
wcwidth==0.2.5
|
||||
webencodings==0.5.1
|
||||
xmltodict==0.13.0
|
||||
zipp==3.8.1
|
||||
|
|
apiserver/scripts/calc_subs_membership_length.py (new executable file, 42 lines)
|
@ -0,0 +1,42 @@
|
|||
import django, sys, os
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
|
||||
django.setup()
|
||||
|
||||
from django.db.models import Prefetch, Sum
|
||||
from apiserver.api import models, utils
|
||||
|
||||
today = utils.today_alberta_tz()
|
||||
|
||||
members = models.Member.objects.filter(paused_date__isnull=True)
|
||||
related_tx = Prefetch(
|
||||
'user__transactions',
|
||||
queryset=models.Transaction.objects.filter(category='Membership'),
|
||||
)
|
||||
|
||||
sub_total = 0
|
||||
sub_count = 0
|
||||
other_total = 0
|
||||
other_count = 0
|
||||
|
||||
for member in members.prefetch_related(related_tx):
|
||||
name = member.preferred_name + ' ' + member.last_name[0]
|
||||
start = member.application_date
|
||||
length = today - member.application_date
|
||||
days = length.days
|
||||
|
||||
if member.user.transactions.count():
|
||||
if member.user.transactions.latest('date').paypal_txn_type == 'subscr_payment':
|
||||
print('subscriber,{},{},{}'.format(name, start, days))
|
||||
sub_total += days
|
||||
sub_count += 1
|
||||
continue
|
||||
|
||||
print('non-subscr,{},{},{}'.format(name, start, days))
|
||||
other_total += days
|
||||
other_count += 1
|
||||
|
||||
|
||||
print('subscriber avg:', int(sub_total / sub_count))
|
||||
print('non-subscr avg:', int(other_total / other_count))
|
||||
|
||||
|
apiserver/scripts/clean_member_names.py (new executable file, 24 lines)
|
@ -0,0 +1,24 @@
|
|||
import django, sys, os
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
|
||||
django.setup()
|
||||
|
||||
import datetime
|
||||
import json
|
||||
from apiserver.api import models, old_models, utils
|
||||
|
||||
members = models.Member.objects.all()
|
||||
|
||||
for m in members:
|
||||
first_name = m.first_name
|
||||
last_name = m.last_name
|
||||
preferred_name = m.preferred_name
|
||||
|
||||
print('Updating:', first_name, last_name, '-->', first_name.title(), last_name.title())
|
||||
|
||||
models.Member.objects.filter(id=m.id).update(
|
||||
first_name=first_name.title().strip(),
|
||||
last_name=last_name.title().strip(),
|
||||
preferred_name=preferred_name.title().strip(),
|
||||
)
|
||||
|
||||
print('Done.')
|
apiserver/scripts/convert_card_seen.py (new executable file, 24 lines)
|
@ -0,0 +1,24 @@
|
|||
import django, sys, os
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
|
||||
django.setup()
|
||||
|
||||
from datetime import datetime
|
||||
import json
|
||||
import pytz
|
||||
|
||||
from apiserver.api import models, utils
|
||||
|
||||
tz = pytz.timezone('America/Edmonton')
|
||||
|
||||
cards = models.Card.objects.order_by('last_seen_at')
|
||||
|
||||
for card in cards:
|
||||
seen = card.last_seen_at
|
||||
if seen:
|
||||
t = datetime.combine(seen, datetime.min.time())
|
||||
card.last_seen = tz.localize(t)
|
||||
card.save()
|
||||
|
||||
print('card', card.card_number, 'date', seen, '-->', card.last_seen)
|
||||
|
||||
print('Done.')
|
apiserver/scripts/delete_addresses.py (new executable file, 40 lines)
|
@ -0,0 +1,40 @@
|
|||
import django, sys, os
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
|
||||
django.setup()
|
||||
|
||||
from apiserver.api import models
|
||||
|
||||
print('Deleting member object addresses...')
|
||||
|
||||
result = models.Member.objects.update(
|
||||
street_address='',
|
||||
postal_code='',
|
||||
city='',
|
||||
)
|
||||
|
||||
print(result, 'rows affected')
|
||||
print()
|
||||
|
||||
print('Scrubbing history...')
|
||||
|
||||
result = models.Member.history.update(
|
||||
street_address='',
|
||||
postal_code='',
|
||||
city='',
|
||||
)
|
||||
|
||||
print(result, 'rows affected')
|
||||
print()
|
||||
|
||||
print('Deleting historical changes...')
|
||||
|
||||
address_fields = ['street_address', 'postal_code', 'city']
|
||||
result = models.HistoryChange.objects.filter(field__in=address_fields).update(
|
||||
old='',
|
||||
new='',
|
||||
)
|
||||
|
||||
print(result, 'rows affected')
|
||||
print()
|
||||
|
||||
print('Done.')
|
apiserver/scripts/delete_course_merge_into.py (new executable file, 39 lines)
|
@ -0,0 +1,39 @@
|
|||
import django, sys, os
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
|
||||
django.setup()
|
||||
|
||||
from apiserver.api import models
|
||||
|
||||
print()
|
||||
|
||||
if len(sys.argv) != 3:
|
||||
print('Invalid arguments.')
|
||||
print('Usage: python delete_course_merge_into.py 123 456')
|
||||
os._exit(1)
|
||||
|
||||
course_to_delete_id = sys.argv[1]
|
||||
course_merge_into_id = sys.argv[2]
|
||||
|
||||
course_to_delete = models.Course.objects.get(id=course_to_delete_id)
|
||||
course_merge_into = models.Course.objects.get(id=course_merge_into_id)
|
||||
|
||||
print('Delete course', course_to_delete_id, course_to_delete.name)
|
||||
print('and merge into', course_merge_into_id, course_merge_into.name, '?')
|
||||
print('ENTER to continue, ctrl-c to abort.')
|
||||
try:
|
||||
input()
|
||||
except KeyboardInterrupt:
|
||||
print('\nCancelled.')
|
||||
os._exit(0)
|
||||
|
||||
interests = course_to_delete.interests
|
||||
|
||||
print('Deleting', interests.count(), 'interests...')
|
||||
interests.all().delete()
|
||||
|
||||
sessions = course_to_delete.sessions
|
||||
print('Moving', sessions.count(), 'sessions...')
|
||||
sessions.update(course=course_merge_into)
|
||||
|
||||
print('Deleting course...')
|
||||
course_to_delete.delete()
|

apiserver/scripts/delete_old_backups.sh  (Executable file)
@@ -0,0 +1,9 @@
#!/bin/bash

# be safe
set -euf -o pipefail

# test these carefully
#find "${backup_folder}" -mindepth 1 -type d -print
#find "${backup_folder}" -mindepth 1 -type d -ctime +14 -print
#find "${backup_folder}" -mindepth 1 -type d -ctime +14 -exec rm -r {} \;

apiserver/scripts/delete_usagetracks.py  (Executable file)
@@ -0,0 +1,13 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from apiserver.api import models

indexs = models.HistoryIndex.objects.filter(object_name='UsageTrack')
count = indexs.delete()
print(count, 'indexs deleted')

changes = models.HistoryChange.objects.filter(field='num_seconds')
count = changes.delete()
print(count, 'changes deleted')

apiserver/scripts/distinguish_paused_expired.py  (Executable file)
@@ -0,0 +1,32 @@
# will not work after expired date change
# =======================================

import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from dateutil import relativedelta

from apiserver.api import models

members = models.Member.objects.all()
count = 0

for m in members:
    if m.paused_date and m.status == 'Former Member':
        print('Former member', m.preferred_name, m.last_name)

        if m.paused_date == m.expire_date:
            new_status = 'Expired Member'
            new_paused_date = m.paused_date + relativedelta.relativedelta(months=3)
            print(' Moving paused date', m.paused_date, '-->', new_paused_date)
            m.paused_date = new_paused_date
        else:
            new_status = 'Paused Member'

        print(' Setting status to', new_status)
        m.status = new_status
        count += 1
        m.save()

print('Processed', count)

apiserver/scripts/export_class_report.py  (Executable file)
@@ -0,0 +1,23 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from apiserver.api import models

sessions = models.Session.objects.filter(datetime__gte='2021-01-01')

with open('output.csv', 'w', newline='') as csvfile:
    fields = ['date', 'name', 'num_students','attended']
    writer = csv.DictWriter(csvfile, fieldnames=fields)

    writer.writeheader()

    for s in sessions:
        writer.writerow(dict(
            date=s.datetime.date(),
            name=s.course.name,
            num_students=s.students.count(),
            attended=s.students.filter(attendance_status='Attended').count(),
        ))

apiserver/scripts/export_member_addresses.py  (Executable file)
@@ -0,0 +1,13 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from apiserver.api import models

members = models.Member.objects.all()

writer = csv.writer(sys.stdout)

for m in members:
    writer.writerow([m.id, m.first_name, m.last_name, m.street_address, m.city, m.postal_code])

apiserver/scripts/export_member_statuses.py  (Executable file)
@@ -0,0 +1,24 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from apiserver.api import models

members = models.Member.objects.all()

writer = csv.writer(sys.stdout)

def color(status):
    if status in ['Prepaid', 'Current']:
        return 'Green'
    elif status == 'Due':
        return 'Yellow'
    elif status == 'Overdue':
        return 'Red'
    else:
        return 'Black'

for m in members:
    status = 'Former Member' if m.paused_date else m.status
    writer.writerow([m.id, m.first_name, m.last_name, status, color(status)])

apiserver/scripts/gen_card_photos.py  (Executable file)
@@ -0,0 +1,24 @@
# Generates card photos for existing members with photos

import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from apiserver.api import models, utils

members = models.Member.objects
members = members.filter(photo_large__isnull=False)
members = members.filter(card_photo__isnull=True)

print('Count:', members.count())

for m in members:
    print('Processing', m.first_name, m.last_name)

    m.card_photo = utils.gen_card_photo(m)
    print(m.card_photo)

    m.save()

print('Done.')

apiserver/scripts/generate_ldap_users.py  (Executable file)
@@ -0,0 +1,43 @@
# Generates missing LDAP users from Spaceport members
# this fixes an issue when a very old member resets their password
# and their LDAP user can't be found in the system.
#
# Assigns a random password to the user.


import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from apiserver.api import models, utils, utils_ldap
from uuid import uuid4
import re

random_password = lambda: str(uuid4())[:23]

members = models.Member.objects.all()

for member in members:
    print()

    username = member.user.username
    print('Checking LDAP for member id:', member.id, 'username:', username)

    if utils_ldap.is_configured():
        result = utils_ldap.find_user(member.user.username)
        if result == 200:
            print(' username found, skipping')
            continue

        print(' generating LDAP user...')

        data = dict(
            first_name=member.first_name,
            last_name=member.last_name,
            username=username,
            email=member.user.email,
            password1=random_password(),
        )
        result = utils_ldap.create_user(data)

        print(' result:', result)

apiserver/scripts/generate_users.py  (Executable file)
@@ -0,0 +1,96 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from django.contrib.auth.models import User

from apiserver.api import models, utils
from uuid import uuid4
import re

random_email = lambda: 'spaceport-' + str(uuid4()).split('-')[0] + '@protospace.ca'

members = models.Member.objects.all()

print('Deleting duplicates...')

for mid in [5203, 5257, 5261, 5277, 5278, 5299, 5307, 5310, 5240]:
    member = models.Member.objects.get(id=mid)
    print('Deleting:', member.first_name, member.last_name)
    member.delete()

print()
print('Generating Users')

count = 0

for member in members:
    print('Member', member.id, member.first_name, member.last_name)

    if not member.user:
        print(' No user, generating.')

        if not member.first_name.isalpha():
            print(' Non-alpha first name.')

        if not member.last_name.isalpha():
            print(' Non-alpha last name.')

        first_name = member.first_name.strip().lower()
        last_name = member.last_name.strip().lower()

        first_name = re.sub(r'[^a-z- ]+', '', first_name)
        last_name = re.sub(r'[^a-z- ]+', '', last_name)

        first_name = first_name.replace(' ', '.').replace('-', '.')
        last_name = last_name.replace(' ', '.').replace('-', '.')

        username = first_name + '.' + last_name
        print(' Username:', username)

        if member.old_email:
            email = member.old_email
        else:
            email = random_email()
            print(' No email, using:', email)

        user = User.objects.create_user(username, email, str(uuid4()))

        member.user = user
        member.save()

    x = models.Transaction.objects.filter(user=None, member_id=member.id)
    print(' Linking', x.count(), 'transactions')
    x.update(user=member.user)

    x = models.Card.objects.filter(user=None, member_id=member.id)
    print(' Linking', x.count(), 'cards')
    x.update(user=member.user)

    x = models.Training.objects.filter(user=None, member_id=member.id)
    print(' Linking', x.count(), 'trainings')
    x.update(user=member.user)

    x = models.PayPalHint.objects.filter(user=None, member_id=member.id)
    print(' Linking', x.count(), 'paypal hints')
    x.update(user=member.user)


    count += 1
    print()

print('Processed', count, 'members.')

print('Deleting orphan cards...')
count = models.Card.objects.filter(user__isnull=True).delete()[0]
print('Deleted', count, 'cards.')

print('Deleting orphan trainings...')
count = models.Training.objects.filter(user__isnull=True).delete()[0]
print('Deleted', count, 'trainings.')

print('Deleting orphan hints...')
count = models.PayPalHint.objects.filter(user__isnull=True).delete()[0]
print('Deleted', count, 'hints.')

print('Done.')

apiserver/scripts/hammertime.py  (Normal file)
@@ -0,0 +1,126 @@
import django, sys, os
sys.path.append("..")
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import random
import string
import datetime
from uuid import uuid4
import requests
from apiserver.api import models

API_URL = 'http://localhost:8002'

if len(sys.argv) == 2:
    token = sys.argv[1]
else:
    raise SystemExit('Please provide a login token in the command line')

members = models.Member.objects.all()
member_ids = list(members.values_list('id', flat=True))
courses = models.Course.objects.all()
course_ids = list(courses.values_list('id', flat=True))


randstr = lambda: str(uuid4()).split('-')[-1]

build_user = lambda: dict(
    first_name='test',
    last_name='tester',
    username=randstr()+'.tester',
    password1='protospace',
    password2='protospace',
    email=randstr()+'@domain.com',
    existing_member='false',
)

change_member = lambda: dict(
    street_address=randstr(),
    city=randstr(),
    postal_code=randstr(),
    first_name=randstr(),
)

build_transaction = lambda member_id: dict(
    member_id=member_id,
    date='2020-02-02',
    amount=0,
    account_type='Clearing',
    info_source='DB Edit',
    memo='Test transaction, ignore'
)

build_card = lambda member_id: dict(
    member_id=member_id,
    card_number=randstr(),
    active_status='card_active',
    notes='qot',
)

build_search = lambda: dict(
    q=random.choice(string.ascii_lowercase),
    seq=123,
)

build_session = lambda: dict(
    datetime=datetime.datetime.now(),
    course=random.choice(course_ids),
    cost=0,
)

def poster(headers, payload, route):
    r = requests.post(API_URL + route, data=payload, headers=headers, timeout=5)
    if r.status_code < 300:
        print(r.text.strip())
    else:
        print(r.text)
        raise Exception('Bad response code ' + str(r.status_code))

def patcher(headers, payload, route):
    r = requests.patch(API_URL + route, data=payload, headers=headers, timeout=5)
    if r.status_code < 300:
        print(r.text.strip())
    else:
        print(r.text)
        raise Exception('Bad response code ' + str(r.status_code))


def register_member():
    payload = build_user()
    poster({}, payload, '/registration/')

def edit_member():
    payload = change_member()
    headers = {'Authorization': 'Token ' + token}
    patcher(headers, payload, '/members/'+str(random.choice(member_ids))+'/')

def create_transaction():
    headers = {'Authorization': 'Token ' + token}
    payload = build_transaction(random.choice(member_ids))
    poster(headers, payload, '/transactions/')

def create_card():
    headers = {'Authorization': 'Token ' + token}
    payload = build_card(random.choice(member_ids))
    poster(headers, payload, '/cards/')

def perform_search():
    headers = {'Authorization': 'Token ' + token}
    payload = build_search()
    poster(headers, payload, '/search/')

def create_session():
    headers = {'Authorization': 'Token ' + token}
    payload = build_session()
    poster(headers, payload, '/sessions/')



while True:
    register_member()
    edit_member()
    perform_search()
    create_transaction()
    create_card()
    create_session()

apiserver/scripts/import_card_scans.py  (Executable file)
@@ -0,0 +1,54 @@
# Expects a scans.csv of the historical scans in format:
# date,card_number

import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from datetime import datetime, timedelta
from apiserver.api import models
from django.utils.timezone import now, pytz

def today_alberta_tz():
    return datetime.now(pytz.timezone('America/Edmonton')).date()

days = {}

date = datetime(2020, 3, 7).date()
while date <= today_alberta_tz():
    days[str(date)] = set()
    date += timedelta(days=1)

print('Initialized with:')
print(days)

with open('scans.csv', newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        datetime_obj = datetime.strptime(row['date'], "%Y-%m-%d %H:%M:%S")
        datetime_obj_utc = datetime_obj.replace(tzinfo=pytz.timezone('UTC'))
        date = datetime_obj_utc.astimezone(pytz.timezone('America/Edmonton'))

        card = row['card_number']

        print('Processing', date, card)
        day = str(date.date())

        if day not in days:
            days[day] = set()

        days[day].add(card)

print(days)

for day, cards in days.items():
    print(day, len(cards))

    models.StatsSpaceActivity.objects.update_or_create(
        date=day,
        defaults=dict(card_scans=len(cards)),
    )

print('Done.')

apiserver/scripts/import_member_counts.py  (Executable file)
@@ -0,0 +1,21 @@
# Expects a old_counts.csv of the historical counts in format:
# date,member_count,green_count

import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from apiserver.api import models

with open('old_counts.csv', newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        print('Adding', row['date'], row['member_count'], row['green_count'])

        models.StatsMemberCount.objects.update_or_create(
            date=row['date'],
            defaults=dict(member_count=row['member_count'], green_count=row['green_count']),
        )

print('Done.')

apiserver/scripts/import_missing_paypal.py  (Executable file)
@@ -0,0 +1,186 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import datetime
import json
from apiserver.api import models, old_models, utils

def find_name(t):
    try:
        p_info = t['payer_info']
    except KeyError:
        return 'Unknown Name'
    try:
        return p_info['payer_name']['given_name'] + ' ' + p_info['payer_name']['surname']
    except KeyError:
        pass
    try:
        return t['shipping_info']['name']
    except KeyError:
        pass
    try:
        return p_info['payer_name']['alternate_full_name']
    except KeyError:
        return 'Unknown Name'

def build_tx(t):
    t_info = t['transaction_info']
    p_info = t['payer_info']
    amount = float(t_info['transaction_amount']['value'])
    return dict(
        account_type='PayPal',
        amount=amount,
        date=t_info['transaction_updated_date'].split('T')[0],
        info_source='PayPal IPN',
        payment_method='PayPal',
        paypal_payer_id=t_info['paypal_account_id'],
        paypal_txn_id=t_info['transaction_id'],
        reference_number=t_info['transaction_id'],
        memo=t_info.get('transaction_subject', 'no memo') + ' (import missing paypal script)',
    )

def create_unmatched_member_tx(t):
    t_info = t['transaction_info']
    p_info = t['payer_info']
    transactions = models.Transaction.objects

    report_memo = 'Cant link sender name, {}, email: {}, note: {}'.format(
        find_name(t),
        p_info['email_address'],
        '(import missing paypal script)',
    )

    return transactions.create(
        **build_tx(t),
        report_memo=report_memo,
        report_type='Unmatched Member',
    )

def create_member_dues_tx(t, member, num_months):
    transactions = models.Transaction.objects

    # new member 3 for 2 will have to be manual anyway
    if num_months == 11:
        num_months = 12

    user = getattr(member, 'user', None)

    tx = transactions.create(
        **build_tx(t),
        member_id=member.id,
        number_of_membership_months=num_months,
        user=user,
    )
    utils.tally_membership_months(member)
    return tx

def create_unmatched_purchase_tx(t, member):
    t_info = t['transaction_info']
    p_info = t['payer_info']
    transactions = models.Transaction.objects

    user = getattr(member, 'user', None)
    report_memo = 'Unknown payment reason, {}, email: {}, note: {}'.format(
        find_name(t),
        p_info['email_address'],
        '(import missing paypal script)',
    )

    return transactions.create(
        **build_tx(t),
        member_id=member.id,
        report_memo=report_memo,
        report_type='Unmatched Purchase',
        user=user,
    )


PAYPAL_FOLDER = 'missing_paypal/'

transactions = models.Transaction.objects.all()
hints = models.PayPalHint.objects.all()
members = models.Member.objects.all()

paypal_files = os.listdir(PAYPAL_FOLDER)
paypal_json = [x for x in paypal_files if x.endswith('.json')]

if paypal_json:
    print('Found paypal json files:', paypal_json)
else:
    print('Couldnt find any paypal json files in', PAYPAL_FOLDER)
    exit(1)

paypal_txs = []
num_unmatched = 0
num_dues = 0
num_noreason = 0

for filename in paypal_json:
    with open(PAYPAL_FOLDER + filename) as f:
        j = json.load(f)
        paypal_txs.extend(j['transaction_details'])

print('Num transactions found:', len(paypal_txs))
print('Importing transactions into portal...')

for t in paypal_txs:
    t_info = t['transaction_info']

    account_id = t_info.get('paypal_account_id', None)
    if not account_id:
        print('Skipping tx id: {}, no payer (could be bank tx):'.format(
            t_info['transaction_id'],
        ))
        print(t_info)
        print()
        continue

    reference = t_info['transaction_id'][:11]

    similar = transactions.filter(reference_number__startswith=reference)
    if similar.exists():
        tx = similar.first()
        print('Skipping tx id: {}, transaction already in portal:'.format(
            t_info['transaction_id'],
        ))
        print('https://spaceport.dns.t0.vc/transactions/'+str(tx.id))
        print()
        continue

    print('Inspecting tx id:', t_info['transaction_id'])

    if not hints.filter(account=t_info['paypal_account_id']).exists():
        print('Unable to associate with member, reporting')
        create_unmatched_member_tx(t)
        num_unmatched += 1
        continue

    amount = float(t_info['transaction_amount']['value'])

    member_id = hints.get(account=t_info['paypal_account_id']).member_id
    member = members.get(id=member_id)
    print('Found member', member.first_name, member.last_name)
    monthly_fees = member.monthly_fees

    if amount.is_integer() and monthly_fees and amount % monthly_fees == 0:
        num_months = int(amount // monthly_fees)
    else:
        num_months = 0

    if num_months:
        print('Amount valid for membership dues, adding months:', num_months)
        create_member_dues_tx(t, member, num_months)
        num_dues += 1
        continue

    print('Unable to find a reason for payment, reporting')
    create_unmatched_purchase_tx(t, member)
    num_noreason += 1


print('Num unmatched members:', num_unmatched)
print('Num member dues:', num_dues)
print('Num no reason:', num_noreason)
print('Num skipped:', len(paypal_txs) - num_unmatched - num_dues - num_noreason)
print('Done.')

@@ -2,8 +2,9 @@ import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from apiserver.api import models, old_models
from apiserver.api.serializers import process_image
import datetime
from django.utils import timezone
from apiserver.api import models, old_models, utils

MEMBER_FIELDS = [
    'id',

@@ -70,7 +71,7 @@ TRAINING_FIELDS = [
    'id',
    # class_session_id -> session
    'member_id',
    'attendance_status',
    # attendance_status -> capitalize
    'sign_up_date',
    'paid_date',
]

@@ -84,9 +85,18 @@ models.Member.objects.all().delete()
print('Importing old members...')
old = old_models.Members.objects.using('old_portal').all()

import_date = old.last().web_crawl_date.date()
print('Using import date:', import_date)

for o in old:
    new = {}

    if o.status == 'Contractor':
        print('Skipping contractor member #{} - {} {}'.format(
            o.id, o.first_name, o.last_name
        ))
        continue

    for f in MEMBER_FIELDS:
        new[f] = o.__dict__.get(f, None)

@@ -94,12 +104,13 @@ for o in old:
    new['city'] = '{}, {}'.format(o.city, o.province)
    new['old_email'] = o.email
    new['is_minor'] = o.minor
    new['paused_date'] = None

    small, medium, large = None, None, None
    if str(o.id) in photo_folders:
        folder = 'old_photos/' + str(o.id)
        if 'photo.jpg' in os.listdir(folder):
            small, medium, large = process_image(folder + '/photo.jpg')
            small, medium, large = utils.process_image_upload(folder + '/photo.jpg')
            print('Found a photo')

    models.Member.objects.create(photo_small=small, photo_medium=medium, photo_large=large, **new)

@@ -117,13 +128,58 @@ for o in old:
    new = {}

    for f in TRANSACTION_FIELDS:
        new[f] = o.__dict__.get(f, None)
        tmp = o.__dict__.get(f, None)
        if isinstance(tmp, str):
            new[f] = tmp.replace('Paypal', 'PayPal')
        else:
            new[f] = tmp

    models.Transaction.objects.create(**new)
    print('Imported transaction #{} - {} {}'.format(
        o.id, o.member_id, o.category
    ))

print('Faking membership months...')
members = models.Member.objects.all()
bad_count = 0

for m in members:
    old_status = m.status
    old_expire = m.expire_date

    if 'Former' in old_status:
        m.status = 'Old Portal ' + old_status
        m.save()
        continue
    if not m.current_start_date: continue

    tx, _ = utils.fake_missing_membership_months(m)
    utils.tally_membership_months(m, import_date)
    utils.gen_member_forms(m)

    if tx:
        print(m.first_name, m.last_name, tx.memo)

    if old_status != m.status or old_expire != m.expire_date:
        print('Expire / status mismatch member:', m.__dict__)
        print('New status:', m.status)
        print('Old status:', old_status)
        print('New expire:', m.expire_date)
        print('Old expire:', old_expire)
        print('')
        bad_count += 1

print('Import mismatch count:', bad_count)

print('Pausing former members...')
for m in members:
    if 'Former' in m.status:
        paused_date = m.expire_date or datetime.date.today()
        m.paused_date = paused_date
        m.save()
        print('Paused', m.first_name, m.last_name)



print('Deleting all cards...')
models.Card.objects.all().delete()

@@ -173,7 +229,9 @@ for o in old:
        new[f] = o.__dict__.get(f, None)
    new['course'] = models.Course.objects.get(id=o.course_id)
    new['old_instructor'] = o.instructor
    new['datetime'] = str(o.datetime).replace('Z', '-07:00')
    dt = o.datetime.replace(tzinfo=None)
    dt = timezone.pytz.timezone('America/Edmonton').localize(dt)
    new['datetime'] = dt.astimezone(timezone.pytz.UTC)

    models.Session.objects.create(**new)
    print('Imported session #{} - {} {}'.format(

@@ -192,6 +250,7 @@ for o in old:
    for f in TRAINING_FIELDS:
        new[f] = o.__dict__.get(f, None)
    new['session'] = models.Session.objects.get(id=o.class_session_id)
    new['attendance_status'] = o.attendance_status.capitalize()

    models.Training.objects.create(**new)
    print('Imported training #{} - {} {}'.format(

apiserver/scripts/import_paypal_txn_type.py  (Executable file)
@@ -0,0 +1,34 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from urllib.parse import parse_qs

from apiserver.api import models

ipns = models.IPN.objects.all()
transactions = models.Transaction.objects.filter(paypal_txn_id__isnull=False)
txs = {}

for tx in transactions:
    txs[tx.paypal_txn_id] = tx

for ipn in ipns:
    data = parse_qs(ipn.data)

    if data.get('payment_status', [False])[0] != 'Completed':
        continue

    txn_id = data['txn_id'][0]
    txn_type = data['txn_type'][0]

    print('Processing tx id:', txn_id, '| type:', txn_type)

    txs[txn_id].paypal_txn_type = txn_type

print('Performing bulk update...')
transactions.bulk_update(txs.values(), ['paypal_txn_type'])

print('Processed', ipns.count(), 'IPNs.')

print('Done.')

apiserver/scripts/import_rabbit_group.py  (Executable file)
@@ -0,0 +1,59 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import datetime
import json
import re
from apiserver.api import models, utils

def clean(name):
    return re.sub(r'[^a-z]', '', name.lower())

with open('ad-rabbit.json', 'r') as f:
    ad_dirty = json.load(f)

with open('ad-dump.json', 'r') as f:
    ad_dump = json.load(f)

ad = {}
for sam in ad_dirty:
    try:
        ad[clean(sam)] = ad_dump[sam]['mail']
    except KeyError:
        continue

members = models.Member.objects.all()

portal = {}
for m in members:
    name = m.first_name + m.last_name
    portal[clean(name)] = m

good_members = {}

for ad_name, email in ad.items():
    if ad_name in portal:
        good_members[ad_name] = portal[ad_name]
        print('found ad name match', ad_name)
    else:
        print('cant find ad name', ad_name)
        print('searching for email...')
        for m in members:
            if m.old_email and m.old_email.lower() == email.lower():
                good_members[ad_name] = m
                print(' found email', email)
                break
        else:
            print(' cant link email', email)

print()
print()

for m in good_members.values():
    if not m.rabbit_cert_date:
        m.rabbit_cert_date = utils.today_alberta_tz()
        print('certified', m.first_name, m.last_name)
        m.save()
    else:
        print('skipping', m.first_name, m.last_name)

apiserver/scripts/import_signup_counts.py  (Executable file)
@@ -0,0 +1,22 @@
# Expects a old_counts.csv of the historical counts in format:
# month,signup_count
# month in YYYY-MM format

import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from apiserver.api import models

with open('old_counts.csv', newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        print('Adding', row['month'], row['signup_count'])

        models.StatsSignupCount.objects.update_or_create(
            month=row['month']+'-01',
            defaults=dict(signup_count=row['signup_count']),
        )

print('Done.')

apiserver/scripts/import_six_month_plus_count.py  (Executable file)
@@ -0,0 +1,21 @@
# Expects a old_counts.csv of the historical counts in format:
# date,six_month_plus_count

import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from apiserver.api import models

with open('old_counts.csv', newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        print('Adding', row['date'], row['six_month_plus_count'])

        models.StatsMemberCount.objects.update_or_create(
            date=row['date'],
            defaults=dict(six_month_plus_count=row['six_month_plus_count']),
        )

print('Done.')

apiserver/scripts/import_subs_counts.py  (Executable file)
@@ -0,0 +1,711 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from apiserver.api import models

data = '''
2020-03-02,74
|
||||
2020-03-03,77
|
||||
2020-03-04,79
|
||||
2020-03-05,83
|
||||
2020-03-06,87
|
||||
2020-03-07,93
|
||||
2020-03-08,98
|
||||
2020-03-09,103
|
||||
2020-03-10,109
|
||||
2020-03-11,115
|
||||
2020-03-12,118
|
||||
2020-03-13,120
|
||||
2020-03-14,124
|
||||
2020-03-15,125
|
||||
2020-03-16,127
|
||||
2020-03-17,128
|
||||
2020-03-18,127
|
||||
2020-03-19,127
|
||||
2020-03-20,126
|
||||
2020-03-21,127
|
||||
2020-03-22,127
|
||||
2020-03-23,127
|
||||
2020-03-24,127
|
||||
2020-03-25,127
|
||||
2020-03-26,129
|
||||
2020-03-27,128
|
||||
2020-03-28,129
|
||||
2020-03-29,129
|
||||
2020-03-30,129
|
||||
2020-03-31,129
|
||||
2020-04-01,127
|
||||
2020-04-02,127
|
||||
2020-04-03,127
|
||||
2020-04-04,127
|
||||
2020-04-05,127
|
||||
2020-04-06,127
|
||||
2020-04-07,129
|
||||
2020-04-08,129
|
||||
2020-04-09,129
|
||||
2020-04-10,128
|
||||
2020-04-11,127
|
||||
2020-04-12,127
|
||||
2020-04-13,128
|
||||
2020-04-14,128
|
||||
2020-04-15,129
|
||||
2020-04-16,129
|
||||
2020-04-17,129
|
||||
2020-04-18,129
|
||||
2020-04-19,130
|
||||
2020-04-20,130
|
||||
2020-04-21,130
|
||||
2020-04-22,130
|
||||
2020-04-23,130
|
||||
2020-04-24,130
|
||||
2020-04-25,131
|
||||
2020-04-26,132
|
||||
2020-04-27,133
|
||||
2020-04-28,133
|
||||
2020-04-29,133
|
||||
2020-04-30,133
|
||||
2020-05-01,133
|
||||
2020-05-02,133
|
||||
2020-05-03,133
|
||||
2020-05-04,133
|
||||
2020-05-05,132
|
||||
2020-05-06,132
|
||||
2020-05-07,132
|
||||
2020-05-08,132
|
||||
2020-05-09,132
|
||||
2020-05-10,132
|
||||
2020-05-11,131
|
||||
2020-05-12,131
|
||||
2020-05-13,129
|
||||
2020-05-14,129
|
||||
2020-05-15,127
|
||||
2020-05-16,127
|
||||
2020-05-17,127
|
||||
2020-05-18,127
|
||||
2020-05-19,127
|
||||
2020-05-20,127
|
||||
2020-05-21,127
|
||||
2020-05-22,128
|
||||
2020-05-23,127
|
||||
2020-05-24,127
|
||||
2020-05-25,127
|
||||
2020-05-26,127
|
||||
2020-05-27,127
|
||||
2020-05-28,127
|
||||
2020-05-29,127
|
||||
2020-05-30,127
|
||||
2020-05-31,127
|
||||
2020-06-01,126
|
||||
2020-06-02,125
|
||||
2020-06-03,126
|
||||
2020-06-04,126
|
||||
2020-06-05,126
|
||||
2020-06-06,126
|
||||
2020-06-07,126
|
||||
2020-06-08,126
|
||||
2020-06-09,126
|
||||
2020-06-10,126
|
||||
2020-06-11,126
|
||||
2020-06-12,126
|
||||
2020-06-13,127
|
||||
2020-06-14,127
|
||||
2020-06-15,128
|
||||
2020-06-16,128
|
||||
2020-06-17,128
|
||||
2020-06-18,128
|
||||
2020-06-19,128
|
||||
2020-06-20,128
|
||||
2020-06-21,128
|
||||
2020-06-22,128
|
||||
2020-06-23,128
|
||||
2020-06-24,129
|
||||
2020-06-25,129
|
||||
2020-06-26,130
|
||||
2020-06-27,129
|
||||
2020-06-28,128
|
||||
2020-06-29,128
|
||||
2020-06-30,128
|
||||
2020-07-01,127
|
||||
2020-07-02,128
|
||||
2020-07-03,129
|
||||
2020-07-04,129
|
||||
2020-07-05,129
|
||||
2020-07-06,129
|
||||
2020-07-07,130
|
||||
2020-07-08,131
|
||||
2020-07-09,131
|
||||
2020-07-10,130
|
||||
2020-07-11,130
|
||||
2020-07-12,129
|
||||
2020-07-13,129
|
||||
2020-07-14,129
|
||||
2020-07-15,130
|
||||
2020-07-16,131
|
||||
2020-07-17,131
|
||||
2020-07-18,131
|
||||
2020-07-19,131
|
||||
2020-07-20,131
|
||||
2020-07-21,131
|
||||
2020-07-22,130
|
||||
2020-07-23,130
|
||||
2020-07-24,130
|
||||
2020-07-25,130
|
||||
2020-07-26,130
|
||||
2020-07-27,129
|
||||
2020-07-28,129
|
||||
2020-07-29,129
|
||||
2020-07-30,131
|
||||
2020-07-31,131
|
||||
2020-08-01,131
|
||||
2020-08-02,131
|
||||
2020-08-03,131
|
||||
2020-08-04,131
|
||||
2020-08-05,131
|
||||
2020-08-06,130
|
||||
2020-08-07,130
|
||||
2020-08-08,129
|
||||
2020-08-09,128
|
||||
2020-08-10,128
|
||||
2020-08-11,127
|
||||
2020-08-12,127
|
||||
2020-08-13,127
|
||||
2020-08-14,127
|
||||
2020-08-15,126
|
||||
2020-08-16,126
|
||||
2020-08-17,126
|
||||
2020-08-18,126
|
||||
2020-08-19,125
|
||||
2020-08-20,125
|
||||
2020-08-21,125
|
||||
2020-08-22,125
|
||||
2020-08-23,125
|
||||
2020-08-24,124
|
||||
2020-08-25,124
|
||||
2020-08-26,124
|
||||
2020-08-27,124
|
||||
2020-08-28,124
|
||||
2020-08-29,124
|
||||
2020-08-30,123
|
||||
2020-08-31,123
|
||||
2020-09-01,123
|
||||
2020-09-02,123
|
||||
2020-09-03,123
|
||||
2020-09-04,123
|
||||
2020-09-05,124
|
||||
2020-09-06,124
|
||||
2020-09-07,124
|
||||
2020-09-08,124
|
||||
2020-09-09,123
|
||||
2020-09-10,123
|
||||
2020-09-11,122
|
||||
2020-09-12,122
|
||||
2020-09-13,122
|
||||
2020-09-14,122
|
||||
2020-09-15,122
|
||||
2020-09-16,122
|
||||
2020-09-17,122
|
||||
2020-09-18,122
|
||||
2020-09-19,122
|
||||
2020-09-20,122
|
||||
2020-09-21,122
|
||||
2020-09-22,122
|
||||
2020-09-23,124
|
||||
2020-09-24,124
|
||||
2020-09-25,124
|
||||
2020-09-26,126
|
||||
2020-09-27,129
|
||||
2020-09-28,129
|
||||
2020-09-29,129
|
||||
2020-09-30,129
|
||||
2020-10-01,128
|
||||
2020-10-02,128
|
||||
2020-10-03,128
|
||||
2020-10-04,128
|
||||
2020-10-05,128
|
||||
2020-10-06,129
|
||||
2020-10-07,128
|
||||
2020-10-08,129
|
||||
2020-10-09,129
|
||||
2020-10-10,128
|
||||
2020-10-11,129
|
||||
2020-10-12,129
|
||||
2020-10-13,129
|
||||
2020-10-14,129
|
||||
2020-10-15,129
|
||||
2020-10-16,130
|
||||
2020-10-17,129
|
||||
2020-10-18,129
|
||||
2020-10-19,129
|
||||
2020-10-20,128
|
||||
2020-10-21,128
|
||||
2020-10-22,128
|
||||
2020-10-23,128
|
||||
2020-10-24,128
|
||||
2020-10-25,128
|
||||
2020-10-26,128
|
||||
2020-10-27,128
|
||||
2020-10-28,128
|
||||
2020-10-29,129
|
||||
2020-10-30,129
|
||||
2020-10-31,129
|
||||
2020-11-01,129
|
||||
2020-11-02,130
|
||||
2020-11-03,130
|
||||
2020-11-04,131
|
||||
2020-11-05,132
|
||||
2020-11-06,133
|
||||
2020-11-07,133
|
||||
2020-11-08,133
|
||||
2020-11-09,133
|
||||
2020-11-10,133
|
||||
2020-11-11,133
|
||||
2020-11-12,133
|
||||
2020-11-13,134
|
||||
2020-11-14,134
|
||||
2020-11-15,134
|
||||
2020-11-16,133
|
||||
2020-11-17,133
|
||||
2020-11-18,132
|
||||
2020-11-19,132
|
||||
2020-11-20,132
|
||||
2020-11-21,132
|
||||
2020-11-22,132
|
||||
2020-11-23,132
|
||||
2020-11-24,133
|
||||
2020-11-25,133
|
||||
2020-11-26,132
|
||||
2020-11-27,132
|
||||
2020-11-28,132
|
||||
2020-11-29,132
|
||||
2020-11-30,132
|
||||
2020-12-01,131
|
||||
2020-12-02,131
|
||||
2020-12-03,132
|
||||
2020-12-04,132
|
||||
2020-12-05,132
|
||||
2020-12-06,132
|
||||
2020-12-07,131
|
||||
2020-12-08,130
|
||||
2020-12-09,130
|
||||
2020-12-10,130
|
||||
2020-12-11,130
|
||||
2020-12-12,130
|
||||
2020-12-13,130
|
||||
2020-12-14,129
|
||||
2020-12-15,129
|
||||
2020-12-16,129
|
||||
2020-12-17,128
|
||||
2020-12-18,127
|
||||
2020-12-19,127
|
||||
2020-12-20,127
|
||||
2020-12-21,127
|
||||
2020-12-22,127
|
||||
2020-12-23,126
|
||||
2020-12-24,126
|
||||
2020-12-25,126
|
||||
2020-12-26,125
|
||||
2020-12-27,125
|
||||
2020-12-28,125
|
||||
2020-12-29,125
|
||||
2020-12-30,125
|
||||
2020-12-31,125
|
||||
2021-01-01,125
|
||||
2021-01-02,125
|
||||
2021-01-03,125
|
||||
2021-01-04,125
|
||||
2021-01-05,125
|
||||
2021-01-06,125
|
||||
2021-01-07,125
|
||||
2021-01-08,125
|
||||
2021-01-09,125
|
||||
2021-01-10,124
|
||||
2021-01-11,124
|
||||
2021-01-12,123
|
||||
2021-01-13,123
|
||||
2021-01-14,123
|
||||
2021-01-15,123
|
||||
2021-01-16,123
|
||||
2021-01-17,123
|
||||
2021-01-18,123
|
||||
2021-01-19,123
|
||||
2021-01-20,123
|
||||
2021-01-21,122
|
||||
2021-01-22,122
|
||||
2021-01-23,122
|
||||
2021-01-24,122
|
||||
2021-01-25,121
|
||||
2021-01-26,119
|
||||
2021-01-27,119
|
||||
2021-01-28,118
|
||||
2021-01-29,118
|
||||
2021-01-30,118
|
||||
2021-01-31,118
|
||||
2021-02-01,118
|
||||
2021-02-02,117
|
||||
2021-02-03,117
|
||||
2021-02-04,117
|
||||
2021-02-05,118
|
||||
2021-02-06,118
|
||||
2021-02-07,118
|
||||
2021-02-08,118
|
||||
2021-02-09,118
|
||||
2021-02-10,118
|
||||
2021-02-11,116
|
||||
2021-02-12,116
|
||||
2021-02-13,115
|
||||
2021-02-14,115
|
||||
2021-02-15,115
|
||||
2021-02-16,115
|
||||
2021-02-17,115
|
||||
2021-02-18,115
|
||||
2021-02-19,115
|
||||
2021-02-20,114
|
||||
2021-02-21,114
|
||||
2021-02-22,114
|
||||
2021-02-23,114
|
||||
2021-02-24,114
|
||||
2021-02-25,114
|
||||
2021-02-26,114
|
||||
2021-02-27,114
|
||||
2021-02-28,115
|
||||
2021-03-01,115
|
||||
2021-03-02,115
|
||||
2021-03-03,115
|
||||
2021-03-04,113
|
||||
2021-03-05,113
|
||||
2021-03-06,113
|
||||
2021-03-07,112
|
||||
2021-03-08,112
|
||||
2021-03-09,113
|
||||
2021-03-10,113
|
||||
2021-03-11,113
|
||||
2021-03-12,113
|
||||
2021-03-13,113
|
||||
2021-03-14,113
|
||||
2021-03-15,113
|
||||
2021-03-16,112
|
||||
2021-03-17,111
|
||||
2021-03-18,111
|
||||
2021-03-19,111
|
||||
2021-03-20,111
|
||||
2021-03-21,111
|
||||
2021-03-22,112
|
||||
2021-03-23,112
|
||||
2021-03-24,112
|
||||
2021-03-25,111
|
||||
2021-03-26,111
|
||||
2021-03-27,111
|
||||
2021-03-28,111
|
||||
2021-03-29,111
|
||||
2021-03-30,111
|
||||
2021-03-31,111
|
||||
2021-04-01,111
|
||||
2021-04-02,112
|
||||
2021-04-03,112
|
||||
2021-04-04,112
|
||||
2021-04-05,112
|
||||
2021-04-06,112
|
||||
2021-04-07,112
|
||||
2021-04-08,112
|
||||
2021-04-09,111
|
||||
2021-04-10,111
|
||||
2021-04-11,111
|
||||
2021-04-12,111
|
||||
2021-04-13,111
|
||||
2021-04-14,110
|
||||
2021-04-15,110
|
||||
2021-04-16,110
|
||||
2021-04-17,110
|
||||
2021-04-18,110
|
||||
2021-04-19,110
|
||||
2021-04-20,110
|
||||
2021-04-21,110
|
||||
2021-04-22,110
|
||||
2021-04-23,110
|
||||
2021-04-24,110
|
||||
2021-04-25,110
|
||||
2021-04-26,109
|
||||
2021-04-27,109
|
||||
2021-04-28,109
|
||||
2021-04-29,109
|
||||
2021-04-30,109
|
||||
2021-05-01,109
|
||||
2021-05-02,109
|
||||
2021-05-03,109
|
||||
2021-05-04,109
|
||||
2021-05-05,109
|
||||
2021-05-06,109
|
||||
2021-05-07,109
|
||||
2021-05-08,109
|
||||
2021-05-09,109
|
||||
2021-05-10,109
|
||||
2021-05-11,109
|
||||
2021-05-12,109
|
||||
2021-05-13,109
|
||||
2021-05-14,109
|
||||
2021-05-15,108
|
||||
2021-05-16,108
|
||||
2021-05-17,108
|
||||
2021-05-18,109
|
||||
2021-05-19,109
|
||||
2021-05-20,109
|
||||
2021-05-21,109
|
||||
2021-05-22,109
|
||||
2021-05-23,109
|
||||
2021-05-24,109
|
||||
2021-05-25,110
|
||||
2021-05-26,110
|
||||
2021-05-27,110
|
||||
2021-05-28,111
|
||||
2021-05-29,111
|
||||
2021-05-30,112
|
||||
2021-05-31,112
|
||||
2021-06-01,112
|
||||
2021-06-02,112
|
||||
2021-06-03,112
|
||||
2021-06-04,111
|
||||
2021-06-05,111
|
||||
2021-06-06,111
|
||||
2021-06-07,111
|
||||
2021-06-08,111
|
||||
2021-06-09,111
|
||||
2021-06-10,111
|
||||
2021-06-11,111
|
||||
2021-06-12,111
|
||||
2021-06-13,111
|
||||
2021-06-14,111
|
||||
2021-06-15,111
|
||||
2021-06-16,111
|
||||
2021-06-17,111
|
||||
2021-06-18,113
|
||||
2021-06-19,112
|
||||
2021-06-20,112
|
||||
2021-06-21,112
|
||||
2021-06-22,114
|
||||
2021-06-23,114
|
||||
2021-06-24,113
|
||||
2021-06-25,113
|
||||
2021-06-26,113
|
||||
2021-06-27,113
|
||||
2021-06-28,113
|
||||
2021-06-29,113
|
||||
2021-06-30,113
|
||||
2021-07-01,111
|
||||
2021-07-02,111
|
||||
2021-07-03,110
|
||||
2021-07-04,111
|
||||
2021-07-05,111
|
||||
2021-07-06,111
|
||||
2021-07-07,111
|
||||
2021-07-08,111
|
||||
2021-07-09,111
|
||||
2021-07-10,111
|
||||
2021-07-11,111
|
||||
2021-07-12,111
|
||||
2021-07-13,111
|
||||
2021-07-14,111
|
||||
2021-07-15,110
|
||||
2021-07-16,110
|
||||
2021-07-17,110
|
||||
2021-07-18,110
|
||||
2021-07-19,111
|
||||
2021-07-20,111
|
||||
2021-07-21,111
|
||||
2021-07-22,111
|
||||
2021-07-23,111
|
||||
2021-07-24,111
|
||||
2021-07-25,111
|
||||
2021-07-26,111
|
||||
2021-07-27,111
|
||||
2021-07-28,112
|
||||
2021-07-29,112
|
||||
2021-07-30,112
|
||||
2021-07-31,112
|
||||
2021-08-01,112
|
||||
2021-08-02,112
|
||||
2021-08-03,112
|
||||
2021-08-04,112
|
||||
2021-08-05,112
|
||||
2021-08-06,112
|
||||
2021-08-07,112
|
||||
2021-08-08,112
|
||||
2021-08-09,112
|
||||
2021-08-10,112
|
||||
2021-08-11,112
|
||||
2021-08-12,112
|
||||
2021-08-13,112
|
||||
2021-08-14,113
|
||||
2021-08-15,113
|
||||
2021-08-16,113
|
||||
2021-08-17,113
|
||||
2021-08-18,113
|
||||
2021-08-19,114
|
||||
2021-08-20,114
|
||||
2021-08-21,114
|
||||
2021-08-22,114
|
||||
2021-08-23,114
|
||||
2021-08-24,114
|
||||
2021-08-25,114
|
||||
2021-08-26,114
|
||||
2021-08-27,114
|
||||
2021-08-28,114
|
||||
2021-08-29,114
|
||||
2021-08-30,113
|
||||
2021-08-31,113
|
||||
2021-09-01,115
|
||||
2021-09-02,115
|
||||
2021-09-03,115
|
||||
2021-09-04,115
|
||||
2021-09-05,115
|
||||
2021-09-06,115
|
||||
2021-09-07,115
|
||||
2021-09-08,115
|
||||
2021-09-09,115
|
||||
2021-09-10,115
|
||||
2021-09-11,115
|
||||
2021-09-12,115
|
||||
2021-09-13,115
|
||||
2021-09-14,115
|
||||
2021-09-15,119
|
||||
2021-09-16,119
|
||||
2021-09-17,119
|
||||
2021-09-18,119
|
||||
2021-09-19,119
|
||||
2021-09-20,119
|
||||
2021-09-21,119
|
||||
2021-09-22,121
|
||||
2021-09-23,118
|
||||
2021-09-24,118
|
||||
2021-09-25,118
|
||||
2021-09-26,118
|
||||
2021-09-27,118
|
||||
2021-09-28,118
|
||||
2021-09-29,118
|
||||
2021-09-30,119
|
||||
2021-10-01,119
|
||||
2021-10-02,119
|
||||
2021-10-03,119
|
||||
2021-10-04,119
|
||||
2021-10-05,120
|
||||
2021-10-06,121
|
||||
2021-10-07,121
|
||||
2021-10-08,121
|
||||
2021-10-09,121
|
||||
2021-10-10,121
|
||||
2021-10-11,121
|
||||
2021-10-12,121
|
||||
2021-10-13,123
|
||||
2021-10-14,124
|
||||
2021-10-15,124
|
||||
2021-10-16,124
|
||||
2021-10-17,124
|
||||
2021-10-18,124
|
||||
2021-10-19,124
|
||||
2021-10-20,124
|
||||
2021-10-21,124
|
||||
2021-10-22,124
|
||||
2021-10-23,124
|
||||
2021-10-24,124
|
||||
2021-10-25,124
|
||||
2021-10-26,125
|
||||
2021-10-27,126
|
||||
2021-10-28,126
|
||||
2021-10-29,126
|
||||
2021-10-30,125
|
||||
2021-10-31,125
|
||||
2021-11-01,125
|
||||
2021-11-02,125
|
||||
2021-11-03,125
|
||||
2021-11-04,125
|
||||
2021-11-05,124
|
||||
2021-11-06,125
|
||||
2021-11-07,125
|
||||
2021-11-08,124
|
||||
2021-11-09,124
|
||||
2021-11-10,124
|
||||
2021-11-11,123
|
||||
2021-11-12,123
|
||||
2021-11-13,122
|
||||
2021-11-14,122
|
||||
2021-11-15,122
|
||||
2021-11-16,123
|
||||
2021-11-17,123
|
||||
2021-11-18,123
|
||||
2021-11-19,123
|
||||
2021-11-20,123
|
||||
2021-11-21,124
|
||||
2021-11-22,124
|
||||
2021-11-23,123
|
||||
2021-11-24,125
|
||||
2021-11-25,126
|
||||
2021-11-26,126
|
||||
2021-11-27,126
|
||||
2021-11-28,127
|
||||
2021-11-29,127
|
||||
2021-11-30,127
|
||||
2021-12-01,129
|
||||
2021-12-02,129
|
||||
2021-12-03,129
|
||||
2021-12-04,130
|
||||
2021-12-05,130
|
||||
2021-12-06,130
|
||||
2021-12-07,129
|
||||
2021-12-08,129
|
||||
2021-12-09,129
|
||||
2021-12-10,129
|
||||
2021-12-11,129
|
||||
2021-12-12,129
|
||||
2021-12-13,129
|
||||
2021-12-14,129
|
||||
2021-12-15,129
|
||||
2021-12-16,129
|
||||
2021-12-17,129
|
||||
2021-12-18,129
|
||||
2021-12-19,129
|
||||
2021-12-20,129
|
||||
2021-12-21,129
|
||||
2021-12-22,129
|
||||
2021-12-23,129
|
||||
2021-12-24,129
|
||||
2021-12-25,129
|
||||
2021-12-26,129
|
||||
2021-12-27,129
|
||||
2021-12-28,129
|
||||
2021-12-29,131
|
||||
2021-12-30,131
|
||||
2021-12-31,131
|
||||
2022-01-01,131
|
||||
2022-01-02,131
|
||||
2022-01-03,131
|
||||
2022-01-04,131
|
||||
2022-01-05,132
|
||||
2022-01-06,132
|
||||
2022-01-07,131
|
||||
2022-01-08,131
|
||||
2022-01-09,131
|
||||
2022-01-10,130
|
||||
2022-01-11,130
|
||||
2022-01-12,130
|
||||
2022-01-13,130
|
||||
2022-01-14,129
|
||||
2022-01-15,129
|
||||
2022-01-16,129
|
||||
2022-01-17,129
|
||||
2022-01-18,129
|
||||
2022-01-19,131
|
||||
2022-01-20,131
|
||||
2022-01-21,131
|
||||
2022-01-22,131
'''

for row in data.split():
    date, count = row.split(',')
    print('Adding', date, count)

    models.StatsMemberCount.objects.update_or_create(
        date=date,
        defaults=dict(subscriber_count=count),
    )

print('Done.')

apiserver/scripts/import_trotec_group.py  (Executable file)
@@ -0,0 +1,59 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import datetime
import json
import re
from apiserver.api import models, utils

def clean(name):
    return re.sub(r'[^a-z]', '', name.lower())

with open('ad-trotec.json', 'r') as f:
    ad_dirty = json.load(f)

with open('ad-dump.json', 'r') as f:
    ad_dump = json.load(f)

ad = {}
for sam in ad_dirty:
    try:
        ad[clean(sam)] = ad_dump[sam]['mail']
    except KeyError:
        continue

members = models.Member.objects.all()

portal = {}
for m in members:
    name = m.first_name + m.last_name
    portal[clean(name)] = m

good_members = {}

for ad_name, email in ad.items():
    if ad_name in portal:
        good_members[ad_name] = portal[ad_name]
        print('found ad name match', ad_name)
    else:
        print('cant find ad name', ad_name)
        print('searching for email...')
        for m in members:
            if m.old_email and m.old_email.lower() == email.lower():
                good_members[ad_name] = m
                print(' found email', email)
                break
        else:
            print(' cant link email', email)

print()
print()

for m in good_members.values():
    if not m.trotec_cert_date:
        m.trotec_cert_date = utils.today_alberta_tz()
        print('certified', m.first_name, m.last_name)
        m.save()
    else:
        print('skipping', m.first_name, m.last_name)

apiserver/scripts/import_vetted_count.py  (Executable file)
@@ -0,0 +1,21 @@
# Expects a old_counts.csv of the historical counts in format:
# date,vetted_count

import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import csv
from apiserver.api import models

with open('old_counts.csv', newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        print('Adding', row['date'], row['vetted_count'])

        models.StatsMemberCount.objects.update_or_create(
            date=row['date'],
            defaults=dict(vetted_count=row['vetted_count']),
        )

print('Done.')

apiserver/scripts/lockout_auth_update.py  (Executable file)
@@ -0,0 +1,98 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import datetime
import json
from django.utils.timezone import now, pytz
from apiserver.api import models, utils

# Member orientation
print('Updating member orientation dates')
sessions = models.Session.objects.filter(course = 249)

def get_member(obj):
    # same as in serialzers.py -> get_cards for example
    if obj.user:
        member = obj.user.member
    else:
        member = models.Member.objects.get(id=obj.member_id)
    return member


for session in sessions:
    students = models.Training.objects.filter(session = session)
    for student in students:
        if student.attendance_status == 'Attended':
            member = get_member(student)
            if not member.orientation_date:
                member.orientation_date = session.datetime.astimezone(pytz.timezone('America/Edmonton')).date()
                member.save()

# Lathe
print('Updating lathe training dates')
sessions = models.Session.objects.filter(course = 281)

for session in sessions:
    students = models.Training.objects.filter(session = session)
    for student in students:
        if student.attendance_status == 'Attended':
            member = get_member(student)
            if not member.lathe_cert_date:
                member.lathe_cert_date = session.datetime.astimezone(pytz.timezone('America/Edmonton')).date()
                member.save()

# Manual Mill
print('Updating mill training dates')
sessions = models.Session.objects.filter(course = 283)

for session in sessions:
    students = models.Training.objects.filter(session = session)
    for student in students:
        if student.attendance_status == 'Attended':
            member = get_member(student)
            if not member.mill_cert_date:
                member.mill_cert_date = session.datetime.astimezone(pytz.timezone('America/Edmonton')).date()
                member.save()


# Woodworking tools
print('Updating woodworking training dates')
sessions = models.Session.objects.filter(course = 261)

for session in sessions:
    students = models.Training.objects.filter(session = session)
    for student in students:
        if student.attendance_status == 'Attended':
            member = get_member(student)
            if not member.wood_cert_date:
                member.wood_cert_date = session.datetime.astimezone(pytz.timezone('America/Edmonton')).date()
                member.save()

# Woodworking-2 tools
print('Updating woodworking-2 training dates')
sessions = models.Session.objects.filter(course = 401)

for session in sessions:
    students = models.Training.objects.filter(session = session)
    for student in students:
        if student.attendance_status == 'Attended':
            member = get_member(student)
            if not member.wood2_cert_date:
                member.wood2_cert_date = session.datetime.astimezone(pytz.timezone('America/Edmonton')).date()
                member.save()

# CNC tools
print('Updating CNC training dates')
sessions = models.Session.objects.filter(course = 259)

for session in sessions:
    students = models.Training.objects.filter(session = session)
    for student in students:
        if student.attendance_status == 'Attended':
            member = get_member(student)
            if not member.cnc_cert_date:
                member.cnc_cert_date = session.datetime.astimezone(pytz.timezone('America/Edmonton')).date()
                member.save()

print('Done.')

apiserver/scripts/sync_member_statuses.py  (Executable file)
@@ -0,0 +1,17 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

from apiserver.api import models

members = models.Member.objects.all()
count = 0

for m in members:
    if m.paused_date and m.status in ['Prepaid', 'Current', 'Due', 'Overdue']:
        print('Setting', m.first_name, m.last_name, 'to Former Member.')
        m.status = 'Former Member'
        count += 1
        m.save()

print('Processed', count)

apiserver/scripts/train_paypal_ids.py  (Executable file)
@@ -0,0 +1,91 @@
import django, sys, os
os.environ['DJANGO_SETTINGS_MODULE'] = 'apiserver.settings'
django.setup()

import datetime
import json
from apiserver.api import models, old_models, utils

PAYPAL_FOLDER = 'old_paypal/'
transactions = models.Transaction.objects.all()

paypal_files = os.listdir(PAYPAL_FOLDER)
paypal_json = [x for x in paypal_files if x.endswith('.json')]

if paypal_json:
    print('Found paypal json files:', paypal_json)
else:
    print('Couldnt find any paypal json files in', PAYPAL_FOLDER)
    exit(1)

paypal_txs = []

for filename in paypal_json:
    with open(PAYPAL_FOLDER + filename) as f:
        j = json.load(f)
        paypal_txs.extend(j['transaction_details'])

print('Num transactions found:', len(paypal_txs))
print('Linking with portal transactions...')

paypal_accounts = {}

for t in paypal_txs:
    t_info = t['transaction_info']


    account_id = t_info.get('paypal_account_id', None)
    if not account_id:
        print('Skipping tx id: {}, no payer (could be bank tx)'.format(
            t_info['transaction_id'],
        ))
        continue

    if account_id not in paypal_accounts:
        paypal_accounts[account_id] = []


    reference = t_info['transaction_id'][:11]
    try:
        portal_tx = transactions.get(reference_number=reference)
        paypal_accounts[account_id].append(portal_tx.member_id)
    except models.Transaction.DoesNotExist:
        print('Unable to find portal transaction for id: {}, ref: {}, date: {}, name: {} {}, email: {}'.format(
            t_info['transaction_id'],
            reference,
            t_info['transaction_initiation_date'][:10],
            t['payer_info']['payer_name'].get('given_name', 'unknown'),
            t['payer_info']['payer_name'].get('surname', 'unknown'),
            t['payer_info'].get('email_address', 'unknown'),
        ))

print('Num paypal accounts found:', len(paypal_accounts))
print('Linking with portal members...')
count = 0

for account_id, member_ids in paypal_accounts.items():
    if len(member_ids) == 0:
        print('Skipping account {}, no members found'.format(
            account_id,
        ))
        continue

    member_id = member_ids[0]

    if len(set(member_ids)) > 1:
        print('Account {} has multiple members {}, assuming {}'.format(
            account_id,
            str(set(member_ids)),
            member_id,
        ))

    print(account_id, '-->', member_id)

    models.PayPalHint.objects.update_or_create(
        account=account_id,
        defaults=dict(member_id=member_id),
    )
    count += 1

print('Num paypal hints processed:', count)
print('Done.')

105  authserver/.gitignore  vendored  Normal file
@@ -0,0 +1,105 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

# Editor
*.swp
*.swo

secrets.py

17  authserver/README.md  Normal file
@@ -0,0 +1,17 @@
# Auth Server

Runs on Protospace's webhost and passes credentials around.

Exposes a REST API to Spaceport that allows setting wiki, etc. passwords.

## Setup

Basically the exact same as:

https://docs.my.protospace.ca/ldap.html

## License

This program is free and open-source software licensed under the MIT License. Please see the `LICENSE` file for details.

That means you have the right to study, change, and distribute the software and source code to anyone and for any purpose. You deserve these rights.
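
The REST API mentioned above is the one defined in `authserver/server.py` later in this diff: every route expects an `Authorization: Token <AUTH_TOKEN>` header, and the password routes take form-encoded fields. A minimal client-side sketch (the host name and token value are placeholders, not values from this repo):

```python
# Hypothetical Spaceport-side call to the auth server's /set-wiki-password
# route. AUTHSERVER_URL and AUTH_TOKEN are placeholders; the real token is
# whatever is configured in authserver/secrets.py.
import requests

AUTHSERVER_URL = 'https://auth.example.com'  # placeholder deployment URL
AUTH_TOKEN = 'changeme'                      # must match secrets.AUTH_TOKEN

def set_wiki_password(username, password):
    r = requests.post(
        AUTHSERVER_URL + '/set-wiki-password',
        headers={'Authorization': 'Token ' + AUTH_TOKEN},
        data={'username': username, 'password': password},  # form-encoded, per server.py
        timeout=10,
    )
    r.raise_for_status()

if __name__ == '__main__':
    set_wiki_password('some.member', 'example-password')
```

The same header convention applies to the other routes in `server.py`; only the two group-membership routes take a JSON body instead of form fields (see the sketch after `server.py` below).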

332  authserver/auth_functions.py  Normal file
@@ -0,0 +1,332 @@
from log import logger
import time
import secrets
import subprocess
import requests
from uuid import uuid4

from flask import abort

HTTP_NOTFOUND = 404

random_email = lambda: 'spaceport-' + str(uuid4()).split('-')[0] + '@protospace.ca'

def set_wiki_password(username, password):
    # sets a user's wiki password
    # creates the account if it doesn't exist

    if not secrets.WIKI_MAINTENANCE:
        logger.error('Wiki setting not configured, aborting')
        abort(400)

    if not username:
        logger.error('Empty username, aborting')
        abort(400)

    logger.info('Setting wiki password for: ' + username)

    if not password:
        logger.error('Empty password, aborting')
        abort(400)

    script = secrets.WIKI_MAINTENANCE + '/createAndPromote.php'

    result = subprocess.run(['php', script, '--force', username, password],
            shell=False, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    output = result.stdout or result.stderr
    output = output.strip()

    logger.info('Output: ' + output)

    if result.stderr:
        abort(400)

def discourse_api_get(url, params={}):
    headers = {
        'Api-Key': secrets.DISCOURSE_API_KEY,
        'Api-Username': secrets.DISCOURSE_API_USER,
    }
    response = requests.get(url, headers=headers, params=params, timeout=10)
    logger.debug('Response: %s %s', response.status_code, response.text)
    response.raise_for_status()
    return response

def discourse_api_put(url, data={}):
    headers = {
        'Api-Key': secrets.DISCOURSE_API_KEY,
        'Api-Username': secrets.DISCOURSE_API_USER,
    }
    response = requests.put(url, headers=headers, data=data, timeout=10)
    logger.debug('Response: %s %s', response.status_code, response.text)
    response.raise_for_status()
    return response

def discourse_api_post(url, data={}):
    headers = {
        'Api-Key': secrets.DISCOURSE_API_KEY,
        'Api-Username': secrets.DISCOURSE_API_USER,
    }
    response = requests.post(url, headers=headers, data=data, timeout=10)
    logger.debug('Response: %s %s', response.status_code, response.text)
    response.raise_for_status()
    return response

def discourse_api_delete(url, data={}):
    headers = {
        'Api-Key': secrets.DISCOURSE_API_KEY,
        'Api-Username': secrets.DISCOURSE_API_USER,
    }
    response = requests.delete(url, headers=headers, data=data, timeout=10)
    logger.debug('Response: %s %s', response.status_code, response.text)
    response.raise_for_status()
    return response

def discourse_rails_script(script):
    result = subprocess.run(['docker', 'exec', '-i', secrets.DISCOURSE_CONTAINER, 'rails', 'runner', script],
            shell=False, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60)
    output = result.stdout or result.stderr
    output = output.strip() or 'No complaints'
    return result, output

def get_discourse_group_id(group_name):
    logger.info('Getting the ID of group %s', group_name)

    url = 'https://forum.protospace.ca/groups/{}.json'.format(group_name)
    response = discourse_api_get(url)
    response = response.json()
    return response['group']['id']

def get_discourse_usernames():
    usernames = []

    response = discourse_api_get('https://forum.protospace.ca/groups/trust_level_0/members.json?limit=1000')
    response = response.json()

    for user in response['members']:
        usernames.append(user['username'])

    if len(usernames) == 1000:
        logger.error('Hit username limit, aborting!')
        abort(400)

    return usernames

def translate_usernames(portal_usernames, discourse_usernames):
    # the case of portal and discourse usernames might not match
    # this causes a problem if someone creates a discourse user
    # as John.Smith and later sets up a portal account as john.smith
    #
    # solution: look for usernames in discourse with the same letters,
    # and then convert to the discourse version when using the API

    result = []

    for pu in portal_usernames:
        for du in discourse_usernames:
            if pu.lower() == du.lower():
                result.append(du)
                break
        else:  # for
            result.append(pu)

    return result

def set_discourse_password(username, password, first_name, email):
    # sets a user's discourse password
    # creates the account if it doesn't exist
    # things to test:
    # - user changes Spaceport password
    # - user changes Spaceport password to same
    # - new Spaceport signup
    # - existing Discourse user Spaceport signup
    # - existing Discourse user Spaceport signup with same email
    # note: Spaceport emails are unconfirmed!!

    if not secrets.DISCOURSE_CONTAINER or not secrets.DISCOURSE_API_KEY or not secrets.DISCOURSE_API_USER:
        logger.error('Discourse setting not configured, aborting')
        abort(400)

    if not username:
        logger.error('Empty username, aborting')
        abort(400)

    if not password:
        logger.error('Empty password, aborting')
        abort(400)

    if not first_name:
        logger.error('Empty first_name, aborting')
        abort(400)

    if not email:
        logger.error('Empty email, aborting')
        abort(400)

    discourse_usernames = get_discourse_usernames()
    username = translate_usernames([username], discourse_usernames)[0]

    logger.info('Checking Discourse for existing email: ' + email)
    params = {
        'filter': email,
        'show_emails': 'true',
    }
    response = discourse_api_get('https://forum.protospace.ca/admin/users/list/active.json', params)
    response = response.json()

    for user in response:
        if user['email'].lower() == email.lower():
            if user['username'] == username:
                logger.info('Username match, skipping')
                continue

            new_email = random_email()
            logger.info('Email found on different user %s, changing to: %s', user['username'], new_email)

            script = 'UserEmail.find_by(email: "{}").update!(email: "{}")'.format(email, new_email)
            result, output = discourse_rails_script(script)

            logger.info('Confirming email change...')
            response = discourse_api_get('https://forum.protospace.ca/admin/users/list/active.json', params)
            if len(response.json()):
                logger.error('Email change failed, aborting')
                abort(400)

    user_exists = username in discourse_usernames

    if not user_exists:
        logger.info('Creating Discourse user for: ' + username)

        data = {
            'name': first_name,
            'username': username,
            'password': password,
            'email': email,
            'active': True,
            'approved': True,
            'user_fields[10]': 'Spaceport auth',
            'user_fields[11]': 'other',
        }
        response = discourse_api_post('https://forum.protospace.ca/users.json', data)
        response = response.json()
        logger.info('Response: %s', response)

        logger.info('Skipping set password')
        return True

    else:
        logger.info('User exists, setting Discourse password for: ' + username)

        script = 'User.find_by(username: "{}").update!(password: "{}")'.format(username, password)
        result, output = discourse_rails_script(script)

        if 'Password is the same' in result.stderr:
            logger.info('Output: Password is the same as your current password. (ActiveRecord::RecordInvalid)')
            return True
        else:
            logger.info('Output: ' + output)

        if result.stderr:
            abort(400)

def add_discourse_group_members(group_name, usernames):
    if not group_name:
        logger.error('Empty group_name, aborting')
        abort(400)

    if not usernames:
        logger.error('Empty usernames, aborting')
        abort(400)

    discourse_usernames = get_discourse_usernames()
    usernames = translate_usernames(usernames, discourse_usernames)
    usernames = set(usernames)
    group_id = get_discourse_group_id(group_name)

    logger.info('Filtering out usernames not on Discourse...')

    discourse_usernames = set(discourse_usernames)
    usernames = usernames & discourse_usernames

    logger.info('Filtering out usernames that are already group members...')

    url = 'https://forum.protospace.ca/groups/{}/members.json?limit=1000'.format(group_name)
    response = discourse_api_get(url)
    response = response.json()

    member_usernames = set([m['username'] for m in response['members']])
    usernames = usernames - member_usernames
    usernames = list(usernames)

    if not len(usernames):
        logger.info('Skipping, no one left to add')
        return True

    logger.info('Adding %s remaining usernames to the group...', len(usernames))

    url = 'https://forum.protospace.ca/groups/{}/members.json'.format(group_id)
    data = {
        'usernames': ','.join(usernames)
    }
    discourse_api_put(url, data)
    return True

def remove_discourse_group_members(group_name, usernames):
    if not group_name:
        logger.error('Empty group_name, aborting')
        abort(400)

    if not usernames:
        logger.error('Empty usernames, aborting')
        abort(400)

    discourse_usernames = get_discourse_usernames()
    usernames = translate_usernames(usernames, discourse_usernames)
    usernames = set(usernames)
    group_id = get_discourse_group_id(group_name)

    logger.info('Filtering out usernames not on Discourse...')

    discourse_usernames = set(discourse_usernames)
    usernames = usernames & discourse_usernames
    usernames = list(usernames)

    if not len(usernames):
        logger.info('Skipping, no one left to remove')
        return True

    logger.info('Removing %s remaining usernames from the group...', len(usernames))

    url = 'https://forum.protospace.ca/groups/{}/members.json'.format(group_id)
    data = {
        'usernames': ','.join(usernames)
    }
    discourse_api_delete(url, data)
    return True

def change_discourse_username(username, new_username):
    if not username:
        logger.error('Empty username, aborting')
        abort(400)

    if not new_username:
        logger.error('Empty new_username, aborting')
        abort(400)

    logger.info('Changing username %s to %s...', username, new_username)

    url = 'https://forum.protospace.ca/users/{}/preferences/username'.format(username)
    data = {
        'new_username': new_username,
    }
    discourse_api_put(url, data)
    return True

if __name__ == '__main__':
    #set_wiki_password('tanner.collin', 'protospace1')
    set_discourse_password('test8a', 'protospace1', 'testie', 'test8@example.com')
    #for u in get_discourse_usernames():
    #    print(u)
    #pass
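
The comment inside `translate_usernames()` above explains the case-mismatch problem it solves. As a quick standalone illustration of the same rule (the usernames here are made up, and the for/else construct is copied from the function above):

```python
# Standalone copy of the matching rule from translate_usernames(): prefer the
# Discourse spelling when the letters match case-insensitively, otherwise keep
# the portal spelling unchanged. Usernames below are made up.
def translate_usernames(portal_usernames, discourse_usernames):
    result = []
    for pu in portal_usernames:
        for du in discourse_usernames:
            if pu.lower() == du.lower():
                result.append(du)  # use Discourse's capitalization
                break
        else:  # runs only if the inner loop never hit break
            result.append(pu)      # no Discourse match, pass the portal name through
    return result

print(translate_usernames(['john.smith', 'new.member'], ['John.Smith', 'Other.User']))
# -> ['John.Smith', 'new.member']
```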

60  authserver/log.py  Normal file
@@ -0,0 +1,60 @@
import logging
import logging.config

class IgnorePing(logging.Filter):
    def filter(self, record):
        return 'GET /ping' not in record.getMessage()

LOG_DICT = {
    'version': 1,
    'formatters': {
        'default': {
            'format': '[%(asctime)s] [%(process)d] [%(levelname)7s] %(message)s',
        },
    },
    'filters': {
        'ignore_ping': {
            '()': 'log.IgnorePing',
        },
    },
    'handlers': {
        'wsgi': {
            'class': 'logging.StreamHandler',
            'filters': ['ignore_ping'],
            'stream': 'ext://flask.logging.wsgi_errors_stream',
            'formatter': 'default'
        },
        'console': {
            'level': 'DEBUG',
            'filters': ['ignore_ping'],
            'class': 'logging.StreamHandler',
            'formatter': 'default'
        },
        'null': {
            'level': 'DEBUG',
            'filters': ['ignore_ping'],
            'class': 'logging.NullHandler',
            'formatter': 'default'
        },
    },
    'loggers': {
        'gunicorn': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
    },
    'root': {
        'level': 'INFO',
        'handlers': ['wsgi']
    }
}

logging.config.dictConfig(LOG_DICT)
logger = logging.getLogger(__name__)

logger.info('Logging enabled.')

from logging_tree import printout
printout()

13  authserver/requirements.txt  Normal file
@@ -0,0 +1,13 @@
certifi==2021.5.30
charset-normalizer==2.0.4
click==7.1.2
Flask==1.1.2
gunicorn==20.1.0
idna==3.2
itsdangerous==1.1.0
Jinja2==2.11.3
logging-tree==1.9
MarkupSafe==1.1.1
requests==2.26.0
urllib3==1.26.6
Werkzeug==1.0.1

25  authserver/secrets.py.example  Normal file
@@ -0,0 +1,25 @@
# Auth server secrets file, don't commit to version control!

# Auth token, used by Spaceport to authenticate
# Set this to random characters
# For example, use the first output of this:
# head /dev/urandom | sha1sum
AUTH_TOKEN = ''

# Absolute path of Mediawiki maintenance directory
# Probably:
# /var/www/wiki/maintenance
WIKI_MAINTENANCE = ''

# The name of the Docker container.
# Find it with docker ps
# Probably something like:
# app
DISCOURSE_CONTAINER = 'app'

# API key created here:
# https://forum.protospace.ca/admin/api/keys
DISCOURSE_API_KEY = ''

# Username who created the API key
DISCOURSE_API_USER = ''
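
If you would rather generate `AUTH_TOKEN` without shelling out to `sha1sum`, the Python standard library can produce an equivalent random hex string. One wrinkle: the stdlib module is also called `secrets`, and the project's local `secrets.py` shadows it when Python is run from the `authserver/` directory, so run this from somewhere else:

```python
# Generate a random 40-character hex token, the same length as the sha1sum
# output suggested in secrets.py.example. The alias is only to make clear this
# is the standard-library secrets module, not the project's secrets.py.
import secrets as stdlib_secrets

print(stdlib_secrets.token_hex(20))  # paste the output into AUTH_TOKEN = '...'
```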

82  authserver/server.py  Normal file
@@ -0,0 +1,82 @@
from log import logger

from flask import Flask, abort, request
app = Flask(__name__)

import auth_functions
import secrets

HTTP_UNAUTHORIZED = 401

def check_auth():
    auth_header = request.headers.get('Authorization', '')
    if auth_header != 'Token ' + secrets.AUTH_TOKEN:
        logger.info('Bad auth token, aborting.')
        abort(HTTP_UNAUTHORIZED)

@app.route('/')
def index():
    logger.info('Index page requested')

    return '<i>LIFE IS BUT A DREAM...</i>'

@app.route('/ping')
def ping():
    return 'pong'

@app.route('/set-wiki-password', methods=['POST'])
def set_wiki_password():
    check_auth()

    username = request.form['username']
    password = request.form['password']

    auth_functions.set_wiki_password(username, password)
    return ''

@app.route('/set-discourse-password', methods=['POST'])
def set_discourse_password():
    check_auth()

    username = request.form['username']
    password = request.form['password']
    first_name = request.form['first_name']
    email = request.form['email']

    auth_functions.set_discourse_password(username, password, first_name, email)
    return ''

@app.route('/add-discourse-group-members', methods=['POST'])
def add_discourse_group_members():
    check_auth()

    data = request.get_json()
    group_name = data['group_name']
    usernames = data['usernames']

    auth_functions.add_discourse_group_members(group_name, usernames)
    return ''

@app.route('/remove-discourse-group-members', methods=['POST'])
def remove_discourse_group_members():
    check_auth()

    data = request.get_json()
    group_name = data['group_name']
    usernames = data['usernames']

    auth_functions.remove_discourse_group_members(group_name, usernames)
    return ''

@app.route('/change-discourse-username', methods=['POST'])
def change_discourse_username():
    check_auth()

    username = request.form['username']
    new_username = request.form['new_username']

    auth_functions.change_discourse_username(username, new_username)
    return ''

if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0')
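
One detail worth noting in `server.py` above: the two group-membership routes read a JSON body via `request.get_json()`, unlike the form-encoded password routes. A hedged client-side sketch, with placeholder host, token, group, and username values:

```python
# Hypothetical call to /add-discourse-group-members, which expects a JSON body
# (group_name plus a usernames list) rather than form fields. All values below
# are placeholders, not values from this repo.
import requests

AUTHSERVER_URL = 'https://auth.example.com'  # placeholder deployment URL
AUTH_TOKEN = 'changeme'                      # must match secrets.AUTH_TOKEN

r = requests.post(
    AUTHSERVER_URL + '/add-discourse-group-members',
    headers={'Authorization': 'Token ' + AUTH_TOKEN},
    json={'group_name': 'example_group', 'usernames': ['john.smith']},
    timeout=10,
)
r.raise_for_status()
```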
Some files were not shown because too many files have changed in this diff.