Compare commits
613 Commits
feature/bi
...
idhub
Author | SHA1 | Date |
---|---|---|
pedro | c4dff2afb6 | |
pedro | a0b14461b3 | |
pedro | b607eedf5f | |
Cayo Puigdefabregas | 8b4f4b2c6e | |
Cayo Puigdefabregas | 2a7a10178c | |
Cayo Puigdefabregas | 2283f20ab2 | |
Cayo Puigdefabregas | a4c7b2a744 | |
Cayo Puigdefabregas | da8d43f9f6 | |
Cayo Puigdefabregas | f0710e88ec | |
Cayo Puigdefabregas | 55839a26ea | |
Cayo Puigdefabregas | 39f0300a28 | |
Cayo Puigdefabregas | e5dbb09025 | |
Cayo Puigdefabregas | c948b0bca5 | |
Cayo Puigdefabregas | cd440b9931 | |
Cayo Puigdefabregas | 44b1a245b6 | |
Cayo Puigdefabregas | f9ec594a0e | |
pedro | 397e4978e2 | |
Cayo Puigdefabregas | 77dd11ea23 | |
Cayo Puigdefabregas | 2c039c0a12 | |
Cayo Puigdefabregas | 4d416f426c | |
Cayo Puigdefabregas | 74fe50b6fb | |
Cayo Puigdefabregas | 9fb2b1c94a | |
Cayo Puigdefabregas | 708dad64aa | |
Cayo Puigdefabregas | b680a3574d | |
Cayo Puigdefabregas | b6e9be306b | |
Cayo Puigdefabregas | 95700e7ba4 | |
Cayo Puigdefabregas | 5c02b424c1 | |
Cayo Puigdefabregas | 51e54450d8 | |
Cayo Puigdefabregas | 9100f315f9 | |
Cayo Puigdefabregas | 8b0d1f4b7d | |
Cayo Puigdefabregas | 46bfef2585 | |
Cayo Puigdefabregas | 660fe41b62 | |
Cayo Puigdefabregas | e687cef4a3 | |
Cayo Puigdefabregas | af338cfa4c | |
Cayo Puigdefabregas | 72c8fac29e | |
Cayo Puigdefabregas | ca273285fe | |
Cayo Puigdefabregas | 0197ddb4d1 | |
Cayo Puigdefabregas | 87e3c3e917 | |
Cayo Puigdefabregas | 5aaa88971a | |
Cayo Puigdefabregas | 16ae2c08da | |
Cayo Puigdefabregas | 6a75423532 | |
Cayo Puigdefabregas | c9238e8e6a | |
Cayo Puigdefabregas | 53289b0dfc | |
pedro | 14e37ef8c4 | |
Cayo Puigdefabregas | 1a5384e302 | |
Cayo Puigdefabregas | d2b4de7c41 | |
Cayo Puigdefabregas | fcc4b34424 | |
Cayo Puigdefabregas | 69d1873da4 | |
Cayo Puigdefabregas | a33613b21d | |
Cayo Puigdefabregas | 9721344244 | |
Cayo Puigdefabregas | 2806e7ab18 | |
Cayo Puigdefabregas | 37fb688f77 | |
Cayo Puigdefabregas | 6c7395c26f | |
Cayo Puigdefabregas | 18600af272 | |
Cayo Puigdefabregas | 278377090a | |
Cayo Puigdefabregas | fc7d7b4549 | |
Cayo Puigdefabregas | 543aad813d | |
Cayo Puigdefabregas | 950cc59cae | |
Cayo Puigdefabregas | ada42f291a | |
Cayo Puigdefabregas | ab4ec523c3 | |
pedro | 20ee5ae411 | |
pedro | 57eb978dc4 | |
pedro | c6ec665865 | |
pedro | 5a0990f22a | |
Cayo Puigdefabregas | 0ad35de5d6 | |
Cayo Puigdefabregas | aa966b5b93 | |
Cayo Puigdefabregas | bc3d3abcd7 | |
pedro | 9ca92f4c56 | |
Cayo Puigdefabregas | 15d6851043 | |
pedro | bcb4c69677 | |
pedro | b594022194 | |
pedro | c226138ff2 | |
Cayo Puigdefabregas | 6c3831d103 | |
Cayo Puigdefabregas | a7f5de96a5 | |
Cayo Puigdefabregas | 594fe1483f | |
Cayo Puigdefabregas | ece944ea3f | |
Cayo Puigdefabregas | ac3d318fc9 | |
Cayo Puigdefabregas | 0181bd34ae | |
Cayo Puigdefabregas | 5fa6f46acc | |
Cayo Puigdefabregas | 4ba7bcc956 | |
cayop | fde966ec13 | |
Cayo Puigdefabregas | cb0c7f1cb6 | |
Cayo Puigdefabregas | 68c342ee18 | |
Cayo Puigdefabregas | b614fad41f | |
Cayo Puigdefabregas | 7e088eefc8 | |
Cayo Puigdefabregas | 82bf535915 | |
Cayo Puigdefabregas | 843324bd17 | |
Cayo Puigdefabregas | 740007b804 | |
Cayo Puigdefabregas | c8ab0a959e | |
pedro | dce2873158 | |
pedro | 2dc40e95fe | |
pedro | 5a965e245e | |
pedro | 6a58dcc68f | |
pedro | 2c4b0006cc | |
pedro | 7a85ebd8f8 | |
pedro | 9dec42bd05 | |
pedro | 37069ff561 | |
pedro | b423a53cfe | |
pedro | 260ac90f86 | |
pedro | f37800dcd3 | |
pedro | 907bf2dba0 | |
Cayo Puigdefabregas | 0b70f42daa | |
Cayo Puigdefabregas | 8a7a9476fe | |
Cayo Puigdefabregas | 5069c793cf | |
Cayo Puigdefabregas | 0f26bf63c6 | |
Cayo Puigdefabregas | bf3474e3db | |
Cayo Puigdefabregas | 274c99db43 | |
Cayo Puigdefabregas | 2f250402e3 | |
Cayo Puigdefabregas | 3f86242bfb | |
Cayo Puigdefabregas | 5de416796e | |
Cayo Puigdefabregas | 0fb4fa5ba6 | |
Cayo Puigdefabregas | 33fc69013f | |
Cayo Puigdefabregas | d5f8b1ec75 | |
Cayo Puigdefabregas | 5aee8f3f8f | |
Cayo Puigdefabregas | 9c2f22c77a | |
Cayo Puigdefabregas | db706503fc | |
Cayo Puigdefabregas | 11f3b7730a | |
Cayo Puigdefabregas | 7857a85e8f | |
Cayo Puigdefabregas | ac02246ddc | |
Cayo Puigdefabregas | 33efa7ab75 | |
Cayo Puigdefabregas | 74789d66d1 | |
Cayo Puigdefabregas | 1a107bb2db | |
Cayo Puigdefabregas | 0d2dd2fcb1 | |
Cayo Puigdefabregas | 7f449aa95c | |
Cayo Puigdefabregas | 4f2cfe5c47 | |
Cayo Puigdefabregas | 1b4159d58b | |
Cayo Puigdefabregas | 13d36f5650 | |
Cayo Puigdefabregas | 9ff20740dd | |
Cayo Puigdefabregas | 0b0d9edaad | |
cayop | 77f39ef78c | |
cayop | 947deb45af | |
Cayo Puigdefabregas | 7c91314d4a | |
Cayo Puigdefabregas | 56b36ab244 | |
Cayo Puigdefabregas | 79cb5279e9 | |
Cayo Puigdefabregas | fdb4d90ab4 | |
Cayo Puigdefabregas | b5ae2b0629 | |
Cayo Puigdefabregas | 748516edaf | |
Cayo Puigdefabregas | 5e3af04a8c | |
Cayo Puigdefabregas | 472d742db2 | |
Cayo Puigdefabregas | 36c61d49ff | |
Cayo Puigdefabregas | 2f9a2edb44 | |
Cayo Puigdefabregas | 8762705cb5 | |
Cayo Puigdefabregas | 0438cbb509 | |
Cayo Puigdefabregas | e451668ff9 | |
Cayo Puigdefabregas | 945c0d42f9 | |
Cayo Puigdefabregas | 94ddc76e17 | |
Cayo Puigdefabregas | 453fa52963 | |
Cayo Puigdefabregas | 73fa8a6d28 | |
Cayo Puigdefabregas | a68784c94c | |
Cayo Puigdefabregas | 311ca3ca51 | |
Cayo Puigdefabregas | 1f78c184b5 | |
Cayo Puigdefabregas | 99f4c71ee1 | |
Cayo Puigdefabregas | a6684999a8 | |
Cayo Puigdefabregas | 8f333e04ae | |
Cayo Puigdefabregas | 57caf52c02 | |
Cayo Puigdefabregas | 52da9c99ba | |
Cayo Puigdefabregas | 7ddcb8ead0 | |
Cayo Puigdefabregas | 523ca3e892 | |
cayop | c1d03e9525 | |
Cayo Puigdefabregas | 5f1c7c8b4a | |
Cayo Puigdefabregas | fad008b25d | |
Cayo Puigdefabregas | 17c88ef4b1 | |
Cayo Puigdefabregas | 3c23c8ce09 | |
Cayo Puigdefabregas | 47e918dc07 | |
Cayo Puigdefabregas | ffaff20025 | |
cayop | e0f986f4fe | |
Cayo Puigdefabregas | 7638a6dab4 | |
Cayo Puigdefabregas | c27040296d | |
cayop | 56e04cf7c5 | |
Cayo Puigdefabregas | 1bdf0c8baa | |
Cayo Puigdefabregas | 80486136bd | |
cayop | 48604e1a8f | |
Cayo Puigdefabregas | 314e944208 | |
cayop | 7c032a2d27 | |
Cayo Puigdefabregas | dc07b4973b | |
Cayo Puigdefabregas | 57882ca2c5 | |
Cayo Puigdefabregas | 6aa643c197 | |
Cayo Puigdefabregas | 2543f7f761 | |
Cayo Puigdefabregas | 850398f7ed | |
Cayo Puigdefabregas | f7a60647b9 | |
cayop | a38c990412 | |
Cayo Puigdefabregas | 4390f681f5 | |
Cayo Puigdefabregas | bb6365c519 | |
Cayo Puigdefabregas | 2ef6e6cb49 | |
Cayo Puigdefabregas | 2c171d8e26 | |
Cayo Puigdefabregas | 4d5761ac02 | |
Cayo Puigdefabregas | 66e162db4d | |
Cayo Puigdefabregas | 79c2ecbd81 | |
Cayo Puigdefabregas | b29086e46d | |
Cayo Puigdefabregas | 9119143a63 | |
Cayo Puigdefabregas | 9671333635 | |
Cayo Puigdefabregas | e3b8543a12 | |
Cayo Puigdefabregas | 49d19ec38e | |
Cayo Puigdefabregas | 312f8a01bf | |
Cayo Puigdefabregas | c82db8caa0 | |
Cayo Puigdefabregas | 4db71b2af4 | |
Cayo Puigdefabregas | 4a6c82ef55 | |
Cayo Puigdefabregas | df6b09d051 | |
cayop | aab3addc20 | |
Cayo Puigdefabregas | 5ceeba3af7 | |
Cayo Puigdefabregas | f423d5ea34 | |
Cayo Puigdefabregas | 15b25d8aeb | |
Cayo Puigdefabregas | 2c2cd19688 | |
Cayo Puigdefabregas | 851fc123e1 | |
cayop | 1ffad380b3 | |
Cayo Puigdefabregas | 1f913eebaa | |
Cayo Puigdefabregas | ccbf2f98a7 | |
Cayo Puigdefabregas | efacba6aab | |
cayop | e4017cf5cc | |
Cayo Puigdefabregas | 35ffb8239f | |
Cayo Puigdefabregas | a95d643755 | |
Cayo Puigdefabregas | 80f7d102e1 | |
Cayo Puigdefabregas | 5af553b6a3 | |
Cayo Puigdefabregas | 8c69585800 | |
Cayo Puigdefabregas | 085c5151bc | |
Cayo Puigdefabregas | 7990f4518b | |
cayop | a42dfe5469 | |
Cayo Puigdefabregas | a3b71ec996 | |
Cayo Puigdefabregas | 2e2b346daa | |
Cayo Puigdefabregas | 631bfa2774 | |
Cayo Puigdefabregas | 5eea698695 | |
Cayo Puigdefabregas | b571b26433 | |
Cayo Puigdefabregas | 46860660e0 | |
Cayo Puigdefabregas | 838d9180ad | |
Cayo Puigdefabregas | 48be3bae64 | |
Cayo Puigdefabregas | 4e610f0903 | |
Cayo Puigdefabregas | 39b04f3709 | |
Cayo Puigdefabregas | b5a77ace2f | |
Cayo Puigdefabregas | 7b2bfd095c | |
Cayo Puigdefabregas | 7a128e6e7f | |
Cayo Puigdefabregas | 3cf87f7f95 | |
Cayo Puigdefabregas | ebdb6949c9 | |
Cayo Puigdefabregas | 8e54f34519 | |
cayop | 31629b3f16 | |
Cayo Puigdefabregas | 906dceed56 | |
Cayo Puigdefabregas | acc5f6ed78 | |
Cayo Puigdefabregas | 02752ee2b8 | |
cayop | b8593bd63f | |
Cayo Puigdefabregas | e845709027 | |
Cayo Puigdefabregas | 7b9c33ca4f | |
Cayo Puigdefabregas | 9cafc4f72b | |
cayop | 8ba853b14a | |
Cayo Puigdefabregas | 4e5dbe8cd1 | |
Cayo Puigdefabregas | c9f996dd8e | |
Cayo Puigdefabregas | d3b624fbd1 | |
Cayo Puigdefabregas | 0547e4cf32 | |
Cayo Puigdefabregas | e365c366f4 | |
Cayo Puigdefabregas | e649d65b5d | |
cayop | 9da841fc1f | |
Cayo Puigdefabregas | 0e3aa1ce04 | |
Cayo Puigdefabregas | 075dc8d5b0 | |
Cayo Puigdefabregas | 750d3e7db6 | |
cayop | a47e99ce0b | |
Cayo Puigdefabregas | 8f5835fa4f | |
Cayo Puigdefabregas | f9ed33d46d | |
Cayo Puigdefabregas | 119b4938c0 | |
Cayo Puigdefabregas | e224f22b85 | |
Cayo Puigdefabregas | 1a7c2f3a01 | |
Cayo Puigdefabregas | ce7693dd9b | |
Cayo Puigdefabregas | dcae036271 | |
cayop | 6dce475256 | |
Cayo Puigdefabregas | 0ce12bbfba | |
cayop | 9e7a1ec5bd | |
Cayo Puigdefabregas | 6692233a22 | |
Cayo Puigdefabregas | b6b8c6a1f9 | |
Cayo Puigdefabregas | ae5992f4c0 | |
cayop | 2e0173b7dc | |
Cayo Puigdefabregas | cd4d1bb095 | |
Cayo Puigdefabregas | 2f27095c84 | |
Cayo Puigdefabregas | 645bdf3750 | |
Cayo Puigdefabregas | 3a08347276 | |
cayop | 4059dc3a7a | |
Cayo Puigdefabregas | 509a445480 | |
Cayo Puigdefabregas | ca0b31059f | |
Cayo Puigdefabregas | 7d228a61f9 | |
cayop | 8f8883242e | |
Cayo Puigdefabregas | 8b69962374 | |
Cayo Puigdefabregas | d1c332e891 | |
Cayo Puigdefabregas | 1515302d98 | |
cayop | 290c20d46e | |
Cayo Puigdefabregas | 4b9f1c02b9 | |
Cayo Puigdefabregas | 745b9966df | |
Cayo Puigdefabregas | f3926e3b92 | |
Cayo Puigdefabregas | 309b266fe9 | |
Cayo Puigdefabregas | e6c07851d4 | |
Cayo Puigdefabregas | e6f91db4e4 | |
Cayo Puigdefabregas | 83f1e4c18f | |
Cayo Puigdefabregas | 01ef359bd4 | |
Cayo Puigdefabregas | e624ab7a7a | |
Cayo Puigdefabregas | 2ff6f40228 | |
cayop | 91d13808a7 | |
Cayo Puigdefabregas | 6a14727f31 | |
Cayo Puigdefabregas | a7aae591aa | |
Cayo Puigdefabregas | ad52bad3f6 | |
Cayo Puigdefabregas | ad1e5e06d9 | |
Cayo Puigdefabregas | 7dc7ca2026 | |
cayop | 494a14c7f0 | |
Cayo Puigdefabregas | c94801deab | |
Cayo Puigdefabregas | a27dc0914c | |
Cayo Puigdefabregas | e559ea30da | |
Cayo Puigdefabregas | 174928872f | |
Cayo Puigdefabregas | ddcd1697e6 | |
Cayo Puigdefabregas | 6fa5a25a4a | |
Cayo Puigdefabregas | d1abc8075f | |
cayop | 56c970e810 | |
Cayo Puigdefabregas | e7833c1727 | |
Cayo Puigdefabregas | 21d251e6c0 | |
Cayo Puigdefabregas | b140dc5f89 | |
cayop | d97eb08cce | |
Cayo Puigdefabregas | d0688cc751 | |
cayop | 25794f2fb4 | |
Cayo Puigdefabregas | 498d2ec92f | |
Cayo Puigdefabregas | cf5150b7c9 | |
Cayo Puigdefabregas | ba02351f59 | |
Cayo Puigdefabregas | 1c82fcfa30 | |
Cayo Puigdefabregas | 20ccb385d8 | |
Cayo Puigdefabregas | 6d722bb19f | |
Cayo Puigdefabregas | 93d6502a66 | |
Cayo Puigdefabregas | 6b0110adda | |
Cayo Puigdefabregas | de24cae235 | |
cayop | e6de54873e | |
Cayo Puigdefabregas | 40a151df5a | |
Cayo Puigdefabregas | 54372ad2f9 | |
Cayo Puigdefabregas | eb945ae348 | |
Cayo Puigdefabregas | ab6f89c3d2 | |
Cayo Puigdefabregas | eed1075771 | |
Cayo Puigdefabregas | 7534df083c | |
Cayo Puigdefabregas | 0ff2bcae92 | |
Cayo Puigdefabregas | 8f58bcb24e | |
Cayo Puigdefabregas | 327e5f20cb | |
Cayo Puigdefabregas | 1e62af56a6 | |
Cayo Puigdefabregas | 2da17d06c0 | |
Cayo Puigdefabregas | 5bfd69f785 | |
Cayo Puigdefabregas | c8fb5db63c | |
Cayo Puigdefabregas | b6b5e5d29d | |
Cayo Puigdefabregas | 12b196fd8e | |
cayop | ea2d446595 | |
Cayo Puigdefabregas | be271d59ea | |
Cayo Puigdefabregas | 6b9965f57e | |
Cayo Puigdefabregas | 7f6acf2db8 | |
Cayo Puigdefabregas | e900f5f298 | |
Cayo Puigdefabregas | 69cb07d55a | |
Cayo Puigdefabregas | fb413671fc | |
Cayo Puigdefabregas | 26ed0f3577 | |
Cayo Puigdefabregas | c9d23d5e6a | |
Cayo Puigdefabregas | 5f9abe83d6 | |
Cayo Puigdefabregas | 26f8d191fb | |
nad | c24460009e | |
nad | 5bdcf4da7f | |
Cayo Puigdefabregas | ef8825568f | |
Cayo Puigdefabregas | 599f15d5ae | |
Cayo Puigdefabregas | 1907f2508a | |
Cayo Puigdefabregas | 80013bcc90 | |
Cayo Puigdefabregas | 680a7b89e2 | |
Cayo Puigdefabregas | 12d64aefdc | |
Cayo Puigdefabregas | 8207ca9ab2 | |
Cayo Puigdefabregas | 8cd7777fc6 | |
cayop | c58dc367e6 | |
Cayo Puigdefabregas | ead6f3af78 | |
Cayo Puigdefabregas | 77c8e2181f | |
Cayo Puigdefabregas | 8efccf4f58 | |
Cayo Puigdefabregas | 6b54521d50 | |
Cayo Puigdefabregas | 184a9877bc | |
Cayo Puigdefabregas | 2553a9cdd0 | |
Cayo Puigdefabregas | 4055365bc9 | |
cayop | 6fc6402397 | |
Stephan Fortelny | bed2c534b5 | |
Cayo Puigdefabregas | 799c003a09 | |
Cayo Puigdefabregas | 7a8cbe11a6 | |
cayop | bea63148b2 | |
Cayo Puigdefabregas | 7dc3ac9530 | |
Cayo Puigdefabregas | 6b5db31922 | |
cayop | cbf2d607a1 | |
Cayo Puigdefabregas | 00c07161e0 | |
Nadeu | 38b77c111a | |
cayop | c961573c91 | |
Cayo Puigdefabregas | 6c9334fa76 | |
Cayo Puigdefabregas | de9b525737 | |
Cayo Puigdefabregas | 32dc4445e4 | |
Cayo Puigdefabregas | e7e595f2c2 | |
Cayo Puigdefabregas | 45a787aa39 | |
Cayo Puigdefabregas | 574ab36da4 | |
Cayo Puigdefabregas | fbe8600cc1 | |
Cayo Puigdefabregas | 9053c89f47 | |
Cayo Puigdefabregas | 8ed1e2296d | |
Cayo Puigdefabregas | 8dd926de80 | |
Nadeu | 253371376d | |
Nadeu | 850278c297 | |
Cayo Puigdefabregas | faa7c1d605 | |
cayop | 149bc1c6f6 | |
Cayo Puigdefabregas | 9db949cef2 | |
Cayo Puigdefabregas | c26d2d69b9 | |
Cayo Puigdefabregas | f8d418b4a9 | |
Cayo Puigdefabregas | a572ef0507 | |
Cayo Puigdefabregas | 6fc802e159 | |
Cayo Puigdefabregas | 520f1726be | |
Cayo Puigdefabregas | 5e7d95ba31 | |
cayop | 12aa5e917d | |
Lint Action | 00310a3228 | |
Cayo Puigdefabregas | 7789d972eb | |
Cayo Puigdefabregas | 23f4a0e412 | |
Cayo Puigdefabregas | ac4e87d2b3 | |
Cayo Puigdefabregas | bc12258db0 | |
Cayo Puigdefabregas | 5a0692216a | |
cayop | b77c81c16a | |
Cayo Puigdefabregas | a265af8864 | |
Cayo Puigdefabregas | 9603ac1f43 | |
cayop | 3606f9dee6 | |
Cayo Puigdefabregas | f929283f4f | |
Cayo Puigdefabregas | 350aad19c4 | |
Cayo Puigdefabregas | 9c2bc7d7fa | |
Cayo Puigdefabregas | 50856671ed | |
Nadeu | 8a8feb3165 | |
Cayo Puigdefabregas | 14ce3892ac | |
Cayo Puigdefabregas | bf5c3d6abc | |
Cayo Puigdefabregas | 62de2126c7 | |
Cayo Puigdefabregas | 5d88d4e516 | |
Cayo Puigdefabregas | 205e111e9d | |
Cayo Puigdefabregas | 2bb6b13e07 | |
Cayo Puigdefabregas | 31b62a2d81 | |
Cayo Puigdefabregas | 84e937026e | |
Cayo Puigdefabregas | b3b4e8cd2b | |
cayop | 14b61cbaeb | |
Cayo Puigdefabregas | 1924d7fe71 | |
Cayo Puigdefabregas | 0f7c9504b0 | |
Cayo Puigdefabregas | 30474fa7e6 | |
Cayo Puigdefabregas | 83d62b4cb4 | |
Cayo Puigdefabregas | 63c7b29233 | |
Cayo Puigdefabregas | c066d8a2af | |
Cayo Puigdefabregas | 2cc677a555 | |
Cayo Puigdefabregas | fc36218124 | |
Cayo Puigdefabregas | a0c1cce69a | |
Cayo Puigdefabregas | 1b11162522 | |
Cayo Puigdefabregas | da1feef746 | |
Cayo Puigdefabregas | 27910f2c20 | |
Cayo Puigdefabregas | 756925d657 | |
Cayo Puigdefabregas | 254a32d989 | |
Cayo Puigdefabregas | 9736ee323a | |
Cayo Puigdefabregas | fac6380bcf | |
Cayo Puigdefabregas | 4debac4fbe | |
Cayo Puigdefabregas | 5e8cf78751 | |
Cayo Puigdefabregas | d6bee33601 | |
Cayo Puigdefabregas | 4b13995b4f | |
Cayo Puigdefabregas | 534adbba75 | |
cayop | 01a3a97c2b | |
Cayo Puigdefabregas | 9a39f3ae9d | |
Cayo Puigdefabregas | e3f01f4795 | |
Cayo Puigdefabregas | 94a586f00b | |
Cayo Puigdefabregas | 39f19f676b | |
Cayo Puigdefabregas | 8c979d7741 | |
Cayo Puigdefabregas | a0e63b2ae9 | |
cayop | 046a902005 | |
Cayo Puigdefabregas | a2626a0b58 | |
Cayo Puigdefabregas | 7c6290bd89 | |
Cayo Puigdefabregas | 4fa6f9f343 | |
Cayo Puigdefabregas | 341a7b2034 | |
Cayo Puigdefabregas | fe9c2c1c2d | |
cayop | f1042b627b | |
Cayo Puigdefabregas | 7bc4a7d387 | |
Cayo Puigdefabregas | e36f813d92 | |
Cayo Puigdefabregas | 7be17ead58 | |
Cayo Puigdefabregas | 0a6f9e3bf4 | |
Cayo Puigdefabregas | 18281c95e5 | |
Cayo Puigdefabregas | 3f5ae9fefd | |
Cayo Puigdefabregas | 785f72692a | |
cayop | f49ba1f7ba | |
Cayo Puigdefabregas | 9fbdaf5834 | |
Cayo Puigdefabregas | b1ad7b151d | |
cayop | 0233a2ba6b | |
cayop | 4dff032fe6 | |
Cayo Puigdefabregas | d2a4c0ef83 | |
Cayo Puigdefabregas | 4be07130c2 | |
Cayo Puigdefabregas | 040421172f | |
Cayo Puigdefabregas | c392270676 | |
Cayo Puigdefabregas | b84f379468 | |
cayop | b98be622e1 | |
Cayo Puigdefabregas | 9ee24c3e82 | |
Cayo Puigdefabregas | 07f947b1ad | |
Cayo Puigdefabregas | 79fbe323a2 | |
Cayo Puigdefabregas | b39388bef0 | |
Cayo Puigdefabregas | 994cd3c6ca | |
Cayo Puigdefabregas | 9da321a925 | |
Lint Action | 368d82c007 | |
Cayo Puigdefabregas | 0b6cfb210d | |
cayop | 6eb820b69d | |
Cayo Puigdefabregas | 3757873b02 | |
Cayo Puigdefabregas | f9d679547f | |
Cayo Puigdefabregas | 5e2dd3344b | |
Cayo Puigdefabregas | abc773fe6e | |
Cayo Puigdefabregas | 789eb1b526 | |
Cayo Puigdefabregas | e7cf069a33 | |
Cayo Puigdefabregas | 88b13961fe | |
Cayo Puigdefabregas | 3a952324c2 | |
cayop | 4ae9eeb4c7 | |
Cayo Puigdefabregas | eee61dca8e | |
Cayo Puigdefabregas | a544549a81 | |
Cayo Puigdefabregas | 0f033c8d83 | |
Cayo Puigdefabregas | d7500a2596 | |
Cayo Puigdefabregas | 925ce473e6 | |
Cayo Puigdefabregas | ade9fbd018 | |
Cayo Puigdefabregas | 1efc650bea | |
Cayo Puigdefabregas | a9504b0b71 | |
Cayo Puigdefabregas | 9d8fb6b04a | |
Cayo Puigdefabregas | ac535cc9b5 | |
cayop | 1ab6606cf0 | |
Cayo Puigdefabregas | 796f49f6cf | |
Cayo Puigdefabregas | 7bf879bbcf | |
Cayo Puigdefabregas | 67cf797c6c | |
cayop | 5d8c26ade6 | |
Cayo Puigdefabregas | 2d67c9afd2 | |
Cayo Puigdefabregas | 15d9c86104 | |
Cayo Puigdefabregas | 0e85b81857 | |
Cayo Puigdefabregas | 6fd87ac8e8 | |
Cayo Puigdefabregas | fb430f2138 | |
Cayo Puigdefabregas | 35dd08c9c8 | |
Cayo Puigdefabregas | a09e087b0e | |
Cayo Puigdefabregas | d1eed13b0a | |
Cayo Puigdefabregas | 905716f74b | |
Cayo Puigdefabregas | c263dc0ae0 | |
Cayo Puigdefabregas | 9b08c083ce | |
Cayo Puigdefabregas | f0ffe27671 | |
Cayo Puigdefabregas | 81b28b2663 | |
Cayo Puigdefabregas | d030ed9b23 | |
Cayo Puigdefabregas | 236fcac0b8 | |
Cayo Puigdefabregas | 0ae12e71a6 | |
cayop | 4bf27706db | |
Cayo Puigdefabregas | 17df239599 | |
cayop | 3809ae76e1 | |
Cayo Puigdefabregas | efa9693f16 | |
Cayo Puigdefabregas | b99be1a144 | |
cayop | 6addddd974 | |
Cayo Puigdefabregas | 79dff1d361 | |
cayop | cf5a7651a0 | |
Cayo Puigdefabregas | 25f2287e98 | |
cayop | fe33f2d7e7 | |
Cayo Puigdefabregas | 2c01183604 | |
Cayo Puigdefabregas | 1b7d37b50a | |
Cayo Puigdefabregas | 4df33e8808 | |
cayop | 48de1d5f47 | |
Cayo Puigdefabregas | 79877d09a1 | |
cayop | 3d658a9051 | |
Cayo Puigdefabregas | 41eb390e39 | |
Cayo Puigdefabregas | d8541d917c | |
cayop | 375a2de5c1 | |
Cayo Puigdefabregas | b2205c56f9 | |
cayop | c784fb7499 | |
Cayo Puigdefabregas | 09a0a30386 | |
Cayo Puigdefabregas | c14f40fccb | |
Cayo Puigdefabregas | 74d10e1951 | |
Cayo Puigdefabregas | 6ee62eb415 | |
Cayo Puigdefabregas | 6c7ae1bb75 | |
Cayo Puigdefabregas | eae036d7c0 | |
Cayo Puigdefabregas | 1cb0634bf1 | |
cayop | 3a7a8d6e1d | |
Cayo Puigdefabregas | 04a3ec99de | |
cayop | 2252fe93cd | |
Cayo Puigdefabregas | 67cfac6adc | |
Cayo Puigdefabregas | 4bbec903ef | |
Cayo Puigdefabregas | 7041bfcf76 | |
Cayo Puigdefabregas | 045b7b4f95 | |
Cayo Puigdefabregas | 61f2507ead | |
cayop | eb1649f506 | |
Cayo Puigdefabregas | 44985d07a1 | |
cayop | 0aa4696fe0 | |
Cayo Puigdefabregas | 563e2ec652 | |
Cayo Puigdefabregas | 7c4a3bd770 | |
Cayo Puigdefabregas | e30990cc1b | |
Cayo Puigdefabregas | 2a8da254d9 | |
Cayo Puigdefabregas | 9a157eaaee | |
cayop | dfdbf8f1f7 | |
Cayo Puigdefabregas | 694663c412 | |
Cayo Puigdefabregas | 9d7869929c | |
Cayo Puigdefabregas | 4c2f37dfec | |
cayop | 95ac4e794b | |
Cayo Puigdefabregas | 4fa4a4c553 | |
Cayo Puigdefabregas | b73526768b | |
cayop | 9d3b54c593 | |
Cayo Puigdefabregas | aba6548d9c | |
Cayo Puigdefabregas | a5bf59c561 | |
cayop | 13605abbaf | |
Cayo Puigdefabregas | 72b139a2bc | |
Cayo Puigdefabregas | da7f03aa53 | |
Cayo Puigdefabregas | 61bd4e0642 | |
Cayo Puigdefabregas | 469ce1c6e0 | |
Cayo Puigdefabregas | c93c143cc8 | |
Cayo Puigdefabregas | 9e86f0e3ae | |
Cayo Puigdefabregas | c2d81b1b29 | |
Cayo Puigdefabregas | c8a4b16667 | |
cayop | f33499cb23 | |
Cayo Puigdefabregas | 2a32ba47b8 | |
cayop | 177fbe1109 | |
Cayo Puigdefabregas | 106ab2f650 | |
Cayo Puigdefabregas | fc86181311 | |
Cayo Puigdefabregas | 5d5ed0c552 | |
cayop | 00e40cbdeb | |
Cayo Puigdefabregas | fb46f48461 | |
Cayo Puigdefabregas | fc297ee5f8 | |
Cayo Puigdefabregas | fee98f9d30 | |
Cayo Puigdefabregas | 2e31af4dfa | |
cayop | fa990fe050 | |
Cayo Puigdefabregas | c9a3dbf6c6 | |
cayop | 99dfe40984 | |
Cayo Puigdefabregas | a54942a8a8 | |
Cayo Puigdefabregas | ce09de4133 | |
Cayo Puigdefabregas | 6b173de5a8 | |
Cayo Puigdefabregas | c8c542189a | |
Cayo Puigdefabregas | b77d7abaa4 | |
Cayo Puigdefabregas | b66871e9c2 | |
Cayo Puigdefabregas | a9d6ac22f9 | |
Cayo Puigdefabregas | 9184edf9b1 | |
Cayo Puigdefabregas | 6357a15f01 | |
Cayo Puigdefabregas | d36008934a | |
Cayo Puigdefabregas | 76c5100fa3 |
|
@ -127,3 +127,16 @@ yarn.lock
|
||||||
# ESLint Report
|
# ESLint Report
|
||||||
eslint_report.json
|
eslint_report.json
|
||||||
|
|
||||||
|
# modules/
|
||||||
|
tmp/
|
||||||
|
.env*
|
||||||
|
bin/
|
||||||
|
env*
|
||||||
|
examples/create-db2.sh
|
||||||
|
package-lock.json
|
||||||
|
snapshots/
|
||||||
|
!examples/snapshots
|
||||||
|
modules/
|
||||||
|
|
||||||
|
# emacs
|
||||||
|
*~
|
||||||
|
|
88
CHANGELOG.md
88
CHANGELOG.md
|
@ -7,6 +7,94 @@ ml).
|
||||||
|
|
||||||
## testing
|
## testing
|
||||||
|
|
||||||
|
## [2.5.3] - 2023-05-13
|
||||||
|
- [added] #450 add new datawipe in csv.
|
||||||
|
- [changed] #447 Share a lot between 2 users, one is owner the other is read only.
|
||||||
|
- [changed] #448 enhancements in export lots.
|
||||||
|
- [changed] #449 remove button of submit in filter of list of devices.
|
||||||
|
- [changed] #452 New version of settings for workbench.
|
||||||
|
- [fixed] #445 required File for new documents bat optional for edit document.
|
||||||
|
- [fixed] #446 Fix id_supplier and id_internal in export devices.
|
||||||
|
- [fixed] #451 fix new datawipe in certificate erasure.
|
||||||
|
- [fixed] #453 fix value method in certificate erasure.
|
||||||
|
- [fixed] #454 remove validation of email for placeholders type mobile.
|
||||||
|
- [fixed] #455 add placeholders in csv metrics and pdf certificate.
|
||||||
|
- [fixed] #456 upload placeholders with type datastorage.
|
||||||
|
- [fixed] #457 change format erase datawipe.
|
||||||
|
- [fixed] #458 not datawipe for placeholders computers.
|
||||||
|
|
||||||
|
## [2.5.2] - 2023-04-20
|
||||||
|
- [added] #414 add new vars in the settings file for wb.
|
||||||
|
- [added] #440 add lots in export devices.
|
||||||
|
- [added] #441 allow remove documents.
|
||||||
|
- [added] #442 allow edit documents.
|
||||||
|
- [added] #443 add documents to devices.
|
||||||
|
- [added] #444 add new columns in list of documents.
|
||||||
|
- [changed] #439 move teal as internal module.
|
||||||
|
- [fixed] #437 replace names erasure by sanitization in templates.
|
||||||
|
|
||||||
|
## [2.5.1] - 2023-03-17
|
||||||
|
- [changed] #423 new hid.
|
||||||
|
- [changed] #426 new version of public page of device.
|
||||||
|
- [changed] #427 update links of terms and condotions.
|
||||||
|
- [changed] #428 only the data storage allow syncrinize, the rest are duplicate.
|
||||||
|
- [changed] #430 new version of erasure certificate.
|
||||||
|
- [fixed] #416 fix dhid in snapshot logs.
|
||||||
|
- [fixed] #419 fix settings version and template.
|
||||||
|
- [fixed] #420 not appear all lots in the dropdown menu for select the a lot.
|
||||||
|
- [fixed] #421 fix remove a placeholder from one old trade lot.
|
||||||
|
- [fixed] #422 fix simple datatables.
|
||||||
|
- [fixed] #424 fix new hid.
|
||||||
|
- [fixed] #431 fix forms for customer details.
|
||||||
|
- [fixed] #432 fix erasure certificate for a servers.
|
||||||
|
- [fixed] #433 fix get the last incoming for show customer datas in certificate.
|
||||||
|
- [fixed] #434 fix reopen transfer.
|
||||||
|
- [fixed] #436 fix hid in erasure certificate.
|
||||||
|
|
||||||
|
## [2.5.0] - 2022-11-30
|
||||||
|
- [added] #407 erasure section with tabs in top.
|
||||||
|
- [added] #411 add new generic device as Other.
|
||||||
|
- [changed] #409 add backend pagination instead of javascript.
|
||||||
|
- [changed] #410 change teh top search for advanced search.
|
||||||
|
- [fixed] #412 show in snapshots log, type upload correctly.
|
||||||
|
- [fixed] #413 put order in documents.
|
||||||
|
- [fixed] #415 put prefix of lot in result of search.
|
||||||
|
|
||||||
|
## [2.4.3] - 2022-11-18
|
||||||
|
- [added] #386 add registration module.
|
||||||
|
- [added] #387 add template settings for Secure Erasure.
|
||||||
|
- [added] #397 add obada standard export.
|
||||||
|
- [added] #402 add reset password module.
|
||||||
|
- [added] #406 add orphans disks page.
|
||||||
|
- [changed] #391 add dhid in table and export of Erasure section.
|
||||||
|
- [changed] #395 change response for the new api to workbench.
|
||||||
|
- [changed] #396 modularize commands.
|
||||||
|
- [fixed] #388 lock update different motherboard with the same id.
|
||||||
|
- [fixed] #389 some datastorage without placeholder.
|
||||||
|
- [fixed] #390 fix image in form edit device.
|
||||||
|
- [fixed] #398 placeholder in new components.
|
||||||
|
- [fixed] #399 add api_host in config.
|
||||||
|
- [fixed] #401 db_host need to be api address.
|
||||||
|
- [fixed] #403 change delimiter in obada export.
|
||||||
|
- [fixed] #404 javascript select all devices.
|
||||||
|
- [fixed] #405 update pillow.
|
||||||
|
|
||||||
|
## [2.4.2] - 2022-10-18
|
||||||
|
- [added] #373 Enhancement - UX Lots.
|
||||||
|
- [added] #377 add prefix in lots in device list.
|
||||||
|
- [added] #378 add new button transfer.
|
||||||
|
- [added] #381 add servers erase and show storage disk in list of device.
|
||||||
|
- [added] #383 new setup page and add server_erase in placeholder.
|
||||||
|
- [added] #384 add redirect snapshot to twin public page.
|
||||||
|
- [changed] #371 changes phid.
|
||||||
|
- [changed] #372 remove logo.
|
||||||
|
- [changed] #374 changes links UI management and Data Storage Erasure.
|
||||||
|
- [changed] #375 changes columns in snapshot logs.
|
||||||
|
- [changed] #379 changes representation date times.
|
||||||
|
- [fixed] #380 fix layout print label.
|
||||||
|
- [fixed] #382 fix template device list.
|
||||||
|
- [fixed] #385 components in unbinding process.
|
||||||
|
|
||||||
## [2.4.1] - 2022-10-05
|
## [2.4.1] - 2022-10-05
|
||||||
- [added] #365 Manage dependencies using pip-tools.
|
- [added] #365 Manage dependencies using pip-tools.
|
||||||
- [added] #368 add migrations of monitors and mobiles.
|
- [added] #368 add migrations of monitors and mobiles.
|
||||||
|
|
|
@ -0,0 +1,43 @@
|
||||||
|
# Definitions
|
||||||
|
* A dpp is two hash strings joined by the character ":"
|
||||||
|
We call the first chain chid and the second phid.
|
||||||
|
|
||||||
|
* The chid and phid are hash strings of certain values.
|
||||||
|
We call the set of these values Documents.
|
||||||
|
Here we define these values.
|
||||||
|
|
||||||
|
## Chid
|
||||||
|
The chid is the part of dpp that defines a device, be it a computer,
|
||||||
|
a hard drive, etc. The chid is the most important part of a dpp since
|
||||||
|
anyone who comes across a device should be able to play it.
|
||||||
|
|
||||||
|
The chid is made up of four values:
|
||||||
|
* type
|
||||||
|
* manufacturer
|
||||||
|
* model
|
||||||
|
* serial_number
|
||||||
|
|
||||||
|
type represents the device type according to the devicehub.
|
||||||
|
|
||||||
|
These values are always represented in lowercase.
|
||||||
|
These values have to be ordered and concatenated with the character "-"
|
||||||
|
|
||||||
|
So:
|
||||||
|
|
||||||
|
{type}-{manufacturer}-{model}-{serial_number}
|
||||||
|
|
||||||
|
For example:
|
||||||
|
```
|
||||||
|
harddrive-seagate-st500lt0121dg15-s3p9a81f
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
In computer types this combination is not perfect and **can lead to collisions**.
|
||||||
|
That is why we need a value that is reliable and comes from the manufacturer.
|
||||||
|
|
||||||
|
## Phid
|
||||||
|
The values of the phid do not have to be reproducible. For this reason, each inventory can establish its own values and its order as a document.
|
||||||
|
It is important that each inventory store the document in string so that it can reproduce exactly the document that was hashed. So a document can be verifiable.
|
||||||
|
|
||||||
|
In the case of the DeviceHub, we use as the chid document all the values that the Workbench collects that describe the hardware's own data.
|
||||||
|
These data change depending on the version of the Workbench used.
|
|
@ -0,0 +1,49 @@
|
||||||
|
project := dkr-dsg.ac.upc.edu/ereuse
|
||||||
|
|
||||||
|
branch := `git branch --show-current`
|
||||||
|
commit := `git log -1 --format=%h`
|
||||||
|
#tag := ${branch}__${commit}
|
||||||
|
tag := latest
|
||||||
|
|
||||||
|
# docker images
|
||||||
|
devicehub_image := ${project}/devicehub:${tag}
|
||||||
|
postgres_image := ${project}/postgres:${tag}
|
||||||
|
|
||||||
|
# 2. Create a virtual environment.
|
||||||
|
docker_build:
|
||||||
|
docker build -f docker/devicehub.Dockerfile -t ${devicehub_image} .
|
||||||
|
# DEBUG
|
||||||
|
#docker build -f docker/devicehub.Dockerfile -t ${devicehub_image} . --progress=plain --no-cache
|
||||||
|
|
||||||
|
docker build -f docker/postgres.Dockerfile -t ${postgres_image} .
|
||||||
|
# DEBUG
|
||||||
|
#docker build -f docker/postgres.Dockerfile -t ${postgres_image} . --progress=plain --no-cache
|
||||||
|
@printf "\n##########################\n"
|
||||||
|
@printf "\ndevicehub image: ${devicehub_image}\n"
|
||||||
|
@printf "postgres image: ${postgres_image}\n"
|
||||||
|
@printf "\ndocker images built\n"
|
||||||
|
@printf "\n##########################\n\n"
|
||||||
|
|
||||||
|
docker_publish:
|
||||||
|
docker push ${devicehub_image}
|
||||||
|
docker push ${postgres_image}
|
||||||
|
|
||||||
|
.PHONY: docker
|
||||||
|
docker:
|
||||||
|
$(MAKE) docker_build
|
||||||
|
$(MAKE) docker_publish
|
||||||
|
@printf "\ndocker images published\n"
|
||||||
|
|
||||||
|
# manage 2 kinds of deployments with docker compose
|
||||||
|
|
||||||
|
dc_up_devicehub:
|
||||||
|
docker compose -f docker-compose_devicehub.yml up || true
|
||||||
|
|
||||||
|
dc_down_devicehub:
|
||||||
|
docker compose -f docker-compose_devicehub.yml down -v || true
|
||||||
|
|
||||||
|
dc_up_devicehub_dpp:
|
||||||
|
docker compose -f docker-compose_devicehub-dpp.yml up || true
|
||||||
|
|
||||||
|
dc_down_devicehub_dpp:
|
||||||
|
docker compose -f docker-compose_devicehub-dpp.yml down -v || true
|
203
README.md
203
README.md
|
@ -1,151 +1,122 @@
|
||||||
# Devicehub
|
# Devicehub
|
||||||
|
|
||||||
Devicehub is a distributed IT Asset Management System focused in reusing devices, created under the project [eReuse.org](https://www.ereuse.org)
|
Devicehub is a distributed IT Asset Management System focused on reusing digital devices, created under the [eReuse.org](https://www.ereuse.org) initiative.
|
||||||
|
|
||||||
This README explains how to install and use Devicehub. [The documentation](http://devicehub.ereuse.org) explains the concepts and the API.
|
This README explains how to install and use Devicehub. [The documentation](http://devicehub.ereuse.org) explains the concepts, usage and the API it provides.
|
||||||
|
|
||||||
Devicehub is built with [Teal](https://github.com/ereuse/teal) and [Flask](http://flask.pocoo.org).
|
Devicehub is built with [Teal](https://github.com/ereuse/teal) and [Flask](http://flask.pocoo.org).
|
||||||
|
|
||||||
|
Devicehub relies on the existence of an [API_DLT connector](https://gitlab.com/dsg-upc/ereuse-dpp) verifiable data registry service, where specific operations are recorded to keep an external track record (ledger).
|
||||||
|
|
||||||
# Installing
|
# Installing
|
||||||
The requirements are:
|
Please visit the [Manual Installation](README_MANUAL_INSTALLATION.md) instructions to understand the detailed steps to install it locally or deploy it on a server. However, we recommend the following Docker deployment process.
|
||||||
|
|
||||||
- Python 3.7.3 or higher. In debian 10 is `# apt install python3`.
|
# Docker
|
||||||
- [PostgreSQL 11 or higher](https://www.postgresql.org/download/).
|
There is a Docker compose file for an automated deployment. Two instances of DeviceHub will be deployed. The following steps describe how to run and use it.
|
||||||
- Weasyprint [dependencie](http://weasyprint.readthedocs.io/en/stable/install.html)
|
|
||||||
|
|
||||||
Install Devicehub with *pip*: `pip3 install -U -r requirements.txt -e .`
|
1. Download the sources:
|
||||||
|
```
|
||||||
# Running
|
git clone https://github.com/eReuse/devicehub-teal.git -b oidc4vp
|
||||||
Create a PostgreSQL database called *devicehub* by running [create-db](examples/create-db.sh):
|
cd devicehub-teal
|
||||||
|
|
||||||
- In Linux, execute the following two commands (adapt them to your distro):
|
|
||||||
|
|
||||||
1. `sudo su - postgres`.
|
|
||||||
2. `bash examples/create-db.sh devicehub dhub`, and password `ereuse`.
|
|
||||||
|
|
||||||
- In MacOS: `bash examples/create-db.sh devicehub dhub`, and password `ereuse`.
|
|
||||||
|
|
||||||
Configure project using environment file (you can use provided example as quickstart):
|
|
||||||
```bash
|
|
||||||
$ cp examples/env.example .env
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Using the `dh` tool for set up with one or multiple inventories.
|
2. If you want to initialise one of DeviceHub instances (running on port 5000) with sample device snapshots, copy it/them into that directory. e.g.
|
||||||
Create the tables in the database by executing:
|
```
|
||||||
|
cp snapshot01.json examples/snapshots/
|
||||||
```bash
|
|
||||||
$ export dhi=dbtest; dh inv add --common --name dbtest
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Finally, run the app:
|
Otherwise, the device inventory of your DeviceHub instance will be empty and ready to add new devices. For that (no snapshot import), you need to change the var to 'n' in the **.env** file
|
||||||
|
```
|
||||||
```bash
|
IMPORT_SNAPSHOTS='n'
|
||||||
$ export dhi=dbtest;dh run --debugger
|
|
||||||
```
|
```
|
||||||
|
|
||||||
The error ‘bdist_wheel’ can happen when you work with a *virtual environment*.
|
To register new devices, the [workbench software](https://github.com/eReuse/workbench) can be run on a device to generate its hardware snapshot that can be uploaded to one of the two DeviceHub instance.
|
||||||
To fix it, install in the *virtual environment* wheel
|
|
||||||
package. `pip3 install wheel`
|
|
||||||
|
|
||||||
## Multiple instances
|
3. Setup the environment variables in the .env file. You can find one example in examples/env.example.
|
||||||
|
If you don't have any, you can copy that example and modify the basic vars
|
||||||
|
```
|
||||||
|
cp examples/env.example .env
|
||||||
|
```
|
||||||
|
You can use these parameters as default for a local test, but default values may not be suitable for an internet-exposed service for security reasons. However, these six variables need to be initialised:
|
||||||
|
```
|
||||||
|
API_DLT
|
||||||
|
API_DLT_TOKEN
|
||||||
|
API_RESOLVER
|
||||||
|
ABAC_TOKEN
|
||||||
|
ABAC_USER
|
||||||
|
ABAC_URL
|
||||||
|
SERVER_ID_FEDERATED
|
||||||
|
CLIENT_ID_FEDERATED
|
||||||
|
```
|
||||||
|
The first six values should come from an already operational [API_DLT connector](https://gitlab.com/dsg-upc/ereuse-dpp) service instance.
|
||||||
|
|
||||||
Devicehub can run as a single inventory or with multiple inventories, each inventory being an instance of the `devicehub`. To add a new inventory execute:
|
For the last two values check [manual install step 9]('https://github.com/eReuse/devicehub-teal/blob/oidc4vp/README_MANUAL_INSTALLATION.md#installing') for more details.
|
||||||
```bash
|
|
||||||
$ export dhi=dbtest; dh inv add --name dbtest
|
4. Build and run the docker containers:
|
||||||
|
```
|
||||||
|
./launcher.sh
|
||||||
|
```
|
||||||
|
To stop these docker containers, you can use Ctl+C. You'll maintain the data and infrastructure state if you run "compose up" again.
|
||||||
|
|
||||||
|
On the terminal screen, you can follow the installation steps. If there are any problems, error messages will appear here. The appearance of several warnings is normal and can be ignored.
|
||||||
|
|
||||||
|
If the last line you see one text like this, *exited with code*:
|
||||||
|
```
|
||||||
|
devicehub-teal-devicehub-id-client-1 exited with code 1
|
||||||
|
```
|
||||||
|
means the installation failed.
|
||||||
|
|
||||||
|
If the deployment was end-to-end successful (two running Devicehub instances successfully connected to the DLT backend selected in the .env file), you can see this text in the last lines:
|
||||||
|
```
|
||||||
|
devicehub-teal-devicehub-id-client-1 | * Running on http://172.28.0.2:5000/ (Press CTRL+C to quit)
|
||||||
|
devicehub-teal-devicehub-id-server-1 | * Running on all addresses.
|
||||||
|
devicehub-teal-devicehub-id-server-1 | WARNING: This is a development server. Do not use it in a production deployment.
|
||||||
|
devicehub-teal-devicehub-id-server-1 | * Running on http://172.28.0.5:5000/ (Press CTRL+C to quit)
|
||||||
```
|
```
|
||||||
|
|
||||||
Note: The `dh` command is like `flask`, but it allows you to create and delete instances, and interface to them directly.
|
That means the two Devicehub instances are running in their containers, which can be reached as http://localhost:5000/ and http://localhost:5001/
|
||||||
|
|
||||||
|
Once the DeviceHub instances are running, you might want to register a user binding to the DLT with the following commands (here, it assumes you want to execute it on devicehub-id-client, you might also want to do it in devicehub-id-server). Change the variables accordingly
|
||||||
|
|
||||||
# Testing
|
```
|
||||||
|
FILE=my_users_devicehub.json
|
||||||
1. `git clone` this project.
|
DOCKER_SERVICE=devicehub-id-server
|
||||||
2. Create a database for testing executing `create-db.sh` like the normal installation but changing the first parameter from `devicehub` to `dh_test`: `create-db.sh dh_test dhub` and password `ereuse`.
|
docker compose cp /path/to/${FILE} ${DOCKER_SERVICE}:/tmp/
|
||||||
3. Execute at the root folder of the project `python3 setup.py test`.
|
docker compose exec ${DOCKER_SERVICE} flask dlt_register_user /tmp/${FILE}
|
||||||
|
|
||||||
|
|
||||||
# Migrations
|
|
||||||
|
|
||||||
At this stage, migration files are created manually.
|
|
||||||
Set up the database:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ sudo su - postgres
|
|
||||||
$ bash $PATH_TO_DEVIHUBTEAL/examples/create-db.sh devicehub dhub
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Initialize the database:
|
**my_users_devicehub.json** is a custom file which is similar to the one provided in `examples/users_devicehub.json`
|
||||||
|
|
||||||
```bash
|
5. To shut down the services and remove the corresponding data, you can use:
|
||||||
$ export dhi=dbtest; dh inv add --common --name dbtest
|
```
|
||||||
|
docker compose down -v
|
||||||
```
|
```
|
||||||
|
|
||||||
This command will create the schemas, tables in the specified database.
|
If you want to enter a shell inside a **new instance of the container**:
|
||||||
Then we need to stamp the initial migration.
|
```
|
||||||
|
docker run -it --entrypoint= ${target_docker_image} bash
|
||||||
```bash
|
|
||||||
$ alembic stamp head
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you want to enter a shell on an **already running container**:
|
||||||
This command will set the revision **fbb7e2a0cde0_initial** as our initial migration.
|
```
|
||||||
For more info in migration stamping please see https://alembic.sqlalchemy.org/en/latest/cookbook.html
|
docker exec -it ${target_docker_image} bash
|
||||||
|
|
||||||
|
|
||||||
Whenever a change needed eg to create a new schema, alter an existing table, column or perform any
|
|
||||||
operation on tables, create a new revision file:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ alembic revision -m "A table change"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
This command will create a new revision file with name `<revision_id>_a_table_change`.
|
To know the valid value for ${target_docker_image} you can use:
|
||||||
Edit the generated file with the necessary operations to perform the migration:
|
```
|
||||||
|
docker ps
|
||||||
```bash
|
|
||||||
$ alembic edit <revision_id>
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Apply migrations using:
|
6. These are the details for use in this implementation:
|
||||||
|
|
||||||
```bash
|
Devicehub with URL (http://localhost:5000) is the identity provider of OIDC and have a user defined in **.env** file with SERVER_ID_EMAIL_DEMO var.
|
||||||
$ alembic -x inventory=dbtest upgrade head
|
|
||||||
|
Devicehub with URL (http://localhost:5001) is the client identity of OIDC and have a user defined in **.env** file with SERVER_ID_EMAIL_DEMO var.
|
||||||
|
|
||||||
|
You can change these values in the *.env* file
|
||||||
|
|
||||||
|
7. If you want to use Workbench for these DeviceHub instances, you need to go to
|
||||||
```
|
```
|
||||||
Then to go back to previous db version:
|
http://localhost:5001/workbench/
|
||||||
|
|
||||||
```bash
|
|
||||||
$ alembic -x inventory=dbtest downgrade <revision_id>
|
|
||||||
```
|
```
|
||||||
|
with the demo user and then download the settings and ISO files. Follow the instructions on the [help](https://help.usody.com/en/setup/setup-pendrive/) page.
|
||||||
To see a full list of migrations use
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ alembic history
|
|
||||||
```
|
|
||||||
|
|
||||||
# Upgrade a deployment
|
|
||||||
|
|
||||||
For upgrade an instance of devicehub you need to do:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ cd $PATH_TO_DEVIHUBTEAL
|
|
||||||
$ source venv/bin/activate
|
|
||||||
$ git pull
|
|
||||||
$ alembic -x inventory=dbtest upgrade head
|
|
||||||
```
|
|
||||||
|
|
||||||
If all migrations pass successfully, then it is necessary restart the devicehub.
|
|
||||||
Normaly you can use a little script for restart.
|
|
||||||
```
|
|
||||||
# sh gunicorn_api.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## Generating the docs
|
|
||||||
|
|
||||||
|
|
||||||
1. `git clone` this project.
|
|
||||||
2. Install plantuml. In Debian 9 is `# apt install plantuml`.
|
|
||||||
3. Execute `pip3 install -e .[docs]` in the project root folder.
|
|
||||||
4. Go to `<project root folder>/docs` and execute `make html`. Repeat this step to generate new docs.
|
|
||||||
|
|
||||||
To auto-generate the docs do `pip3 install -e .[docs-auto]`, then execute, in the root folder of the project `sphinx-autobuild docs docs/_build/html`.
|
|
||||||
|
|
|
@ -0,0 +1,187 @@
|
||||||
|
# Devicehub
|
||||||
|
|
||||||
|
Devicehub is a distributed IT Asset Management System focused in reusing devices, created under the project [eReuse.org](https://www.ereuse.org)
|
||||||
|
|
||||||
|
This README explains how to install and use Devicehub. [The documentation](http://devicehub.ereuse.org) explains the concepts and the API.
|
||||||
|
|
||||||
|
Devicehub is built with [Teal](https://github.com/ereuse/teal) and [Flask](http://flask.pocoo.org).
|
||||||
|
|
||||||
|
# Installing
|
||||||
|
The requirements are:
|
||||||
|
|
||||||
|
0. Required
|
||||||
|
- python3.9
|
||||||
|
- [PostgreSQL 11 or higher](https://www.postgresql.org/download/).
|
||||||
|
- Weasyprint [dependencie](http://weasyprint.readthedocs.io/en/stable/install.html)
|
||||||
|
|
||||||
|
1. Generate a clone of the repository.
|
||||||
|
```
|
||||||
|
git clone git@github.com:eReuse/devicehub-teal.git -b oidc4vp
|
||||||
|
cd devicehub-teal
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Create a virtual environment and install Devicehub with *pip*.
|
||||||
|
```
|
||||||
|
python3.9 -m venv env
|
||||||
|
source env/bin/activate
|
||||||
|
sh examples/pip_install.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Create a PostgreSQL database called *devicehub* by running [create-db](examples/create-db.sh):
|
||||||
|
|
||||||
|
- In Linux, execute the following two commands (adapt them to your distro):
|
||||||
|
|
||||||
|
1. `sudo su - postgres`.
|
||||||
|
2. `bash examples/create-db.sh devicehub dhub`, and password `ereuse`.
|
||||||
|
|
||||||
|
- In MacOS: `bash examples/create-db.sh devicehub dhub`, and password `ereuse`.
|
||||||
|
|
||||||
|
Configure project using environment file (you can use provided example as quickstart):
|
||||||
|
```bash
|
||||||
|
$ cp examples/env.example .env
|
||||||
|
```
|
||||||
|
You can use these parameters as default for a local test, but default values may not be suitable for an internet-exposed service for security reasons. However, these six variables need to be initialized:
|
||||||
|
```
|
||||||
|
API_DLT
|
||||||
|
API_DLT_TOKEN
|
||||||
|
API_RESOLVER
|
||||||
|
ABAC_TOKEN
|
||||||
|
ABAC_USER
|
||||||
|
ABAC_URL
|
||||||
|
```
|
||||||
|
These values should come from an already operational [API_DLT connector](https://gitlab.com/dsg-upc/ereuse-dpp) service instance.
|
||||||
|
|
||||||
|
4. Running alembic from oidc module.
|
||||||
|
```
|
||||||
|
alembic -x inventory=dbtest upgrade head
|
||||||
|
```
|
||||||
|
|
||||||
|
5. Running alembic from oidc module.
|
||||||
|
```
|
||||||
|
cd ereuse_devicehub/modules/oidc
|
||||||
|
alembic -x inventory=dbtest upgrade head
|
||||||
|
```
|
||||||
|
|
||||||
|
6. Running alembic from dpp module.
|
||||||
|
```
|
||||||
|
cd ereuse_devicehub/modules/dpp/
|
||||||
|
alembic -x inventory=dbtest upgrade head
|
||||||
|
```
|
||||||
|
|
||||||
|
7. Add a suitable app.py file.
|
||||||
|
```
|
||||||
|
cp examples/app.py .
|
||||||
|
```
|
||||||
|
|
||||||
|
8. Generate a minimal data structure.
|
||||||
|
```
|
||||||
|
flask initdata
|
||||||
|
```
|
||||||
|
|
||||||
|
9. Add a new server to the 'api resolver' to be able to integrate it into the federation.
|
||||||
|
The domain name for this new server has to be unique. When installing two instances their domain name must differ: e.g. dpp.mydomain1.cxm, dpp.mydomain2.cxm.
|
||||||
|
If your domain is dpp.mydomain.cxm:
|
||||||
|
```
|
||||||
|
flask dlt_insert_members http://dpp.mydomain.cxm
|
||||||
|
```
|
||||||
|
|
||||||
|
modify the .env file as indicated in point 3.
|
||||||
|
Add the corresponding 'DH' in ID_FEDERATED.
|
||||||
|
example: ID_FEDERATED='DH10'
|
||||||
|
|
||||||
|
10. Do a rsync api resolve.
|
||||||
|
```
|
||||||
|
flask dlt_rsync_members
|
||||||
|
```
|
||||||
|
|
||||||
|
11. Register a new user in devicehub.
|
||||||
|
```
|
||||||
|
flask adduser email@example.org password
|
||||||
|
```
|
||||||
|
|
||||||
|
12. Register a new user to the DLT.
|
||||||
|
```
|
||||||
|
flask dlt_register_user examples/users_devicehub.json
|
||||||
|
```
|
||||||
|
You need define your users in the file **users_devicehub.json**
|
||||||
|
|
||||||
|
13. Finally, run the app:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ flask run --debugger
|
||||||
|
```
|
||||||
|
|
||||||
|
The error ‘bdist_wheel’ can happen when you work with a *virtual environment*.
|
||||||
|
To fix it, install in the *virtual environment* wheel
|
||||||
|
package. `pip3 install wheel`
|
||||||
|
|
||||||
|
# Testing
|
||||||
|
|
||||||
|
1. `git clone` this project.
|
||||||
|
2. Create a database for testing executing `create-db.sh` like the normal installation but changing the first parameter from `devicehub` to `dh_test`: `create-db.sh dh_test dhub` and password `ereuse`.
|
||||||
|
3. Execute at the root folder of the project `python3 setup.py test`.
|
||||||
|
|
||||||
|
# Upgrade a deployment
|
||||||
|
|
||||||
|
For upgrade an instance of devicehub you need to do:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ cd $PATH_TO_DEVIHUBTEAL
|
||||||
|
$ source venv/bin/activate
|
||||||
|
$ git pull
|
||||||
|
$ alembic -x inventory=dbtest upgrade head
|
||||||
|
```
|
||||||
|
|
||||||
|
If all migrations pass successfully, then it is necessary restart the devicehub.
|
||||||
|
Normaly you can use a little script for restart or run.
|
||||||
|
```
|
||||||
|
# systemctl stop gunicorn_devicehub.socket
|
||||||
|
# systemctl stop gunicorn_devicehub.service
|
||||||
|
# systemctl start gunicorn_devicehub.service
|
||||||
|
```
|
||||||
|
|
||||||
|
# OpenId Connect:
|
||||||
|
We want to interconnect two devicehub instances already installed. One has a set of devices (OIDC client), the other has a set of users (OIDC identity server). Let's assume their domains are: dpp.mydomain1.cxm, dpp.mydomain2.cxm
|
||||||
|
20. In order to connect the two devicehub instances, it is necessary:
|
||||||
|
* 20.1. Register a user in the devicehub instance acting as OIDC identity server.
|
||||||
|
* 20.2. Fill in the openid connect form.
|
||||||
|
* 20.3. Add in the OIDC client inventory the data of client_id, client_secret.
|
||||||
|
|
||||||
|
For 20.1. This can be achieved on the terminal on the devicehub instance acting as OIDC identity server.
|
||||||
|
```
|
||||||
|
flask adduser email@example.org password
|
||||||
|
```
|
||||||
|
|
||||||
|
* 20.2. This is an example of how to fill in the form.
|
||||||
|
|
||||||
|
In the web interface of the OIDC identity service, click on the profile of the just added user, select "My Profile" and click on "OpenID Connect":
|
||||||
|
Then we can go to the "OpenID Connect" panel and fill out the form:
|
||||||
|
|
||||||
|
The important thing about this form is:
|
||||||
|
* "Client URL" The URL of the OIDC Client instance, as registered in point 12. dpp.mydomain1.cxm in our example.
|
||||||
|
* "Allowed Scope" has to have these three words:
|
||||||
|
```
|
||||||
|
openid profile rols
|
||||||
|
```
|
||||||
|
* "Redirect URIs" it has to be the URL that was put in "Client URL" plus "/allow_code"
|
||||||
|
* "Allowed Grant Types" has to be "authorization_code"
|
||||||
|
* "Allowed Response Types" has to be "code"
|
||||||
|
* "Token Endpoint Auth Method" has to be "Client Secret Basic"
|
||||||
|
|
||||||
|
After clicking on "Submit" the "OpenID Connect" tab of the user profile should now include details for "client_id" and "client_secret".
|
||||||
|
|
||||||
|
* 20.3. In the OIDC client inventory run: (in our example: url_domain is dpp.mydomain2.cxm, client_id and client_secret as resulting from the previous step)
|
||||||
|
```
|
||||||
|
flask add_client_oidc url_domain client_id client_secret
|
||||||
|
```
|
||||||
|
After this step, both servers must be connected. Opening one DPP page on dpp.mydomain1.cxm (OIDC Client) the user can choose to authenticate using dpp.mydomain2.cxm (OIDC Server).
|
||||||
|
|
||||||
|
## Generating the docs
|
||||||
|
|
||||||
|
|
||||||
|
1. `git clone` this project.
|
||||||
|
2. Install plantuml. In Debian 9 is `# apt install plantuml`.
|
||||||
|
3. Execute `pip3 install -e .[docs]` in the project root folder.
|
||||||
|
4. Go to `<project root folder>/docs` and execute `make html`. Repeat this step to generate new docs.
|
||||||
|
|
||||||
|
To auto-generate the docs do `pip3 install -e .[docs-auto]`, then execute, in the root folder of the project `sphinx-autobuild docs docs/_build/html`.
|
|
@ -0,0 +1 @@
|
||||||
|
docker-compose_devicehub-dpp.yml
|
|
@ -0,0 +1,103 @@
|
||||||
|
version: "3.9"
|
||||||
|
services:
|
||||||
|
|
||||||
|
devicehub-id-server:
|
||||||
|
init: true
|
||||||
|
image: dkr-dsg.ac.upc.edu/ereuse/devicehub:latest
|
||||||
|
environment:
|
||||||
|
- DB_USER=${DB_USER}
|
||||||
|
- DB_PASSWORD=${DB_PASSWORD}
|
||||||
|
- DB_HOST=postgres-id-server
|
||||||
|
- DB_DATABASE=${DB_DATABASE}
|
||||||
|
- HOST=${HOST}
|
||||||
|
- EMAIL_DEMO=${SERVER_ID_EMAIL_DEMO}
|
||||||
|
- PASSWORD_DEMO=${PASSWORD_DEMO}
|
||||||
|
- JWT_PASS=${JWT_PASS}
|
||||||
|
- SECRET_KEY=${SECRET_KEY}
|
||||||
|
- API_DLT=${API_DLT}
|
||||||
|
- API_RESOLVER=${API_RESOLVER}
|
||||||
|
- API_DLT_TOKEN=${API_DLT_TOKEN}
|
||||||
|
- DEVICEHUB_HOST=${SERVER_ID_DEVICEHUB_HOST}
|
||||||
|
- ID_FEDERATED=${SERVER_ID_FEDERATED}
|
||||||
|
- URL_MANUALS=${URL_MANUALS}
|
||||||
|
- ID_SERVICE=${SERVER_ID_SERVICE}
|
||||||
|
- AUTHORIZED_CLIENT_URL=${CLIENT_ID_DEVICEHUB_HOST}
|
||||||
|
- DPP_MODULE=y
|
||||||
|
- IMPORT_SNAPSHOTS=${IMPORT_SNAPSHOTS}
|
||||||
|
ports:
|
||||||
|
- 5000:5000
|
||||||
|
volumes:
|
||||||
|
- ${SNAPSHOTS_PATH:-./examples/snapshots}:/mnt/snapshots:ro
|
||||||
|
- shared:/shared:rw
|
||||||
|
- app_id_server:/opt/devicehub:rw
|
||||||
|
|
||||||
|
postgres-id-server:
|
||||||
|
image: dkr-dsg.ac.upc.edu/ereuse/postgres:latest
|
||||||
|
# 4. To create the database.
|
||||||
|
# 5. Give permissions to the corresponding users in the database.
|
||||||
|
# extra src https://github.com/docker-library/docs/blob/master/postgres/README.md#environment-variables
|
||||||
|
environment:
|
||||||
|
- POSTGRES_PASSWORD=${DB_PASSWORD}
|
||||||
|
- POSTGRES_USER=${DB_USER}
|
||||||
|
- POSTGRES_DB=${DB_DATABASE}
|
||||||
|
# DEBUG
|
||||||
|
#ports:
|
||||||
|
# - 5432:5432
|
||||||
|
# TODO persistence
|
||||||
|
#volumes:
|
||||||
|
# - pg_data:/var/lib/postgresql/data
|
||||||
|
|
||||||
|
devicehub-id-client:
|
||||||
|
init: true
|
||||||
|
image: dkr-dsg.ac.upc.edu/ereuse/devicehub:latest
|
||||||
|
environment:
|
||||||
|
- DB_USER=${DB_USER}
|
||||||
|
- DB_PASSWORD=${DB_PASSWORD}
|
||||||
|
- DB_HOST=postgres-id-client
|
||||||
|
- DB_DATABASE=${DB_DATABASE}
|
||||||
|
- HOST=${HOST}
|
||||||
|
- EMAIL_DEMO=${CLIENT_ID_EMAIL_DEMO}
|
||||||
|
- PASSWORD_DEMO=${PASSWORD_DEMO}
|
||||||
|
- JWT_PASS=${JWT_PASS}
|
||||||
|
- SECRET_KEY=${SECRET_KEY}
|
||||||
|
- API_DLT=${API_DLT}
|
||||||
|
- API_RESOLVER=${API_RESOLVER}
|
||||||
|
- API_DLT_TOKEN=${API_DLT_TOKEN}
|
||||||
|
- DEVICEHUB_HOST=${CLIENT_ID_DEVICEHUB_HOST}
|
||||||
|
- SERVER_ID_HOST=${SERVER_ID_DEVICEHUB_HOST}
|
||||||
|
- ID_FEDERATED=${CLIENT_ID_FEDERATED}
|
||||||
|
- URL_MANUALS=${URL_MANUALS}
|
||||||
|
- ID_SERVICE=${CLIENT_ID_SERVICE}
|
||||||
|
- DPP_MODULE=y
|
||||||
|
- IMPORT_SNAPSHOTS=${IMPORT_SNAPSHOTS}
|
||||||
|
ports:
|
||||||
|
- 5001:5000
|
||||||
|
volumes:
|
||||||
|
- ${SNAPSHOTS_PATH:-./examples/snapshots}:/mnt/snapshots:ro
|
||||||
|
- shared:/shared:ro
|
||||||
|
- app_id_client:/opt/devicehub:rw
|
||||||
|
|
||||||
|
postgres-id-client:
|
||||||
|
image: dkr-dsg.ac.upc.edu/ereuse/postgres:latest
|
||||||
|
# 4. To create the database.
|
||||||
|
# 5. Give permissions to the corresponding users in the database.
|
||||||
|
# extra src https://github.com/docker-library/docs/blob/master/postgres/README.md#environment-variables
|
||||||
|
environment:
|
||||||
|
- POSTGRES_PASSWORD=${DB_PASSWORD}
|
||||||
|
- POSTGRES_USER=${DB_USER}
|
||||||
|
- POSTGRES_DB=${DB_DATABASE}
|
||||||
|
# DEBUG
|
||||||
|
#ports:
|
||||||
|
# - 5432:5432
|
||||||
|
# TODO persistence
|
||||||
|
#volumes:
|
||||||
|
# - pg_data:/var/lib/postgresql/data
|
||||||
|
|
||||||
|
|
||||||
|
# TODO https://testdriven.io/blog/dockerizing-django-with-postgres-gunicorn-and-nginx/
|
||||||
|
#nginx
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
shared:
|
||||||
|
app_id_client:
|
||||||
|
app_id_server:
|
|
@ -0,0 +1,54 @@
|
||||||
|
version: "3.9"
|
||||||
|
services:
|
||||||
|
|
||||||
|
devicehub:
|
||||||
|
init: true
|
||||||
|
image: dkr-dsg.ac.upc.edu/ereuse/devicehub:dpp__c6ec6658
|
||||||
|
environment:
|
||||||
|
- DB_USER=${DB_USER}
|
||||||
|
- DB_PASSWORD=${DB_PASSWORD}
|
||||||
|
- DB_HOST=postgres
|
||||||
|
- DB_DATABASE=${DB_DATABASE}
|
||||||
|
- HOST=${HOST}
|
||||||
|
- EMAIL_DEMO=${EMAIL_DEMO}
|
||||||
|
- PASSWORD_DEMO=${PASSWORD_DEMO}
|
||||||
|
- JWT_PASS=${JWT_PASS}
|
||||||
|
- SECRET_KEY=${SECRET_KEY}
|
||||||
|
- DEVICEHUB_HOST=${DEVICEHUB_HOST}
|
||||||
|
- URL_MANUALS=${URL_MANUALS}
|
||||||
|
- DPP_MODULE=n
|
||||||
|
- IMPORT_SNAPSHOTS=${IMPORT_SNAPSHOTS}
|
||||||
|
- DEPLOYMENT=${DEPLOYMENT}
|
||||||
|
ports:
|
||||||
|
- 5000:5000
|
||||||
|
volumes:
|
||||||
|
- ${SNAPSHOTS_PATH:-./examples/snapshots}:/mnt/snapshots:ro
|
||||||
|
- shared:/shared:rw
|
||||||
|
- app:/opt/devicehub:rw
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: dkr-dsg.ac.upc.edu/ereuse/postgres:dpp__c6ec6658
|
||||||
|
# 4. To create the database.
|
||||||
|
# 5. Give permissions to the corresponding users in the database.
|
||||||
|
# extra src https://github.com/docker-library/docs/blob/master/postgres/README.md#environment-variables
|
||||||
|
environment:
|
||||||
|
- POSTGRES_PASSWORD=${DB_PASSWORD}
|
||||||
|
- POSTGRES_USER=${DB_USER}
|
||||||
|
- POSTGRES_DB=${DB_DATABASE}
|
||||||
|
volumes:
|
||||||
|
- pg_data:/var/lib/postgresql/data
|
||||||
|
# DEBUG
|
||||||
|
#ports:
|
||||||
|
# - 5432:5432
|
||||||
|
|
||||||
|
nginx:
|
||||||
|
image: nginx
|
||||||
|
ports:
|
||||||
|
- 8080:8080
|
||||||
|
volumes:
|
||||||
|
- ./docker/nginx-devicehub.nginx.conf:/etc/nginx/nginx.conf:ro
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
shared:
|
||||||
|
pg_data:
|
||||||
|
app:
|
|
@ -0,0 +1,32 @@
|
||||||
|
FROM debian:bullseye-slim
|
||||||
|
|
||||||
|
RUN apt update && apt-get install --no-install-recommends -y \
|
||||||
|
python3-minimal \
|
||||||
|
python3-pip \
|
||||||
|
python-is-python3 \
|
||||||
|
python3-psycopg2 \
|
||||||
|
python3-dev \
|
||||||
|
libpq-dev \
|
||||||
|
build-essential \
|
||||||
|
libpangocairo-1.0-0 \
|
||||||
|
curl \
|
||||||
|
jq \
|
||||||
|
time \
|
||||||
|
netcat
|
||||||
|
|
||||||
|
WORKDIR /opt/devicehub
|
||||||
|
|
||||||
|
# this is exactly the same as examples/pip_install.sh except the last command
|
||||||
|
# to improve the docker layer builds, it has been separated
|
||||||
|
RUN pip install --upgrade pip
|
||||||
|
RUN pip install alembic==1.8.1 anytree==2.8.0 apispec==0.39.0 atomicwrites==1.4.0 blinker==1.5 boltons==23.0.0 cairocffi==1.4.0 cairosvg==2.5.2 certifi==2022.9.24 cffi==1.15.1 charset-normalizer==2.0.12 click==6.7 click-spinner==0.1.8 colorama==0.3.9 colour==0.1.5 cssselect2==0.7.0 defusedxml==0.7.1 et-xmlfile==1.1.0 flask==1.0.2 flask-cors==3.0.10 flask-login==0.5.0 flask-sqlalchemy==2.5.1 flask-weasyprint==0.4 flask-wtf==1.0.0 hashids==1.2.0 html5lib==1.1 idna==3.4 inflection==0.5.1 itsdangerous==2.0.1 jinja2==3.0.3 mako==1.2.3 markupsafe==2.1.1 marshmallow==3.0.0b11 marshmallow-enum==1.4.1 more-itertools==8.12.0 numpy==1.22.0 odfpy==1.4.1 openpyxl==3.0.10 pandas==1.3.5 passlib==1.7.1 phonenumbers==8.9.11 pillow==9.2.0 pint==0.9 psycopg2-binary==2.8.3 py-dmidecode==0.1.0 pycparser==2.21 pyjwt==2.4.0 pyphen==0.13.0 python-dateutil==2.7.3 python-decouple==3.3 python-dotenv==0.14.0 python-editor==1.0.4 python-stdnum==1.9 pytz==2022.2.1 pyyaml==5.4 requests==2.27.1 requests-mock==1.5.2 requests-toolbelt==0.9.1 six==1.16.0 sortedcontainers==2.1.0 sqlalchemy==1.3.24 sqlalchemy-citext==1.3.post0 sqlalchemy-utils==0.33.11 tinycss2==1.1.1 tqdm==4.32.2 urllib3==1.26.12 weasyprint==44 webargs==5.5.3 webencodings==0.5.1 werkzeug==2.0.3 wtforms==3.0.1 xlrd==2.0.1 cryptography==39.0.1 Authlib==1.2.1 gunicorn==21.2.0
|
||||||
|
|
||||||
|
RUN pip install -i https://test.pypi.org/simple/ ereuseapitest==0.0.14
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
# this operation might be overriding inside container another app.py you would have
|
||||||
|
COPY examples/app.py .
|
||||||
|
RUN pip install -e .
|
||||||
|
|
||||||
|
COPY docker/devicehub.entrypoint.sh /
|
||||||
|
ENTRYPOINT sh /devicehub.entrypoint.sh
|
|
@ -0,0 +1,12 @@
|
||||||
|
.git
|
||||||
|
.env
|
||||||
|
# TODO need to comment it to copy the entrypoint
|
||||||
|
#docker
|
||||||
|
Makefile
|
||||||
|
|
||||||
|
# Emacs backup files
|
||||||
|
*~
|
||||||
|
.\#*
|
||||||
|
# Vim swap files
|
||||||
|
*.swp
|
||||||
|
*.swo
|
|
@ -0,0 +1,228 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
set -u
|
||||||
|
# DEBUG
|
||||||
|
set -x
|
||||||
|
|
||||||
|
# 3. Generate an environment .env file.
|
||||||
|
gen_env_vars() {
|
||||||
|
CONFIG_OIDC="${CONFIG_OIDC:-y}"
|
||||||
|
# specific dpp env vars
|
||||||
|
if [ "${DPP_MODULE}" = 'y' ]; then
|
||||||
|
dpp_env_vars="$(cat <<END
|
||||||
|
API_DLT='${API_DLT}'
|
||||||
|
API_DLT_TOKEN='${API_DLT_TOKEN}'
|
||||||
|
API_RESOLVER='${API_RESOLVER}'
|
||||||
|
ID_FEDERATED='${ID_FEDERATED}'
|
||||||
|
END
|
||||||
|
)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# generate config using env vars from docker
|
||||||
|
cat > .env <<END
|
||||||
|
${dpp_env_vars:-}
|
||||||
|
DB_USER='${DB_USER}'
|
||||||
|
DB_PASSWORD='${DB_PASSWORD}'
|
||||||
|
DB_HOST='${DB_HOST}'
|
||||||
|
DB_DATABASE='${DB_DATABASE}'
|
||||||
|
URL_MANUALS='${URL_MANUALS}'
|
||||||
|
|
||||||
|
HOST='${HOST}'
|
||||||
|
|
||||||
|
SCHEMA='dbtest'
|
||||||
|
DB_SCHEMA='dbtest'
|
||||||
|
|
||||||
|
EMAIL_DEMO='${EMAIL_DEMO}'
|
||||||
|
PASSWORD_DEMO='${PASSWORD_DEMO}'
|
||||||
|
|
||||||
|
JWT_PASS=${JWT_PASS}
|
||||||
|
SECRET_KEY=${SECRET_KEY}
|
||||||
|
END
|
||||||
|
}
|
||||||
|
|
||||||
|
wait_for_postgres() {
|
||||||
|
# old one was
|
||||||
|
#sleep 4
|
||||||
|
|
||||||
|
default_postgres_port=5432
|
||||||
|
# thanks https://testdriven.io/blog/dockerizing-django-with-postgres-gunicorn-and-nginx/
|
||||||
|
while ! nc -z ${DB_HOST} ${default_postgres_port}; do
|
||||||
|
sleep 0.5
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
init_data() {
|
||||||
|
|
||||||
|
# 7. Run alembic of the project.
|
||||||
|
alembic -x inventory=dbtest upgrade head
|
||||||
|
# 8. Running alembic from oidc module.y
|
||||||
|
cd ereuse_devicehub/modules/oidc
|
||||||
|
alembic -x inventory=dbtest upgrade head
|
||||||
|
cd -
|
||||||
|
# 9. Running alembic from dpp module.
|
||||||
|
cd ereuse_devicehub/modules/dpp/
|
||||||
|
alembic -x inventory=dbtest upgrade head
|
||||||
|
cd -
|
||||||
|
|
||||||
|
# 11. Generate a minimal data structure.
|
||||||
|
# TODO it has some errors (?)
|
||||||
|
flask initdata || true
|
||||||
|
|
||||||
|
if [ "${EREUSE_PILOT:-}" = 'y' ]; then
|
||||||
|
flask dlt_register_user /opt/devicehub/users_devicehub.json || true
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
big_error() {
|
||||||
|
local message="${@}"
|
||||||
|
echo "###############################################" >&2
|
||||||
|
echo "# ERROR: ${message}" >&2
|
||||||
|
echo "###############################################" >&2
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
handle_federated_id() {
|
||||||
|
|
||||||
|
# devicehub host and id federated checker
|
||||||
|
|
||||||
|
# //getAll queries are not accepted by this service, so we remove them
|
||||||
|
EXPECTED_ID_FEDERATED="$(curl -s "${API_RESOLVER%/}/getAll" \
|
||||||
|
| jq -r '.url | to_entries | .[] | select(.value == "'"${DEVICEHUB_HOST}"'") | .key' \
|
||||||
|
| head -n 1)"
|
||||||
|
|
||||||
|
# if is a new DEVICEHUB_HOST, then register it
|
||||||
|
if [ -z "${EXPECTED_ID_FEDERATED}" ]; then
|
||||||
|
# TODO better docker compose run command
|
||||||
|
cmd="docker compose run --entrypoint= devicehub flask dlt_insert_members ${DEVICEHUB_HOST}"
|
||||||
|
big_error "No FEDERATED ID maybe you should run \`${cmd}\`"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# if not new DEVICEHUB_HOST, then check consistency
|
||||||
|
|
||||||
|
# if there is already an ID in the DLT, it should match with my internal ID
|
||||||
|
if [ ! "${EXPECTED_ID_FEDERATED}" = "${ID_FEDERATED}" ]; then
|
||||||
|
|
||||||
|
big_error "ID_FEDERATED should be ${EXPECTED_ID_FEDERATED} instead of ${ID_FEDERATED}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# not needed, but reserved
|
||||||
|
# EXPECTED_DEVICEHUB_HOST="$(curl -s "${API_RESOLVER%/}/getAll" \
|
||||||
|
# | jq -r '.url | to_entries | .[] | select(.key == "'"${ID_FEDERATED}"'") | .value' \
|
||||||
|
# | head -n 1)"
|
||||||
|
# if [ ! "${EXPECTED_DEVICEHUB_HOST}" = "${DEVICEHUB_HOST}" ]; then
|
||||||
|
# big_error "ERROR: DEVICEHUB_HOST should be ${EXPECTED_DEVICEHUB_HOST} instead of ${DEVICEHUB_HOST}"
|
||||||
|
# fi
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
config_oidc() {
|
||||||
|
# TODO test allowing more than 1 client
|
||||||
|
if [ "${ID_SERVICE}" = "server_id" ]; then
|
||||||
|
|
||||||
|
client_description="client identity from docker compose demo"
|
||||||
|
|
||||||
|
# in AUTHORIZED_CLIENT_URL we remove anything before ://
|
||||||
|
flask add_contract_oidc \
|
||||||
|
"${EMAIL_DEMO}" \
|
||||||
|
"${client_description}" \
|
||||||
|
"${AUTHORIZED_CLIENT_URL}" \
|
||||||
|
> /shared/client_id_${AUTHORIZED_CLIENT_URL#*://}
|
||||||
|
|
||||||
|
elif [ "${ID_SERVICE}" = "client_id" ]; then
|
||||||
|
|
||||||
|
# in DEVICEHUB_HOST we remove anything before ://
|
||||||
|
client_id_config="/shared/client_id_${DEVICEHUB_HOST#*://}"
|
||||||
|
client_id=
|
||||||
|
client_secret=
|
||||||
|
|
||||||
|
# wait that the file generated by the server_id is readable
|
||||||
|
while true; do
|
||||||
|
if [ -f "${client_id_config}" ]; then
|
||||||
|
client_id="$(cat "${client_id_config}" | jq -r '.client_id')"
|
||||||
|
client_secret="$(cat "${client_id_config}" | jq -r '.client_secret')"
|
||||||
|
if [ "${client_id}" ] && [ "${client_secret}" ]; then
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
flask add_client_oidc \
|
||||||
|
"${SERVER_ID_HOST}" \
|
||||||
|
"${client_id}" \
|
||||||
|
"${client_secret}"
|
||||||
|
|
||||||
|
else
|
||||||
|
big_error "Something went wrong ${ID_SERVICE} is not server_id nor client_id"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
config_dpp_part1() {
|
||||||
|
# 12. Add a new server to the 'api resolver'
|
||||||
|
handle_federated_id
|
||||||
|
|
||||||
|
# 13. Do a rsync api resolve
|
||||||
|
flask dlt_rsync_members
|
||||||
|
|
||||||
|
# 14. Register a new user to the DLT
|
||||||
|
#flask dlt_register_user "${EMAIL_DEMO}" ${PASSWORD_DEMO} Operator
|
||||||
|
}
|
||||||
|
|
||||||
|
config_phase() {
|
||||||
|
init_flagfile='docker__already_configured'
|
||||||
|
if [ ! -f "${init_flagfile}" ]; then
|
||||||
|
# 7, 8, 9, 11
|
||||||
|
init_data
|
||||||
|
|
||||||
|
if [ "${DPP_MODULE}" = 'y' ]; then
|
||||||
|
# 12, 13, 14
|
||||||
|
config_dpp_part1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# non DL user (only for the inventory)
|
||||||
|
# flask adduser user2@dhub.com ${PASSWORD_DEMO}
|
||||||
|
|
||||||
|
# # 15. Add inventory snapshots for user "${EMAIL_DEMO}".
|
||||||
|
if [ "${IMPORT_SNAPSHOTS}" = 'y' ]; then
|
||||||
|
mkdir -p ereuse_devicehub/commands/snapshot_files
|
||||||
|
cp /mnt/snapshots/snapshot*.json ereuse_devicehub/commands/snapshot_files/
|
||||||
|
/usr/bin/time flask snapshot "${EMAIL_DEMO}" ${PASSWORD_DEMO}
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${CONFIG_OIDC}" = 'y' ]; then
|
||||||
|
# 16.
|
||||||
|
# commented because this fails with wrong DLT credentials
|
||||||
|
#flask check_install "${EMAIL_DEMO}" "${PASSWORD_DEMO}"
|
||||||
|
# 20. config server or client ID
|
||||||
|
config_oidc
|
||||||
|
fi
|
||||||
|
|
||||||
|
# remain next command as the last operation for this if conditional
|
||||||
|
touch "${init_flagfile}"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
main() {
|
||||||
|
|
||||||
|
gen_env_vars
|
||||||
|
|
||||||
|
wait_for_postgres
|
||||||
|
|
||||||
|
config_phase
|
||||||
|
|
||||||
|
# 17. Use gunicorn
|
||||||
|
# thanks https://akira3030.github.io/formacion/articulos/python-flask-gunicorn-docker.html
|
||||||
|
if [ "${DEPLOYMENT:-}" = "PROD" ]; then
|
||||||
|
# TODO workers 1 because we have a shared secret in RAM
|
||||||
|
gunicorn --access-logfile - --error-logfile - --workers 1 -b :5000 app:app
|
||||||
|
else
|
||||||
|
# run development server
|
||||||
|
FLASK_DEBUG=1 flask run --host=0.0.0.0 --port 5000
|
||||||
|
fi
|
||||||
|
|
||||||
|
# DEBUG
|
||||||
|
#sleep infinity
|
||||||
|
}
|
||||||
|
|
||||||
|
main "${@}"
|
|
@ -0,0 +1,32 @@
|
||||||
|
user www-data;
|
||||||
|
worker_processes auto;
|
||||||
|
pid /run/nginx.pid;
|
||||||
|
error_log /var/log/nginx/error.log;
|
||||||
|
include /etc/nginx/modules-enabled/*.conf;
|
||||||
|
|
||||||
|
events {
|
||||||
|
worker_connections 768;
|
||||||
|
# multi_accept on;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
http {
|
||||||
|
#upstream socket_backend {
|
||||||
|
# server unix:/socket/gunicorn.sock fail_timeout=0;
|
||||||
|
#}
|
||||||
|
server {
|
||||||
|
listen 8080;
|
||||||
|
listen [::]:8080;
|
||||||
|
#server_name devicehub.example.org;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
# TODO env var on proxy_pass
|
||||||
|
proxy_pass http://devicehub:5000/;
|
||||||
|
proxy_set_header Host $http_host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_redirect off;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,8 @@
|
||||||
|
FROM postgres:15.4-bookworm
|
||||||
|
# this is the latest in 2023-09-14_13-01-38
|
||||||
|
#FROM postgres:latest
|
||||||
|
|
||||||
|
# Add a SQL script that will be executed upon container startup
|
||||||
|
COPY docker/postgres.setupdb.sql /docker-entrypoint-initdb.d/
|
||||||
|
|
||||||
|
EXPOSE 5432
|
|
@ -0,0 +1,5 @@
|
||||||
|
-- 6. Create the necessary extensions.
|
||||||
|
CREATE EXTENSION pgcrypto SCHEMA public;
|
||||||
|
CREATE EXTENSION ltree SCHEMA public;
|
||||||
|
CREATE EXTENSION citext SCHEMA public;
|
||||||
|
CREATE EXTENSION pg_trgm SCHEMA public;
|
40
docs/conf.py
40
docs/conf.py
|
@ -30,7 +30,6 @@ from teal.enums import Country, Currency, Layouts, Subdivision
|
||||||
from teal.marshmallow import EnumField
|
from teal.marshmallow import EnumField
|
||||||
|
|
||||||
from ereuse_devicehub.marshmallow import NestedOn
|
from ereuse_devicehub.marshmallow import NestedOn
|
||||||
from ereuse_devicehub.resources.schemas import Thing
|
|
||||||
|
|
||||||
project = 'Devicehub'
|
project = 'Devicehub'
|
||||||
copyright = '2020, eReuse.org team'
|
copyright = '2020, eReuse.org team'
|
||||||
|
@ -56,7 +55,7 @@ extensions = [
|
||||||
'sphinx.ext.viewcode',
|
'sphinx.ext.viewcode',
|
||||||
'sphinxcontrib.plantuml',
|
'sphinxcontrib.plantuml',
|
||||||
'sphinx.ext.autosectionlabel',
|
'sphinx.ext.autosectionlabel',
|
||||||
'sphinx.ext.autodoc'
|
'sphinx.ext.autodoc',
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
|
@ -126,15 +125,12 @@ latex_elements = {
|
||||||
# The paper size ('letterpaper' or 'a4paper').
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
#
|
#
|
||||||
# 'papersize': 'letterpaper',
|
# 'papersize': 'letterpaper',
|
||||||
|
|
||||||
# The font size ('10pt', '11pt' or '12pt').
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
#
|
#
|
||||||
# 'pointsize': '10pt',
|
# 'pointsize': '10pt',
|
||||||
|
|
||||||
# Additional stuff for the LaTeX preamble.
|
# Additional stuff for the LaTeX preamble.
|
||||||
#
|
#
|
||||||
# 'preamble': '',
|
# 'preamble': '',
|
||||||
|
|
||||||
# Latex figure (float) alignment
|
# Latex figure (float) alignment
|
||||||
#
|
#
|
||||||
# 'figure_align': 'htbp',
|
# 'figure_align': 'htbp',
|
||||||
|
@ -144,18 +140,20 @@ latex_elements = {
|
||||||
# (source start file, target name, title,
|
# (source start file, target name, title,
|
||||||
# author, documentclass [howto, manual, or own class]).
|
# author, documentclass [howto, manual, or own class]).
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
(master_doc, 'Devicehub.tex', 'Devicehub Documentation',
|
(
|
||||||
'eReuse.org team', 'manual'),
|
master_doc,
|
||||||
|
'Devicehub.tex',
|
||||||
|
'Devicehub Documentation',
|
||||||
|
'eReuse.org team',
|
||||||
|
'manual',
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
# -- Options for manual page output ------------------------------------------
|
# -- Options for manual page output ------------------------------------------
|
||||||
|
|
||||||
# One entry per manual page. List of tuples
|
# One entry per manual page. List of tuples
|
||||||
# (source start file, name, description, authors, manual section).
|
# (source start file, name, description, authors, manual section).
|
||||||
man_pages = [
|
man_pages = [(master_doc, 'devicehub', 'Devicehub Documentation', [author], 1)]
|
||||||
(master_doc, 'devicehub', 'Devicehub Documentation',
|
|
||||||
[author], 1)
|
|
||||||
]
|
|
||||||
|
|
||||||
# -- Options for Texinfo output ----------------------------------------------
|
# -- Options for Texinfo output ----------------------------------------------
|
||||||
|
|
||||||
|
@ -163,9 +161,15 @@ man_pages = [
|
||||||
# (source start file, target name, title, author,
|
# (source start file, target name, title, author,
|
||||||
# dir menu entry, description, category)
|
# dir menu entry, description, category)
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
(master_doc, 'Devicehub', 'Devicehub Documentation',
|
(
|
||||||
author, 'Devicehub', 'One line description of project.',
|
master_doc,
|
||||||
'Miscellaneous'),
|
'Devicehub',
|
||||||
|
'Devicehub Documentation',
|
||||||
|
author,
|
||||||
|
'Devicehub',
|
||||||
|
'One line description of project.',
|
||||||
|
'Miscellaneous',
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
# -- Extension configuration -------------------------------------------------
|
# -- Extension configuration -------------------------------------------------
|
||||||
|
@ -199,6 +203,7 @@ class DhlistDirective(Directive):
|
||||||
This requires :py:class:`ereuse_devicehub.resources.schemas.SchemaMeta`.
|
This requires :py:class:`ereuse_devicehub.resources.schemas.SchemaMeta`.
|
||||||
You will find in that module more information.
|
You will find in that module more information.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
has_content = False
|
has_content = False
|
||||||
|
|
||||||
# Definition of passed-in options
|
# Definition of passed-in options
|
||||||
|
@ -216,7 +221,7 @@ class DhlistDirective(Directive):
|
||||||
|
|
||||||
sections = []
|
sections = []
|
||||||
sections.append(self.links(things)) # Make index
|
sections.append(self.links(things)) # Make index
|
||||||
for thng in things: # type: Thing
|
for thng in things:
|
||||||
# Generate a section for each class, with a title,
|
# Generate a section for each class, with a title,
|
||||||
# fields description and a paragraph
|
# fields description and a paragraph
|
||||||
section = n.section(ids=[self._id(thng)])
|
section = n.section(ids=[self._id(thng)])
|
||||||
|
@ -228,7 +233,9 @@ class DhlistDirective(Directive):
|
||||||
for key, f in thng._own:
|
for key, f in thng._own:
|
||||||
name = n.field_name(text=f.data_key or key)
|
name = n.field_name(text=f.data_key or key)
|
||||||
body = [
|
body = [
|
||||||
self.parse('{} {}'.format(self.type(f), f.metadata.get('description', '')))
|
self.parse(
|
||||||
|
'{} {}'.format(self.type(f), f.metadata.get('description', ''))
|
||||||
|
)
|
||||||
]
|
]
|
||||||
if isinstance(f, EnumField):
|
if isinstance(f, EnumField):
|
||||||
body.append(self._parse_enum_field(f))
|
body.append(self._parse_enum_field(f))
|
||||||
|
@ -244,6 +251,7 @@ class DhlistDirective(Directive):
|
||||||
|
|
||||||
def _parse_enum_field(self, f):
|
def _parse_enum_field(self, f):
|
||||||
from ereuse_devicehub.resources.device import states
|
from ereuse_devicehub.resources.device import states
|
||||||
|
|
||||||
if issubclass(f.enum, (Subdivision, Currency, Country, Layouts, states.State)):
|
if issubclass(f.enum, (Subdivision, Currency, Country, Layouts, states.State)):
|
||||||
return self.parse(f.enum.__doc__)
|
return self.parse(f.enum.__doc__)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
__version__ = "2.4.1"
|
__version__ = "2.5.3"
|
||||||
|
|
|
@ -58,6 +58,8 @@ class InventoryView(LoginMixin, SnapshotMixin):
|
||||||
self.snapshot_json = ParseSnapshotLsHw(snapshot_json).get_snapshot()
|
self.snapshot_json = ParseSnapshotLsHw(snapshot_json).get_snapshot()
|
||||||
|
|
||||||
snapshot = self.build()
|
snapshot = self.build()
|
||||||
|
snapshot.device.set_hid()
|
||||||
|
snapshot.device.binding.device.set_hid()
|
||||||
db.session.add(snapshot)
|
db.session.add(snapshot)
|
||||||
|
|
||||||
snap_log = SnapshotsLog(
|
snap_log = SnapshotsLog(
|
||||||
|
@ -72,11 +74,13 @@ class InventoryView(LoginMixin, SnapshotMixin):
|
||||||
|
|
||||||
db.session().final_flush()
|
db.session().final_flush()
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
|
url = "https://{}/".format(app.config['HOST'])
|
||||||
|
public_url = "{}{}".format(url.strip("/"), snapshot.device.url.to_text())
|
||||||
self.response = jsonify(
|
self.response = jsonify(
|
||||||
{
|
{
|
||||||
'url': snapshot.device.url.to_text(),
|
|
||||||
'dhid': snapshot.device.dhid,
|
'dhid': snapshot.device.dhid,
|
||||||
'sid': snapshot.sid,
|
'url': url,
|
||||||
|
'public_url': public_url,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
self.response.status_code = 201
|
self.response.status_code = 201
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
from sqlalchemy.exc import DataError
|
from sqlalchemy.exc import DataError
|
||||||
from teal.auth import TokenAuth
|
|
||||||
from teal.db import ResourceNotFound
|
|
||||||
from werkzeug.exceptions import Unauthorized
|
from werkzeug.exceptions import Unauthorized
|
||||||
|
|
||||||
from ereuse_devicehub.resources.user.models import User, Session
|
from ereuse_devicehub.resources.user.models import Session, User
|
||||||
|
from ereuse_devicehub.teal.auth import TokenAuth
|
||||||
|
from ereuse_devicehub.teal.db import ResourceNotFound
|
||||||
|
|
||||||
|
|
||||||
class Auth(TokenAuth):
|
class Auth(TokenAuth):
|
||||||
|
|
|
@ -1,38 +0,0 @@
|
||||||
{% extends "ereuse_devicehub/base_site.html" %}
|
|
||||||
{% block main %}
|
|
||||||
|
|
||||||
<div class="pagetitle">
|
|
||||||
<h1>Billing</h1>
|
|
||||||
<nav>
|
|
||||||
<ol class="breadcrumb">
|
|
||||||
<li class="breadcrumb-item active">{{ page_title }}</li>
|
|
||||||
</ol>
|
|
||||||
</nav>
|
|
||||||
</div><!-- End Page Title -->
|
|
||||||
|
|
||||||
<section class="section">
|
|
||||||
Current usage
|
|
||||||
|
|
||||||
<table class="table table-striped">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th scope="col">Year</th>
|
|
||||||
<th scope="col">Month</th>
|
|
||||||
<th scope="col">Snapshot (register)</th>
|
|
||||||
<th scope="col">Snapshot (update)</th>
|
|
||||||
<th scope="col">Drives Erasure (uniques)</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
<tr>
|
|
||||||
<th scope="row">{{ current_month_usage.year }}</th>
|
|
||||||
<th scope="row">{{ current_month_usage.month }}</th>
|
|
||||||
<td>{{ current_month_usage.snapshot_register }}</td>
|
|
||||||
<td>{{ current_month_usage.snapshot_update }}</td>
|
|
||||||
<td>{{ current_month_usage.drives_erasure }}</td>
|
|
||||||
</tr>
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
</section>
|
|
||||||
{% endblock main %}
|
|
|
@ -1,60 +0,0 @@
|
||||||
import logging
|
|
||||||
|
|
||||||
import flask
|
|
||||||
from flask import Blueprint
|
|
||||||
from flask.views import View
|
|
||||||
from flask_login import current_user, login_required
|
|
||||||
from sqlalchemy.sql import extract
|
|
||||||
|
|
||||||
from ereuse_devicehub import __version__
|
|
||||||
from ereuse_devicehub.resources.action.models import Snapshot
|
|
||||||
|
|
||||||
billing = Blueprint(
|
|
||||||
"billing", __name__, url_prefix="/billing", template_folder="templates"
|
|
||||||
)
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class BillingIndexView(View):
|
|
||||||
methods = ["GET"]
|
|
||||||
decorators = [login_required]
|
|
||||||
template_name = "billing/home.html"
|
|
||||||
|
|
||||||
def dispatch_request(self):
|
|
||||||
# TODO (@slamora): replace hardcoded and get current time
|
|
||||||
# https://dateutil.readthedocs.io/en/stable/_modules/dateutil/tz/tz.html?highlight=now()
|
|
||||||
# datetime.now(tzutc())
|
|
||||||
year = 2022
|
|
||||||
month = 9
|
|
||||||
snapshot_register, snapshot_update = self.count_snapshot(year, month)
|
|
||||||
|
|
||||||
current_month_usage = {
|
|
||||||
"year": year,
|
|
||||||
"month": month,
|
|
||||||
"snapshot_register": snapshot_register,
|
|
||||||
"snapshot_update": snapshot_update,
|
|
||||||
# TODO (@slamora): data erasure count
|
|
||||||
}
|
|
||||||
context = {
|
|
||||||
"current_month_usage": current_month_usage,
|
|
||||||
"page_title": "Billing",
|
|
||||||
"version": __version__,
|
|
||||||
}
|
|
||||||
return flask.render_template(self.template_name, **context)
|
|
||||||
|
|
||||||
def count_snapshot(self, year, month):
|
|
||||||
query = Snapshot.query.filter(
|
|
||||||
Snapshot.author_id == current_user.id,
|
|
||||||
extract('year', Snapshot.created) == year,
|
|
||||||
extract('month', Snapshot.created) == month,
|
|
||||||
)
|
|
||||||
|
|
||||||
all = query.count()
|
|
||||||
register = query.distinct(Snapshot.device_id).count()
|
|
||||||
update = all - register
|
|
||||||
|
|
||||||
return (register, update)
|
|
||||||
|
|
||||||
|
|
||||||
billing.add_url_rule("/", view_func=BillingIndexView.as_view("billing_index"))
|
|
|
@ -2,16 +2,18 @@ import os
|
||||||
|
|
||||||
import click.testing
|
import click.testing
|
||||||
import flask.cli
|
import flask.cli
|
||||||
import ereuse_utils
|
import ereuse_devicehub.ereuse_utils
|
||||||
|
|
||||||
from ereuse_devicehub.config import DevicehubConfig
|
from ereuse_devicehub.config import DevicehubConfig
|
||||||
from ereuse_devicehub.devicehub import Devicehub
|
from ereuse_devicehub.devicehub import Devicehub
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
sys.ps1 = '\001\033[92m\002>>> \001\033[0m\002'
|
sys.ps1 = '\001\033[92m\002>>> \001\033[0m\002'
|
||||||
sys.ps2= '\001\033[94m\002... \001\033[0m\002'
|
sys.ps2 = '\001\033[94m\002... \001\033[0m\002'
|
||||||
|
|
||||||
import os, readline, atexit
|
import os, readline, atexit
|
||||||
|
|
||||||
history_file = os.path.join(os.environ['HOME'], '.python_history')
|
history_file = os.path.join(os.environ['HOME'], '.python_history')
|
||||||
try:
|
try:
|
||||||
readline.read_history_file(history_file)
|
readline.read_history_file(history_file)
|
||||||
|
@ -29,6 +31,7 @@ readline.parse_and_bind('"\e[1;5D": backward-word')
|
||||||
readline.set_history_length(100000)
|
readline.set_history_length(100000)
|
||||||
atexit.register(readline.write_history_file, history_file)
|
atexit.register(readline.write_history_file, history_file)
|
||||||
|
|
||||||
|
|
||||||
class DevicehubGroup(flask.cli.FlaskGroup):
|
class DevicehubGroup(flask.cli.FlaskGroup):
|
||||||
# todo users cannot make cli to use a custom db this way!
|
# todo users cannot make cli to use a custom db this way!
|
||||||
CONFIG = DevicehubConfig
|
CONFIG = DevicehubConfig
|
||||||
|
@ -49,17 +52,25 @@ class DevicehubGroup(flask.cli.FlaskGroup):
|
||||||
def get_version(ctx, param, value):
|
def get_version(ctx, param, value):
|
||||||
if not value or ctx.resilient_parsing:
|
if not value or ctx.resilient_parsing:
|
||||||
return
|
return
|
||||||
click.echo('Devicehub {}'.format(ereuse_utils.version('ereuse-devicehub')), color=ctx.color)
|
click.echo(
|
||||||
|
'Devicehub {}'.format(
|
||||||
|
ereuse_devicehub.ereuse_utils.version('ereuse-devicehub')
|
||||||
|
),
|
||||||
|
color=ctx.color,
|
||||||
|
)
|
||||||
flask.cli.get_version(ctx, param, value)
|
flask.cli.get_version(ctx, param, value)
|
||||||
|
|
||||||
|
|
||||||
@click.option('--version',
|
@click.option(
|
||||||
|
'--version',
|
||||||
help='Devicehub version.',
|
help='Devicehub version.',
|
||||||
expose_value=False,
|
expose_value=False,
|
||||||
callback=get_version,
|
callback=get_version,
|
||||||
is_flag=True,
|
is_flag=True,
|
||||||
is_eager=True)
|
is_eager=True,
|
||||||
@click.group(cls=DevicehubGroup,
|
)
|
||||||
|
@click.group(
|
||||||
|
cls=DevicehubGroup,
|
||||||
context_settings=Devicehub.cli_context_settings,
|
context_settings=Devicehub.cli_context_settings,
|
||||||
add_version_option=False,
|
add_version_option=False,
|
||||||
help="""Manages the Devicehub of the inventory {}.
|
help="""Manages the Devicehub of the inventory {}.
|
||||||
|
@ -69,6 +80,9 @@ def get_version(ctx, param, value):
|
||||||
'dh tag add' adds a tag in the db1 database. Operations
|
'dh tag add' adds a tag in the db1 database. Operations
|
||||||
that affect the common database (like creating an user)
|
that affect the common database (like creating an user)
|
||||||
are not affected by this.
|
are not affected by this.
|
||||||
""".format(os.environ.get('dhi')))
|
""".format(
|
||||||
|
os.environ.get('dhi')
|
||||||
|
),
|
||||||
|
)
|
||||||
def cli():
|
def cli():
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -1,14 +1,14 @@
|
||||||
from inspect import isclass
|
from inspect import isclass
|
||||||
from typing import Dict, Iterable, Type, Union
|
from typing import Dict, Iterable, Type, Union
|
||||||
|
|
||||||
from ereuse_utils.test import JSON, Res
|
from ereuse_devicehub.ereuse_utils.test import JSON, Res
|
||||||
from flask.testing import FlaskClient
|
from flask.testing import FlaskClient
|
||||||
from flask_wtf.csrf import generate_csrf
|
from flask_wtf.csrf import generate_csrf
|
||||||
from teal.client import Client as TealClient
|
|
||||||
from teal.client import Query, Status
|
|
||||||
from werkzeug.exceptions import HTTPException
|
from werkzeug.exceptions import HTTPException
|
||||||
|
|
||||||
from ereuse_devicehub.resources import models, schemas
|
from ereuse_devicehub.resources import models, schemas
|
||||||
|
from ereuse_devicehub.teal.client import Client as TealClient
|
||||||
|
from ereuse_devicehub.teal.client import Query, Status
|
||||||
|
|
||||||
ResourceLike = Union[Type[Union[models.Thing, schemas.Thing]], str]
|
ResourceLike = Union[Type[Union[models.Thing, schemas.Thing]], str]
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,24 @@
|
||||||
|
import click
|
||||||
|
|
||||||
|
from ereuse_devicehub.db import db
|
||||||
|
from ereuse_devicehub.resources.agent.models import Person
|
||||||
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class AddUser:
|
||||||
|
def __init__(self, app) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.app = app
|
||||||
|
self.schema = app.config.get('DB_SCHEMA')
|
||||||
|
self.app.cli.command('adduser', short_help='add a user.')(self.run)
|
||||||
|
|
||||||
|
@click.argument('email')
|
||||||
|
@click.argument('password')
|
||||||
|
def run(self, email, password):
|
||||||
|
name = email.split('@')[0]
|
||||||
|
|
||||||
|
user = User(email=email, password=password)
|
||||||
|
user.individuals.add(Person(name=name))
|
||||||
|
db.session.add(user)
|
||||||
|
|
||||||
|
db.session.commit()
|
|
@ -0,0 +1,125 @@
|
||||||
|
"""This command is used for up one snapshot."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from ereuse_devicehub.resources.action.models import Snapshot
|
||||||
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class CheckInstall:
|
||||||
|
"""Command.
|
||||||
|
|
||||||
|
This command check if the installation was ok and the
|
||||||
|
integration with the api of DLT was ok too.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, app) -> None:
|
||||||
|
"""Init function."""
|
||||||
|
super().__init__()
|
||||||
|
self.app = app
|
||||||
|
self.schema = app.config.get('DB_SCHEMA')
|
||||||
|
self.app.cli.command('check_install', short_help='Upload snapshots.')(self.run)
|
||||||
|
|
||||||
|
@click.argument('email')
|
||||||
|
@click.argument('password')
|
||||||
|
def run(self, email, password):
|
||||||
|
"""Run command."""
|
||||||
|
|
||||||
|
self.email = email
|
||||||
|
self.password = password
|
||||||
|
self.OKGREEN = '\033[92m'
|
||||||
|
# self.WARNING = '\033[93m'
|
||||||
|
self.FAIL = '\033[91m'
|
||||||
|
self.ENDC = '\033[0m'
|
||||||
|
print("\n")
|
||||||
|
try:
|
||||||
|
self.check_user()
|
||||||
|
self.check_snapshot()
|
||||||
|
except Exception:
|
||||||
|
txt = "There was an Error in the installation!"
|
||||||
|
print("\n" + self.FAIL + txt + self.ENDC)
|
||||||
|
return
|
||||||
|
|
||||||
|
txt = "The installation is OK!"
|
||||||
|
print("\n" + self.OKGREEN + txt + self.ENDC)
|
||||||
|
|
||||||
|
def check_user(self):
|
||||||
|
"""Get datamodel of user."""
|
||||||
|
self.user = User.query.filter_by(email=self.email).first()
|
||||||
|
|
||||||
|
txt = "Register user to the DLT "
|
||||||
|
try:
|
||||||
|
assert self.user.api_keys_dlt is not None
|
||||||
|
token_dlt = self.user.get_dlt_keys(self.password)
|
||||||
|
assert token_dlt.get('data', {}).get('eth_pub_key') is not None
|
||||||
|
except Exception:
|
||||||
|
self.print_fail(txt)
|
||||||
|
raise (txt)
|
||||||
|
|
||||||
|
self.print_ok(txt)
|
||||||
|
|
||||||
|
api_token = token_dlt.get('data', {}).get('api_token')
|
||||||
|
|
||||||
|
txt = "Register user roles in the DLT "
|
||||||
|
try:
|
||||||
|
rols = self.user.get_rols(api_token)
|
||||||
|
assert self.user.rols_dlt is not None
|
||||||
|
assert self.user.rols_dlt != []
|
||||||
|
assert self.user.rols_dlt == json.dumps([x for x, y in rols])
|
||||||
|
except Exception:
|
||||||
|
self.print_fail(txt)
|
||||||
|
raise (txt)
|
||||||
|
|
||||||
|
self.print_ok(txt)
|
||||||
|
|
||||||
|
def check_snapshot(self):
|
||||||
|
self.snapshot = Snapshot.query.filter_by(author=self.user).first()
|
||||||
|
if not self.snapshot:
|
||||||
|
txt = "Impossible register snapshot "
|
||||||
|
self.print_fail(txt)
|
||||||
|
raise (txt)
|
||||||
|
|
||||||
|
self.device = self.snapshot.device
|
||||||
|
|
||||||
|
txt = "Generate DPP "
|
||||||
|
try:
|
||||||
|
assert self.device.chid is not None
|
||||||
|
assert self.snapshot.json_wb is not None
|
||||||
|
assert self.snapshot.phid_dpp is not None
|
||||||
|
except Exception:
|
||||||
|
self.print_fail(txt)
|
||||||
|
raise (txt)
|
||||||
|
|
||||||
|
self.print_ok(txt)
|
||||||
|
|
||||||
|
txt = "Register DPP in the DLT "
|
||||||
|
try:
|
||||||
|
assert len(self.device.dpps) > 0
|
||||||
|
dpp = self.device.dpps[0]
|
||||||
|
assert type(dpp.timestamp) == int
|
||||||
|
assert dpp in self.snapshot.dpp
|
||||||
|
assert dpp.documentId == str(self.snapshot.uuid)
|
||||||
|
# if 'Device already exists' in DLT before
|
||||||
|
# device.proofs == 0
|
||||||
|
# Snapshot.proof == 1 [erase]
|
||||||
|
|
||||||
|
# if Device is new in DLT before
|
||||||
|
# device.proofs == 1
|
||||||
|
# Snapshot.proof == 1 or 2 [Register, erase]
|
||||||
|
|
||||||
|
assert len(self.device.proofs) in [0, 1]
|
||||||
|
assert len(self.snapshot.proofs) in [0, 1, 2]
|
||||||
|
except Exception:
|
||||||
|
self.print_fail(txt)
|
||||||
|
raise (txt)
|
||||||
|
|
||||||
|
self.print_ok(txt)
|
||||||
|
|
||||||
|
def print_ok(self, msg):
|
||||||
|
print(msg + self.OKGREEN + " OK!" + self.ENDC)
|
||||||
|
|
||||||
|
def print_fail(self, msg):
|
||||||
|
print(msg + self.FAIL + " FAIL!" + self.ENDC)
|
|
@ -0,0 +1,41 @@
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from boltons.urlutils import URL
|
||||||
|
from decouple import config
|
||||||
|
|
||||||
|
from ereuse_devicehub.db import db
|
||||||
|
from ereuse_devicehub.resources.agent.models import Person
|
||||||
|
from ereuse_devicehub.resources.inventory.model import Inventory
|
||||||
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class InitDatas:
|
||||||
|
def __init__(self, app) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.app = app
|
||||||
|
self.schema = app.config.get('DB_SCHEMA')
|
||||||
|
self.email = config('EMAIL_DEMO')
|
||||||
|
self.name = self.email.split('@')[0] if self.email else None
|
||||||
|
self.password = config('PASSWORD_DEMO')
|
||||||
|
self.app.cli.command(
|
||||||
|
'initdata', short_help='Save a minimum structure of datas.'
|
||||||
|
)(self.run)
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
inv = Inventory(
|
||||||
|
id=self.schema,
|
||||||
|
name="usody",
|
||||||
|
tag_provider=URL('http://localhost:8081'),
|
||||||
|
tag_token=uuid4(),
|
||||||
|
org_id=uuid4(),
|
||||||
|
)
|
||||||
|
|
||||||
|
db.session.add(inv)
|
||||||
|
db.session.commit()
|
||||||
|
|
||||||
|
if self.email:
|
||||||
|
user = User(email=self.email, password=self.password)
|
||||||
|
user.individuals.add(Person(name=self.name))
|
||||||
|
db.session.add(user)
|
||||||
|
|
||||||
|
db.session.commit()
|
|
@ -1,96 +0,0 @@
|
||||||
import csv
|
|
||||||
|
|
||||||
# import click_spinner
|
|
||||||
# import ereuse_utils.cli
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
from ereuse_devicehub.resources.action import models as evs
|
|
||||||
from ereuse_devicehub.resources.device.models import Placeholder
|
|
||||||
from ereuse_devicehub.resources.documents.device_row import InternalStatsRow
|
|
||||||
|
|
||||||
# import click
|
|
||||||
|
|
||||||
|
|
||||||
class Report:
|
|
||||||
def __init__(self, app) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self.app = app
|
|
||||||
short_help = 'Creates reports devices and users.'
|
|
||||||
self.app.cli.command('report', short_help=short_help)(self.run)
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
stats = InternalStatsView()
|
|
||||||
stats.print()
|
|
||||||
|
|
||||||
|
|
||||||
class InternalStatsView:
|
|
||||||
def print(self):
|
|
||||||
query = evs.Action.query.filter(
|
|
||||||
evs.Action.type.in_(
|
|
||||||
(
|
|
||||||
'Snapshot',
|
|
||||||
'Live',
|
|
||||||
'Allocate',
|
|
||||||
'Deallocate',
|
|
||||||
'EraseBasic',
|
|
||||||
'EraseSectors',
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
return self.generate_post_csv(query)
|
|
||||||
|
|
||||||
def generate_post_csv(self, query):
|
|
||||||
data = StringIO()
|
|
||||||
cw = csv.writer(data, delimiter=';', lineterminator="\n", quotechar='"')
|
|
||||||
cw.writerow(InternalStatsRow('', "2000-1", [], []).keys())
|
|
||||||
|
|
||||||
for row in self.get_rows(query):
|
|
||||||
cw.writerow(row)
|
|
||||||
|
|
||||||
return print(data.getvalue())
|
|
||||||
|
|
||||||
def get_rows(self, query):
|
|
||||||
d = {}
|
|
||||||
dd = {}
|
|
||||||
disks = []
|
|
||||||
for ac in query:
|
|
||||||
create = '{}-{}'.format(ac.created.year, ac.created.month)
|
|
||||||
user = ac.author.email
|
|
||||||
|
|
||||||
if user not in d:
|
|
||||||
d[user] = {}
|
|
||||||
dd[user] = {}
|
|
||||||
if create not in d[user]:
|
|
||||||
d[user][create] = []
|
|
||||||
dd[user][create] = None
|
|
||||||
d[user][create].append(ac)
|
|
||||||
|
|
||||||
for user, createds in d.items():
|
|
||||||
for create, actions in createds.items():
|
|
||||||
r = InternalStatsRow(user, create, actions, disks)
|
|
||||||
dd[user][create] = r
|
|
||||||
|
|
||||||
return self.get_placeholders(dd)
|
|
||||||
|
|
||||||
def get_placeholders(self, dd):
|
|
||||||
|
|
||||||
for p in Placeholder.query.all():
|
|
||||||
create = '{}-{}'.format(p.created.year, p.created.month)
|
|
||||||
user = p.owner.email
|
|
||||||
|
|
||||||
if user not in dd:
|
|
||||||
dd[user] = {}
|
|
||||||
|
|
||||||
if create not in dd[user]:
|
|
||||||
dd[user][create] = None
|
|
||||||
|
|
||||||
if not dd[user][create]:
|
|
||||||
dd[user][create] = InternalStatsRow(user, create, [], [])
|
|
||||||
|
|
||||||
dd[user][create]['Placeholders'] += 1
|
|
||||||
|
|
||||||
rows = []
|
|
||||||
for user, createds in dd.items():
|
|
||||||
for create, row in createds.items():
|
|
||||||
rows.append(row.values())
|
|
||||||
return rows
|
|
|
@ -0,0 +1,103 @@
|
||||||
|
"""This command is used for up one snapshot."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
# from uuid import uuid4
|
||||||
|
from io import BytesIO
|
||||||
|
from os import listdir
|
||||||
|
from os import remove as remove_file
|
||||||
|
from os.path import isfile, join
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
from flask.testing import FlaskClient
|
||||||
|
from flask_wtf.csrf import generate_csrf
|
||||||
|
|
||||||
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class UploadSnapshots:
|
||||||
|
"""Command.
|
||||||
|
|
||||||
|
This command allow upload all snapshots than exist
|
||||||
|
in the directory snapshots_upload.
|
||||||
|
If this snapshot exist replace it.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, app) -> None:
|
||||||
|
"""Init function."""
|
||||||
|
super().__init__()
|
||||||
|
self.app = app
|
||||||
|
self.schema = app.config.get('DB_SCHEMA')
|
||||||
|
self.app.cli.command('snapshot', short_help='Upload snapshots.')(self.run)
|
||||||
|
|
||||||
|
@click.argument('email')
|
||||||
|
@click.argument('password')
|
||||||
|
def run(self, email, password=None):
|
||||||
|
"""Run command."""
|
||||||
|
self.email = email
|
||||||
|
self.password = password
|
||||||
|
self.json_wb = None
|
||||||
|
self.onlyfiles = []
|
||||||
|
|
||||||
|
self.get_user()
|
||||||
|
self.get_files()
|
||||||
|
for f in self.onlyfiles:
|
||||||
|
self.file_snapshot = f
|
||||||
|
self.open_snapshot()
|
||||||
|
self.build_snapshot()
|
||||||
|
self.remove_files()
|
||||||
|
|
||||||
|
def get_user(self):
|
||||||
|
"""Get datamodel of user."""
|
||||||
|
self.user = User.query.filter_by(email=self.email).one()
|
||||||
|
self.client = FlaskClient(self.app, use_cookies=True)
|
||||||
|
self.client.get('/login/')
|
||||||
|
|
||||||
|
data = {
|
||||||
|
'email': self.email,
|
||||||
|
'password': self.password,
|
||||||
|
'remember': False,
|
||||||
|
'csrf_token': generate_csrf(),
|
||||||
|
}
|
||||||
|
self.client.post('/login/', data=data, follow_redirects=True)
|
||||||
|
|
||||||
|
def remove_files(self):
|
||||||
|
"""Open snapshot file."""
|
||||||
|
for f in self.onlyfiles:
|
||||||
|
remove_file(Path(__file__).parent.joinpath('snapshot_files').joinpath(f))
|
||||||
|
|
||||||
|
def open_snapshot(self):
|
||||||
|
"""Open snapshot file."""
|
||||||
|
with Path(__file__).parent.joinpath('snapshot_files').joinpath(
|
||||||
|
self.file_snapshot,
|
||||||
|
).open() as file_snapshot:
|
||||||
|
self.json_wb = json.loads(file_snapshot.read())
|
||||||
|
b_snapshot = bytes(json.dumps(self.json_wb), 'utf-8')
|
||||||
|
self.file_snap = (BytesIO(b_snapshot), self.file_snapshot)
|
||||||
|
|
||||||
|
def build_snapshot(self):
|
||||||
|
"""Build the devices of snapshot."""
|
||||||
|
uri = '/inventory/upload-snapshot/'
|
||||||
|
|
||||||
|
if not self.json_wb:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.client.get(uri)
|
||||||
|
data = {
|
||||||
|
'snapshot': self.file_snap,
|
||||||
|
'csrf_token': generate_csrf(),
|
||||||
|
}
|
||||||
|
|
||||||
|
self.client.post(uri, data=data, content_type="multipart/form-data")
|
||||||
|
|
||||||
|
def get_files(self):
|
||||||
|
"""Read snaoshot_files dir."""
|
||||||
|
mypath = Path(__file__).parent.joinpath('snapshot_files')
|
||||||
|
for f in listdir(mypath):
|
||||||
|
if not isfile(join(mypath, f)):
|
||||||
|
continue
|
||||||
|
if not f[-5:] == ".json":
|
||||||
|
continue
|
||||||
|
self.onlyfiles.append(f)
|
|
@ -1,12 +1,7 @@
|
||||||
from distutils.version import StrictVersion
|
from distutils.version import StrictVersion
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
from typing import Set
|
|
||||||
|
|
||||||
from decouple import config
|
from decouple import config
|
||||||
from teal.auth import TokenAuth
|
|
||||||
from teal.config import Config
|
|
||||||
from teal.enums import Currency
|
|
||||||
from teal.utils import import_resource
|
|
||||||
|
|
||||||
from ereuse_devicehub.resources import (
|
from ereuse_devicehub.resources import (
|
||||||
action,
|
action,
|
||||||
|
@ -18,12 +13,17 @@ from ereuse_devicehub.resources import (
|
||||||
user,
|
user,
|
||||||
)
|
)
|
||||||
from ereuse_devicehub.resources.device import definitions
|
from ereuse_devicehub.resources.device import definitions
|
||||||
|
from ereuse_devicehub.resources.did import did
|
||||||
from ereuse_devicehub.resources.documents import documents
|
from ereuse_devicehub.resources.documents import documents
|
||||||
from ereuse_devicehub.resources.enums import PriceSoftware
|
from ereuse_devicehub.resources.enums import PriceSoftware
|
||||||
from ereuse_devicehub.resources.licences import licences
|
from ereuse_devicehub.resources.licences import licences
|
||||||
from ereuse_devicehub.resources.metric import definitions as metric_def
|
from ereuse_devicehub.resources.metric import definitions as metric_def
|
||||||
from ereuse_devicehub.resources.tradedocument import definitions as tradedocument
|
from ereuse_devicehub.resources.tradedocument import definitions as tradedocument
|
||||||
from ereuse_devicehub.resources.versions import versions
|
from ereuse_devicehub.resources.versions import versions
|
||||||
|
from ereuse_devicehub.teal.auth import TokenAuth
|
||||||
|
from ereuse_devicehub.teal.config import Config
|
||||||
|
from ereuse_devicehub.teal.enums import Currency
|
||||||
|
from ereuse_devicehub.teal.utils import import_resource
|
||||||
|
|
||||||
|
|
||||||
class DevicehubConfig(Config):
|
class DevicehubConfig(Config):
|
||||||
|
@ -33,6 +33,7 @@ class DevicehubConfig(Config):
|
||||||
import_resource(action),
|
import_resource(action),
|
||||||
import_resource(user),
|
import_resource(user),
|
||||||
import_resource(tag),
|
import_resource(tag),
|
||||||
|
import_resource(did),
|
||||||
import_resource(agent),
|
import_resource(agent),
|
||||||
import_resource(lot),
|
import_resource(lot),
|
||||||
import_resource(deliverynote),
|
import_resource(deliverynote),
|
||||||
|
@ -44,21 +45,30 @@ class DevicehubConfig(Config):
|
||||||
import_resource(metric_def),
|
import_resource(metric_def),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
PASSWORD_SCHEMES = {'pbkdf2_sha256'} # type: Set[str]
|
PASSWORD_SCHEMES = {'pbkdf2_sha256'}
|
||||||
SECRET_KEY = config('SECRET_KEY')
|
SECRET_KEY = config('SECRET_KEY')
|
||||||
DB_USER = config('DB_USER', 'dhub')
|
DB_USER = config('DB_USER', 'dhub')
|
||||||
DB_PASSWORD = config('DB_PASSWORD', 'ereuse')
|
DB_PASSWORD = config('DB_PASSWORD', 'ereuse')
|
||||||
DB_HOST = config('DB_HOST', 'localhost')
|
DB_HOST = config('DB_HOST', 'localhost')
|
||||||
DB_DATABASE = config('DB_DATABASE', 'devicehub')
|
DB_DATABASE = config('DB_DATABASE', 'devicehub')
|
||||||
DB_SCHEMA = config('DB_SCHEMA', 'dbtest')
|
DB_SCHEMA = config('DB_SCHEMA', 'dbtest')
|
||||||
|
|
||||||
SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{pw}@{host}/{db}'.format(
|
SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{pw}@{host}/{db}'.format(
|
||||||
user=DB_USER,
|
user=DB_USER,
|
||||||
pw=DB_PASSWORD,
|
pw=DB_PASSWORD,
|
||||||
host=DB_HOST,
|
host=DB_HOST,
|
||||||
db=DB_DATABASE,
|
db=DB_DATABASE,
|
||||||
) # type: str
|
) # type: str
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_SIZE = int(config("SQLALCHEMY_POOL_SIZE", 10))
|
||||||
|
SQLALCHEMY_MAX_OVERFLOW = int(config("SQLALCHEMY_MAX_OVERFLOW", 20))
|
||||||
|
SQLALCHEMY_TRACK_MODIFICATIONS = bool(config("SQLALCHEMY_TRACK_MODIFICATIONS", False))
|
||||||
|
SQLALCHEMY_POOL_TIMEOUT = int(config("SQLALCHEMY_POOL_TIMEOUT", 0))
|
||||||
|
SQLALCHEMY_POOL_RECYCLE = int(config("SQLALCHEMY_POOL_RECYCLE", 3600))
|
||||||
|
|
||||||
SCHEMA = config('SCHEMA', 'dbtest')
|
SCHEMA = config('SCHEMA', 'dbtest')
|
||||||
HOST = config('HOST', 'localhost')
|
HOST = config('HOST', 'localhost')
|
||||||
|
API_HOST = config('API_HOST', 'localhost')
|
||||||
MIN_WORKBENCH = StrictVersion('11.0a1') # type: StrictVersion
|
MIN_WORKBENCH = StrictVersion('11.0a1') # type: StrictVersion
|
||||||
"""The minimum version of ereuse.org workbench that this devicehub
|
"""The minimum version of ereuse.org workbench that this devicehub
|
||||||
accepts. we recommend not changing this value.
|
accepts. we recommend not changing this value.
|
||||||
|
@ -86,3 +96,29 @@ class DevicehubConfig(Config):
|
||||||
"""Definition of path where save the documents of customers"""
|
"""Definition of path where save the documents of customers"""
|
||||||
PATH_DOCUMENTS_STORAGE = config('PATH_DOCUMENTS_STORAGE', '/tmp/')
|
PATH_DOCUMENTS_STORAGE = config('PATH_DOCUMENTS_STORAGE', '/tmp/')
|
||||||
JWT_PASS = config('JWT_PASS', '')
|
JWT_PASS = config('JWT_PASS', '')
|
||||||
|
|
||||||
|
MAIL_SERVER = config('MAIL_SERVER', '')
|
||||||
|
MAIL_USERNAME = config('MAIL_USERNAME', '')
|
||||||
|
MAIL_PASSWORD = config('MAIL_PASSWORD', '')
|
||||||
|
MAIL_PORT = config('MAIL_PORT', 587)
|
||||||
|
MAIL_USE_TLS = config('MAIL_USE_TLS', True)
|
||||||
|
MAIL_DEFAULT_SENDER = config('MAIL_DEFAULT_SENDER', '')
|
||||||
|
API_DLT = config('API_DLT', None)
|
||||||
|
API_DLT_TOKEN = config('API_DLT_TOKEN', None)
|
||||||
|
ID_FEDERATED = config('ID_FEDERATED', None)
|
||||||
|
URL_MANUALS = config('URL_MANUALS', None)
|
||||||
|
ABAC_TOKEN = config('ABAC_TOKEN', None)
|
||||||
|
ABAC_COOKIE = config('ABAC_COOKIE', None)
|
||||||
|
ABAC_URL = config('ABAC_URL', None)
|
||||||
|
VERIFY_URL = config('VERIFY_URL', None)
|
||||||
|
|
||||||
|
"""Definition of oauth jwt details."""
|
||||||
|
OAUTH2_JWT_ENABLED = config('OAUTH2_JWT_ENABLED', False)
|
||||||
|
OAUTH2_JWT_ISS = config('OAUTH2_JWT_ISS', '')
|
||||||
|
OAUTH2_JWT_KEY = config('OAUTH2_JWT_KEY', None)
|
||||||
|
OAUTH2_JWT_ALG = config('OAUTH2_JWT_ALG', 'HS256')
|
||||||
|
|
||||||
|
if API_DLT:
|
||||||
|
API_DLT = API_DLT.strip("/")
|
||||||
|
WALLET_INX_EBSI_PLUGIN_TOKEN = config('WALLET_INX_EBSI_PLUGIN_TOKEN', None)
|
||||||
|
WALLET_INX_EBSI_PLUGIN_URL = config('WALLET_INX_EBSI_PLUGIN_URL', None)
|
||||||
|
|
|
@ -4,7 +4,8 @@ from sqlalchemy.dialects import postgresql
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
from sqlalchemy.sql import expression
|
from sqlalchemy.sql import expression
|
||||||
from sqlalchemy_utils import view
|
from sqlalchemy_utils import view
|
||||||
from teal.db import SchemaSQLAlchemy, SchemaSession
|
|
||||||
|
from ereuse_devicehub.teal.db import SchemaSession, SchemaSQLAlchemy
|
||||||
|
|
||||||
|
|
||||||
class DhSession(SchemaSession):
|
class DhSession(SchemaSession):
|
||||||
|
@ -23,6 +24,7 @@ class DhSession(SchemaSession):
|
||||||
# flush, all the new / dirty interesting things in a variable
|
# flush, all the new / dirty interesting things in a variable
|
||||||
# until DeviceSearch is executed
|
# until DeviceSearch is executed
|
||||||
from ereuse_devicehub.resources.device.search import DeviceSearch
|
from ereuse_devicehub.resources.device.search import DeviceSearch
|
||||||
|
|
||||||
DeviceSearch.update_modified_devices(session=self)
|
DeviceSearch.update_modified_devices(session=self)
|
||||||
|
|
||||||
|
|
||||||
|
@ -31,6 +33,7 @@ class SQLAlchemy(SchemaSQLAlchemy):
|
||||||
schema of the database, as it is in the `search_path`
|
schema of the database, as it is in the `search_path`
|
||||||
defined in teal.
|
defined in teal.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# todo add here all types of columns used so we don't have to
|
# todo add here all types of columns used so we don't have to
|
||||||
# manually import them all the time
|
# manually import them all the time
|
||||||
UUID = postgresql.UUID
|
UUID = postgresql.UUID
|
||||||
|
@ -60,11 +63,15 @@ def create_view(name, selectable):
|
||||||
# We need to ensure views are created / destroyed before / after
|
# We need to ensure views are created / destroyed before / after
|
||||||
# SchemaSQLAlchemy's listeners execute
|
# SchemaSQLAlchemy's listeners execute
|
||||||
# That is why insert=True in 'after_create'
|
# That is why insert=True in 'after_create'
|
||||||
event.listen(db.metadata, 'after_create', view.CreateView(name, selectable), insert=True)
|
event.listen(
|
||||||
|
db.metadata, 'after_create', view.CreateView(name, selectable), insert=True
|
||||||
|
)
|
||||||
event.listen(db.metadata, 'before_drop', view.DropView(name))
|
event.listen(db.metadata, 'before_drop', view.DropView(name))
|
||||||
return table
|
return table
|
||||||
|
|
||||||
|
|
||||||
db = SQLAlchemy(session_options={'autoflush': False})
|
db = SQLAlchemy(
|
||||||
|
session_options={'autoflush': False},
|
||||||
|
)
|
||||||
f = db.func
|
f = db.func
|
||||||
exp = expression
|
exp = expression
|
||||||
|
|
|
@ -5,31 +5,66 @@ from typing import Type
|
||||||
import boltons.urlutils
|
import boltons.urlutils
|
||||||
import click
|
import click
|
||||||
import click_spinner
|
import click_spinner
|
||||||
import ereuse_utils.cli
|
|
||||||
from ereuse_utils.session import DevicehubClient
|
|
||||||
from flask import _app_ctx_stack, g
|
from flask import _app_ctx_stack, g
|
||||||
from flask_login import LoginManager, current_user
|
from flask_login import LoginManager, current_user
|
||||||
from flask_sqlalchemy import SQLAlchemy
|
from flask_sqlalchemy import SQLAlchemy
|
||||||
from teal.db import ResourceNotFound, SchemaSQLAlchemy
|
|
||||||
from teal.teal import Teal
|
|
||||||
|
|
||||||
|
import ereuse_devicehub.ereuse_utils.cli
|
||||||
from ereuse_devicehub.auth import Auth
|
from ereuse_devicehub.auth import Auth
|
||||||
from ereuse_devicehub.client import Client, UserClient
|
from ereuse_devicehub.client import Client, UserClient
|
||||||
from ereuse_devicehub.commands.reports import Report
|
from ereuse_devicehub.commands.adduser import AddUser
|
||||||
|
from ereuse_devicehub.commands.check_install import CheckInstall
|
||||||
|
from ereuse_devicehub.commands.initdatas import InitDatas
|
||||||
|
from ereuse_devicehub.commands.snapshots import UploadSnapshots
|
||||||
|
|
||||||
|
# from ereuse_devicehub.commands.reports import Report
|
||||||
from ereuse_devicehub.commands.users import GetToken
|
from ereuse_devicehub.commands.users import GetToken
|
||||||
from ereuse_devicehub.config import DevicehubConfig
|
from ereuse_devicehub.config import DevicehubConfig
|
||||||
from ereuse_devicehub.db import db
|
from ereuse_devicehub.db import db
|
||||||
from ereuse_devicehub.dummy.dummy import Dummy
|
from ereuse_devicehub.dummy.dummy import Dummy
|
||||||
|
from ereuse_devicehub.ereuse_utils.session import DevicehubClient
|
||||||
from ereuse_devicehub.resources.device.search import DeviceSearch
|
from ereuse_devicehub.resources.device.search import DeviceSearch
|
||||||
from ereuse_devicehub.resources.inventory import Inventory, InventoryDef
|
from ereuse_devicehub.resources.inventory import Inventory, InventoryDef
|
||||||
from ereuse_devicehub.resources.user.models import User
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
from ereuse_devicehub.teal.db import ResourceNotFound, SchemaSQLAlchemy
|
||||||
|
from ereuse_devicehub.teal.teal import Teal
|
||||||
from ereuse_devicehub.templating import Environment
|
from ereuse_devicehub.templating import Environment
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ereuse_devicehub.modules.oidc.commands.sync_dlt import GetMembers
|
||||||
|
except Exception:
|
||||||
|
GetMembers = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ereuse_devicehub.modules.dpp.commands.register_user_dlt import RegisterUserDlt
|
||||||
|
except Exception:
|
||||||
|
RegisterUserDlt = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ereuse_devicehub.modules.oidc.commands.add_member import AddMember
|
||||||
|
except Exception:
|
||||||
|
AddMember = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ereuse_devicehub.modules.oidc.commands.client_member import AddClientOidc
|
||||||
|
except Exception:
|
||||||
|
AddClientOidc = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ereuse_devicehub.modules.oidc.commands.insert_member_in_dlt import InsertMember
|
||||||
|
except Exception:
|
||||||
|
InsertMembe = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ereuse_devicehub.modules.oidc.commands.add_contract_oidc import AddContractOidc
|
||||||
|
except Exception:
|
||||||
|
AddContractOidc = None
|
||||||
|
|
||||||
|
|
||||||
class Devicehub(Teal):
|
class Devicehub(Teal):
|
||||||
test_client_class = Client
|
test_client_class = Client
|
||||||
Dummy = Dummy
|
Dummy = Dummy
|
||||||
Report = Report
|
# Report = Report
|
||||||
jinja_environment = Environment
|
jinja_environment = Environment
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
|
@ -70,8 +105,26 @@ class Devicehub(Teal):
|
||||||
self.id = inventory
|
self.id = inventory
|
||||||
"""The Inventory ID of this instance. In Teal is the app.schema."""
|
"""The Inventory ID of this instance. In Teal is the app.schema."""
|
||||||
self.dummy = Dummy(self)
|
self.dummy = Dummy(self)
|
||||||
self.report = Report(self)
|
# self.report = Report(self)
|
||||||
self.get_token = GetToken(self)
|
self.get_token = GetToken(self)
|
||||||
|
self.initdata = InitDatas(self)
|
||||||
|
self.adduser = AddUser(self)
|
||||||
|
self.uploadsnapshots = UploadSnapshots(self)
|
||||||
|
self.checkinstall = CheckInstall(self)
|
||||||
|
|
||||||
|
if GetMembers:
|
||||||
|
self.get_members = GetMembers(self)
|
||||||
|
if RegisterUserDlt:
|
||||||
|
self.dlt_register_user = RegisterUserDlt(self)
|
||||||
|
if AddMember:
|
||||||
|
self.dlt_insert_members = AddMember(self)
|
||||||
|
if AddClientOidc:
|
||||||
|
self.add_client_oidc = AddClientOidc(self)
|
||||||
|
if InsertMember:
|
||||||
|
self.dlt_insert_members = InsertMember(self)
|
||||||
|
|
||||||
|
if AddContractOidc:
|
||||||
|
self.add_contract_oidc = AddContractOidc(self)
|
||||||
|
|
||||||
@self.cli.group(
|
@self.cli.group(
|
||||||
short_help='Inventory management.',
|
short_help='Inventory management.',
|
||||||
|
@ -121,7 +174,7 @@ class Devicehub(Teal):
|
||||||
@click.option(
|
@click.option(
|
||||||
'--tag-url',
|
'--tag-url',
|
||||||
'-tu',
|
'-tu',
|
||||||
type=ereuse_utils.cli.URL(scheme=True, host=True, path=False),
|
type=ereuse_devicehub.ereuse_utils.cli.URL(scheme=True, host=True, path=False),
|
||||||
default='http://example.com',
|
default='http://example.com',
|
||||||
help='The base url (scheme and host) of the tag provider.',
|
help='The base url (scheme and host) of the tag provider.',
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,14 +1,14 @@
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
|
import uuid
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Set
|
|
||||||
|
|
||||||
import click
|
import click
|
||||||
import click_spinner
|
import click_spinner
|
||||||
import ereuse_utils.cli
|
|
||||||
import jwt
|
import jwt
|
||||||
import yaml
|
import yaml
|
||||||
from ereuse_utils.test import ANY
|
from ereuse_devicehub.ereuse_utils.test import ANY
|
||||||
|
from ereuse_devicehub import ereuse_utils
|
||||||
|
|
||||||
from ereuse_devicehub.client import UserClient
|
from ereuse_devicehub.client import UserClient
|
||||||
from ereuse_devicehub.db import db
|
from ereuse_devicehub.db import db
|
||||||
|
@ -109,11 +109,13 @@ class Dummy:
|
||||||
files = tuple(Path(__file__).parent.joinpath('files').iterdir())
|
files = tuple(Path(__file__).parent.joinpath('files').iterdir())
|
||||||
print('done.')
|
print('done.')
|
||||||
sample_pc = None # We treat this one as a special sample for demonstrations
|
sample_pc = None # We treat this one as a special sample for demonstrations
|
||||||
pcs = set() # type: Set[int]
|
pcs = set()
|
||||||
with click.progressbar(files, label='Creating devices...'.ljust(28)) as bar:
|
with click.progressbar(files, label='Creating devices...'.ljust(28)) as bar:
|
||||||
for path in bar:
|
for path in bar:
|
||||||
with path.open() as f:
|
with path.open() as f:
|
||||||
snapshot = yaml.load(f)
|
snapshot = yaml.load(f)
|
||||||
|
if snapshot['device']['type'] in ['Desktop', 'Laptop']:
|
||||||
|
snapshot['device']['system_uuid'] = uuid.uuid4()
|
||||||
s, _ = user1.post(res=m.Snapshot, data=self.json_encode(snapshot))
|
s, _ = user1.post(res=m.Snapshot, data=self.json_encode(snapshot))
|
||||||
if s.get('uuid', None) == 'ec23c11b-80b6-42cd-ac5c-73ba7acddbc4':
|
if s.get('uuid', None) == 'ec23c11b-80b6-42cd-ac5c-73ba7acddbc4':
|
||||||
sample_pc = s['device']['id']
|
sample_pc = s['device']['id']
|
||||||
|
@ -228,7 +230,7 @@ class Dummy:
|
||||||
user1.get(res=Device, item=sample_pc_devicehub_id) # Test
|
user1.get(res=Device, item=sample_pc_devicehub_id) # Test
|
||||||
anonymous = self.app.test_client()
|
anonymous = self.app.test_client()
|
||||||
html, _ = anonymous.get(res=Device, item=sample_pc_devicehub_id, accept=ANY)
|
html, _ = anonymous.get(res=Device, item=sample_pc_devicehub_id, accept=ANY)
|
||||||
assert 'intel core2 duo cpu' in html
|
assert 'hewlett-packard' in html
|
||||||
|
|
||||||
# For netbook: to preapre -> torepair -> to dispose -> disposed
|
# For netbook: to preapre -> torepair -> to dispose -> disposed
|
||||||
print('⭐ Done.')
|
print('⭐ Done.')
|
||||||
|
|
|
@ -178,6 +178,7 @@
|
||||||
],
|
],
|
||||||
"type": "Laptop"
|
"type": "Laptop"
|
||||||
},
|
},
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "79c5098f-bc44-4834-8a59-9ea61d956c31"}}},
|
||||||
"elapsed": 14725,
|
"elapsed": 14725,
|
||||||
"endTime": "2018-11-24T18:06:37.611704+00:00",
|
"endTime": "2018-11-24T18:06:37.611704+00:00",
|
||||||
"software": "Workbench",
|
"software": "Workbench",
|
||||||
|
|
|
@ -119,6 +119,7 @@
|
||||||
"manufacturer": "ASUSTeK Computer INC."
|
"manufacturer": "ASUSTeK Computer INC."
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "645f00bf-1ec0-4fdb-9608-b5ac73e285f6"}}},
|
||||||
"version": "11.0a4",
|
"version": "11.0a4",
|
||||||
"elapsed": 6,
|
"elapsed": 6,
|
||||||
"endTime": "2016-11-03T17:17:17.266543+00:00"
|
"endTime": "2016-11-03T17:17:17.266543+00:00"
|
||||||
|
|
|
@ -148,6 +148,7 @@
|
||||||
"model": "0UG982"
|
"model": "0UG982"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "5dcdd380-5a54-48bc-99bf-aff6019e8491"}}},
|
||||||
"version": "11.0a3",
|
"version": "11.0a3",
|
||||||
"closed": false,
|
"closed": false,
|
||||||
"elapsed": 1512,
|
"elapsed": 1512,
|
||||||
|
|
|
@ -132,5 +132,6 @@
|
||||||
"model": "HP Compaq 8100 Elite SFF",
|
"model": "HP Compaq 8100 Elite SFF",
|
||||||
"manufacturer": "Hewlett-Packard"
|
"manufacturer": "Hewlett-Packard"
|
||||||
},
|
},
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "f6cfe48a-93d5-4e94-ab7b-3ee371e4d048"}}},
|
||||||
"version": "11.0a3"
|
"version": "11.0a3"
|
||||||
}
|
}
|
||||||
|
|
|
@ -170,5 +170,6 @@
|
||||||
},
|
},
|
||||||
"software": "Workbench",
|
"software": "Workbench",
|
||||||
"endTime": "2018-07-11T10:30:22.395958+00:00",
|
"endTime": "2018-07-11T10:30:22.395958+00:00",
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "75dcb454-ae80-4a87-a192-185d3b0250c0"}}},
|
||||||
"elapsed": 2766
|
"elapsed": 2766
|
||||||
}
|
}
|
||||||
|
|
|
@ -146,6 +146,7 @@
|
||||||
"pcmcia": 0
|
"pcmcia": 0
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "fcaf784e-5e57-43a2-b03f-8c56dabd0415"}}},
|
||||||
"uuid": "a01eacdb-db01-43ec-b6fb-a9b8cd21492d",
|
"uuid": "a01eacdb-db01-43ec-b6fb-a9b8cd21492d",
|
||||||
"type": "Snapshot",
|
"type": "Snapshot",
|
||||||
"version": "11.0a4",
|
"version": "11.0a4",
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
"closed": false,
|
"closed": false,
|
||||||
"endTime": "2018-07-11T13:26:29.365504+00:00",
|
"endTime": "2018-07-11T13:26:29.365504+00:00",
|
||||||
"type": "Snapshot",
|
"type": "Snapshot",
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "4f256440-e43f-429a-a2c6-1e8f3365de56"}}},
|
||||||
"device": {
|
"device": {
|
||||||
"serialNumber": "PB357N0",
|
"serialNumber": "PB357N0",
|
||||||
"actions": [
|
"actions": [
|
||||||
|
|
|
@ -148,6 +148,7 @@
|
||||||
"slots": 4
|
"slots": 4
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "077cad5d-ae1b-4156-a9a1-98bca6fa5c35"}}},
|
||||||
"version": "11.0a3",
|
"version": "11.0a3",
|
||||||
"endTime": "2018-07-11T10:28:55.879745+00:00",
|
"endTime": "2018-07-11T10:28:55.879745+00:00",
|
||||||
"type": "Snapshot",
|
"type": "Snapshot",
|
||||||
|
|
|
@ -136,8 +136,8 @@
|
||||||
],
|
],
|
||||||
"elapsed": 203,
|
"elapsed": 203,
|
||||||
"device": {
|
"device": {
|
||||||
"manufacturer": null,
|
"manufacturer": "Asus",
|
||||||
"model": null,
|
"model": "P7P55D",
|
||||||
"chassis": "Tower",
|
"chassis": "Tower",
|
||||||
"type": "Desktop",
|
"type": "Desktop",
|
||||||
"serialNumber": null,
|
"serialNumber": null,
|
||||||
|
@ -158,7 +158,7 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"version": "11.0a6",
|
"version": "11.0a6",
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "59ca9a2a-65bd-4802-89bb-315156a9352b"}}},
|
||||||
"type": "Snapshot",
|
"type": "Snapshot",
|
||||||
"closed": true,
|
"closed": true,
|
||||||
"software": "Workbench"
|
"software": "Workbench"
|
||||||
|
|
|
@ -142,7 +142,7 @@
|
||||||
},
|
},
|
||||||
"elapsed": 238,
|
"elapsed": 238,
|
||||||
"endTime": "2018-10-15T13:59:37.431309+00:00",
|
"endTime": "2018-10-15T13:59:37.431309+00:00",
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "43686b8e-e1ae-4e4e-bc51-f98f51e97c2d"}}},
|
||||||
"software": "Workbench",
|
"software": "Workbench",
|
||||||
"type": "Snapshot",
|
"type": "Snapshot",
|
||||||
"uuid": "ec23c11b-80b6-42cd-ac5c-73ba7acddbc4",
|
"uuid": "ec23c11b-80b6-42cd-ac5c-73ba7acddbc4",
|
||||||
|
|
|
@ -158,5 +158,6 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "a0cef731-9a78-4087-889c-dfb6ba5c2e9b"}}},
|
||||||
"closed": false
|
"closed": false
|
||||||
}
|
}
|
||||||
|
|
|
@ -114,6 +114,7 @@
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"version": "11.0a3",
|
"version": "11.0a3",
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "f2c50acd-501a-4f0b-b07c-58254b2ab8c9"}}},
|
||||||
"device": {
|
"device": {
|
||||||
"type": "Desktop",
|
"type": "Desktop",
|
||||||
"model": "HP Compaq 8000 Elite SFF",
|
"model": "HP Compaq 8000 Elite SFF",
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
{
|
{
|
||||||
"closed": false,
|
"closed": false,
|
||||||
"uuid": "f9e5e587-baee-44e1-9a94-255d216bbda9",
|
"uuid": "f9e5e587-baee-44e1-9a94-255d216bbda9",
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "4d21dd26-aa45-4902-a5f2-8a06e364cf25"}}},
|
||||||
"components": [
|
"components": [
|
||||||
{
|
{
|
||||||
"actions": [],
|
"actions": [],
|
||||||
|
|
|
@ -131,6 +131,7 @@
|
||||||
"model": "NB200"
|
"model": "NB200"
|
||||||
},
|
},
|
||||||
"uuid": "918726ae-c6bc-40aa-97cf-ad80d69268f9",
|
"uuid": "918726ae-c6bc-40aa-97cf-ad80d69268f9",
|
||||||
|
"debug": {"lshw": {"configuration": {"uuid": "33627ef0-89a9-4659-bb29-faa936727e0b"}}},
|
||||||
"closed": false,
|
"closed": false,
|
||||||
"type": "Snapshot"
|
"type": "Snapshot"
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,173 @@
|
||||||
|
import enum
|
||||||
|
import ipaddress
|
||||||
|
import json
|
||||||
|
import locale
|
||||||
|
from collections import Iterable
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from decimal import Decimal
|
||||||
|
from distutils.version import StrictVersion
|
||||||
|
from functools import wraps
|
||||||
|
from typing import Generator, Union
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
|
||||||
|
class JSONEncoder(json.JSONEncoder):
|
||||||
|
"""An overloaded JSON Encoder with extra type support."""
|
||||||
|
|
||||||
|
def default(self, obj):
|
||||||
|
if isinstance(obj, enum.Enum):
|
||||||
|
return obj.name
|
||||||
|
elif isinstance(obj, datetime):
|
||||||
|
return obj.isoformat()
|
||||||
|
elif isinstance(obj, timedelta):
|
||||||
|
return round(obj.total_seconds())
|
||||||
|
elif isinstance(obj, UUID):
|
||||||
|
return str(obj)
|
||||||
|
elif isinstance(obj, StrictVersion):
|
||||||
|
return str(obj)
|
||||||
|
elif isinstance(obj, set):
|
||||||
|
return list(obj)
|
||||||
|
elif isinstance(obj, Decimal):
|
||||||
|
return float(obj)
|
||||||
|
elif isinstance(obj, Dumpeable):
|
||||||
|
return obj.dump()
|
||||||
|
elif isinstance(obj, ipaddress._BaseAddress):
|
||||||
|
return str(obj)
|
||||||
|
# Instead of failing, return the string representation by default
|
||||||
|
return str(obj)
|
||||||
|
|
||||||
|
|
||||||
|
class Dumpeable:
    """Dumps dictionaries and jsons for Devicehub.

    A base class to allow subclasses to generate dictionaries
    and json suitable for sending to a Devicehub, i.e. preventing
    private and constants to be in the JSON and camelCases field names.
    """

    ENCODER = JSONEncoder

    def dump(self):
        """Return a dictionary of the non-private fields of this
        instance, keyed by camelCased field names.
        """
        import inflection

        dumped = {}
        for field in self._field_names():
            # Skip private (_x) and constant-style (Capitalized) names.
            if field.startswith('_') or field[0].isupper():
                continue
            key = inflection.camelize(field, uppercase_first_letter=False)
            dumped[key] = getattr(self, field)
        return dumped

    def _field_names(self):
        """An iterable of the names to dump."""
        # Feel free to override this
        return vars(self).keys()

    def to_json(self):
        """Return a JSON string of the non-private fields of this
        instance, serialized with :attr:`.ENCODER`.
        """
        return json.dumps(self, cls=self.ENCODER, indent=2)
|
||||||
|
|
||||||
|
|
||||||
|
class DumpeableModel(Dumpeable):
    """A dumpeable for SQLAlchemy models.

    Note that this does not avoid recursive relations.
    """

    def _field_names(self):
        from sqlalchemy import inspect

        state = inspect(self)
        return (attribute.key for attribute in state.attrs)
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_utf8(app_name_to_show_on_error: str):
    """Fail fast unless the system's preferred encoding is UTF-8.

    Python3 uses by default the system set, but it expects it to be
    'utf-8' to work correctly.
    This can generate problems in reading and writing files and in
    ``.decode()`` method.

    An example how to 'fix' it::

        echo 'export LC_CTYPE=en_US.UTF-8' > .bash_profile
        echo 'export LC_ALL=en_US.UTF-8' > .bash_profile

    :raise OSError: The preferred encoding is not UTF-8.
    """
    encoding = locale.getpreferredencoding()
    if encoding.lower() == 'utf-8':
        return
    raise OSError(
        '{} works only in UTF-8, but yours is set at {}'
        ''.format(app_name_to_show_on_error, encoding)
    )
|
||||||
|
|
||||||
|
|
||||||
|
def now() -> datetime:
    """Return a 'now' compatible with DeviceHub's API: UTC, with
    microseconds zeroed out.
    """
    utc = datetime.utcnow()
    return utc.replace(microsecond=0)
|
||||||
|
|
||||||
|
|
||||||
|
def flatten_mixed(values: Iterable) -> Generator:
    """Flatten a list containing lists and other elements.
    This is not deep: only one level of nesting is expanded.

    >>> list(flatten_mixed([1, 2, [3, 4]]))
    [1, 2, 3, 4]
    """
    for item in values:
        if isinstance(item, list):
            yield from item
        else:
            yield item
|
||||||
|
|
||||||
|
|
||||||
|
def if_none_return_none(f):
    """If the first value is None return None, otherwise execute f."""

    @wraps(f)
    def wrapper(self, value, *args, **kwargs):
        # Short-circuit: a None input never reaches the wrapped function.
        return None if value is None else f(self, value, *args, **kwargs)

    return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def local_ip(
    dest='109.69.8.152',
) -> Union[ipaddress.IPv4Address, ipaddress.IPv6Address]:
    """Gets the local IP of the interface that has access to the
    Internet.

    This is a reliable way to test if a device has an active
    connection to the Internet.

    This method works by connecting, by default,
    to the IP of ereuse01.ereuse.org.

    >>> local_ip()

    :param dest: The remote IP used to select the route. ``connect``
                 on a UDP socket only picks a route; no packet is sent.
    :raise OSError: The device cannot connect to the Internet.
    """
    import socket

    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect((dest, 80))
        ip = s.getsockname()[0]
    finally:
        # Fix: the original leaked the socket when connect() raised.
        s.close()
    return ipaddress.ip_address(ip)
|
||||||
|
|
||||||
|
|
||||||
|
def version(package_name: str) -> StrictVersion:
    """Returns the version of a package name installed with pip."""
    # From https://stackoverflow.com/a/2073599
    import pkg_resources

    distribution = pkg_resources.require(package_name)[0]
    return StrictVersion(distribution.version)
|
|
@ -0,0 +1,301 @@
|
||||||
|
import enum as _enum
|
||||||
|
import getpass
|
||||||
|
import itertools
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import threading
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from time import sleep
|
||||||
|
from typing import Any, Iterable, Type
|
||||||
|
|
||||||
|
from boltons import urlutils
|
||||||
|
from click import types as click_types
|
||||||
|
from colorama import Fore
|
||||||
|
from tqdm import tqdm
|
||||||
|
|
||||||
|
from ereuse_devicehub.ereuse_utils import if_none_return_none
|
||||||
|
|
||||||
|
# Default Click context settings: accept both `-h` and `--help`.
COMMON_CONTEXT_S = {'help_option_names': ('-h', '--help')}
"""Common Context settings used for our implementations of the
Click cli.
"""

# Py2/3 compat. Empty conditional to avoid coverage
try:
    _unicode = unicode  # Python 2: native unicode type
except NameError:
    _unicode = str  # Python 3: str is already unicode
|
||||||
|
|
||||||
|
|
||||||
|
class Enum(click_types.Choice):
    """
    Enum support for click.

    Use it as a collection: @click.option(..., type=cli.Enum(MyEnum)).
    Then, this expects you to pass the *name* of a member of the enum.

    From `this github issue <https://github.com/pallets/click/issues/
    605#issuecomment-277539425>`_.
    """

    def __init__(self, enum: Type[_enum.Enum]):
        self.__enum = enum
        # Choice validates against the member names.
        super().__init__(enum.__members__)

    def convert(self, value, param, ctx):
        # Choice returns the validated name; map it back to the member.
        chosen_name = super().convert(value, param, ctx)
        return self.__enum[chosen_name]
|
||||||
|
|
||||||
|
|
||||||
|
class Path(click_types.Path):
    """Like click.Path but returning ``pathlib.Path`` objects."""

    def convert(self, value, param, ctx):
        raw = super().convert(value, param, ctx)
        return pathlib.Path(raw)
|
||||||
|
|
||||||
|
|
||||||
|
class URL(click_types.StringParamType):
    """Returns a bolton's URL."""

    name = 'url'

    def __init__(
        self,
        scheme=None,
        username=None,
        password=None,
        host=None,
        port=None,
        path=None,
        query_params=None,
        fragment=None,
    ) -> None:
        """Creates the type URL. You can require or enforce parts
        of the URL by setting parameters of this constructor.

        If the param is...

        - None, no check is performed (default).
        - True, it is then required as part of the URL.
        - False, it is then required NOT to be part of the URL.
        - Any other value, then such value is required to be in
          the URL.
        """
        # Fix: this docstring used to sit *after* super().__init__(),
        # making it a dead string expression instead of the method doc.
        super().__init__()
        # (name, constraint) pairs checked in order by convert().
        self.attrs = (
            ('scheme', scheme),
            ('username', username),
            ('password', password),
            ('host', host),
            ('port', port),
            ('path', path),
            ('query_params', query_params),
            ('fragment', fragment),
        )

    @if_none_return_none
    def convert(self, value, param, ctx):
        """Parse *value* as a boltons URL and enforce the constraints
        set in the constructor, calling ``self.fail`` on violation.
        """
        url = urlutils.URL(super().convert(value, param, ctx))
        for name, attr in self.attrs:
            if attr is True:
                if not getattr(url, name):
                    self.fail(
                        'URL {} must contain {} but it does not.'.format(url, name)
                    )
            elif attr is False:
                if getattr(url, name):
                    self.fail('URL {} cannot contain {} but it does.'.format(url, name))
            elif attr:
                if getattr(url, name) != attr:
                    self.fail('{} form {} can only be {}'.format(name, url, attr))
        return url
|
||||||
|
|
||||||
|
|
||||||
|
def password(service: str, username: str, prompt: str = 'Password:') -> str:
    """Gets a password from the keyring or the terminal."""
    import keyring

    stored = keyring.get_password(service, username)
    if stored:
        return stored
    # Nothing stored (or empty): fall back to an interactive prompt.
    return getpass.getpass(prompt)
|
||||||
|
|
||||||
|
|
||||||
|
class Line(tqdm):
    """A tqdm progress line with custom close/error messages and a
    spinner, intended to be driven by manual ``update`` calls.
    """

    # Frames cycled by :meth:`_spin` to animate the spinner.
    spinner_cycle = itertools.cycle(['-', '/', '|', '\\'])

    def __init__(
        self,
        total=None,
        desc=None,
        leave=True,
        file=None,
        ncols=None,
        mininterval=0.2,
        maxinterval=10.0,
        miniters=None,
        ascii=None,
        disable=False,
        unit='it',
        unit_scale=False,
        dynamic_ncols=True,
        smoothing=0.3,
        bar_format=None,
        initial=0,
        position=None,
        postfix=None,
        unit_divisor=1000,
        write_bytes=None,
        gui=False,
        close_message: Iterable = None,
        error_message: Iterable = None,
        **kwargs,
    ):
        """This cannot work with iterables. Iterable use is considered
        backward-compatibility in tqdm and inconsistent in Line.
        Manually call ``update``.

        :param close_message: Parts joined into the message displayed
            when the line closes normally (see :meth:`close`).
        :param error_message: Parts displayed instead when the context
            manager exits with an exception (see :meth:`__exit__`).
        """
        self._close_message = close_message
        self._error_message = error_message
        if total:
            # A fixed percentage format only makes sense with a total.
            bar_format = '{desc}{percentage:.1f}% |{bar}| {n:1g}/{total:1g} {elapsed}<{remaining}'
        # NOTE(review): arguments are forwarded positionally with
        # iterable=None first — confirm the order matches the pinned
        # tqdm version's __init__ signature.
        super().__init__(
            None,
            desc,
            total,
            leave,
            file,
            ncols,
            mininterval,
            maxinterval,
            miniters,
            ascii,
            disable,
            unit,
            unit_scale,
            dynamic_ncols,
            smoothing,
            bar_format,
            initial,
            position,
            postfix,
            unit_divisor,
            write_bytes,
            gui,
            **kwargs,
        )

    def write_at_line(self, *args):
        """Replace the current line with ``str()`` of *args*, joined."""
        self.clear()
        with self._lock:
            self.display(''.join(str(arg) for arg in args))

    def close_message(self, *args):
        # Overrides the close_message given to the constructor.
        self._close_message = args

    def error_message(self, *args):
        # Overrides the error_message given to the constructor.
        self._error_message = args

    def close(self):  # noqa: C901
        """
        Cleanup and (if leave=False) close the progressbar.

        Mirrors ``tqdm.close`` but displays the configured
        close message (if any) instead of the bare bar.
        """
        if self.disable:
            return

        # Prevent multiple closures
        self.disable = True

        # decrement instance pos and remove from internal set
        pos = abs(self.pos)
        self._decr_instances(self)

        # GUI mode
        if not hasattr(self, "sp"):
            return

        # annoyingly, _supports_unicode isn't good enough
        def fp_write(s):
            self.fp.write(_unicode(s))

        try:
            fp_write('')
        except ValueError as e:
            if 'closed' in str(e):
                # Output stream already closed: nothing left to do.
                return
            raise  # pragma: no cover

        with self._lock:
            if self.leave:
                if self._close_message:
                    self.display(
                        ''.join(str(arg) for arg in self._close_message), pos=pos
                    )
                elif self.last_print_n < self.n:
                    # stats for overall rate (no weighted average)
                    self.avg_time = None
                    self.display(pos=pos)
                if not max(
                    [abs(getattr(i, "pos", 0)) for i in self._instances] + [pos]
                ):
                    # only if not nested (#477)
                    fp_write('\n')
            else:
                if self._close_message:
                    self.display(
                        ''.join(str(arg) for arg in self._close_message), pos=pos
                    )
                else:
                    self.display(msg='', pos=pos)
                if not pos:
                    fp_write('\r')

    @contextmanager
    def spin(self, prefix: str):
        """Show an animated spinner (in a helper thread) while the
        body of the ``with`` block runs.
        """
        self._stop_running = threading.Event()
        spin_thread = threading.Thread(target=self._spin, args=[prefix])
        spin_thread.start()
        try:
            yield
        finally:
            # Always stop and join the spinner thread, even on error.
            self._stop_running.set()
            spin_thread.join()

    def _spin(self, prefix: str):
        # Runs in the helper thread started by :meth:`spin`.
        while not self._stop_running.is_set():
            self.write_at_line(prefix, next(self.spinner_cycle))
            sleep(0.50)

    @classmethod
    @contextmanager
    def reserve_lines(self, n):
        """Reserve *n* terminal lines, moving the cursor past them
        when the block exits.

        NOTE(review): declared ``@classmethod`` but the first
        parameter is named ``self`` — confirm intent.
        """
        try:
            yield
        finally:
            self.move_down(n - 1)

    @classmethod
    def move_down(cls, n: int):
        """Move the cursor *n* lines down by printing newlines."""
        print('\n' * n)

    def __exit__(self, *exc):
        # On exception, swap in the error message so close() shows it.
        if exc[0]:
            self._close_message = self._error_message
        return super().__exit__(*exc)
|
||||||
|
|
||||||
|
|
||||||
|
def clear():
    """Clear the terminal screen.

    NOTE(review): shells out to the ``clear`` binary — POSIX only.
    """
    os.system('clear')
|
||||||
|
|
||||||
|
|
||||||
|
def title(text: Any, ljust=32) -> str:
    """Left-justify *text* to ``ljust`` columns and append one space,
    so values printed after several titles line up.
    """
    # Note that is 38 px + 1 extra space = 39 min
    padded = str(text).ljust(ljust)
    return '{} '.format(padded)
|
||||||
|
|
||||||
|
|
||||||
|
def danger(text: Any) -> str:
    """Wrap *text* in red (error color), resetting afterwards."""
    return Fore.RED + str(text) + Fore.RESET
|
||||||
|
|
||||||
|
|
||||||
|
def warning(text: Any) -> str:
    """Wrap *text* in yellow (warning color), resetting afterwards."""
    return Fore.YELLOW + str(text) + Fore.RESET
|
||||||
|
|
||||||
|
|
||||||
|
def done(text: Any = 'done.') -> str:
    """Wrap *text* in green (success color), resetting afterwards."""
    return Fore.GREEN + str(text) + Fore.RESET
|
|
@ -0,0 +1,148 @@
|
||||||
|
import subprocess
|
||||||
|
from contextlib import suppress
|
||||||
|
from typing import Any, Set
|
||||||
|
|
||||||
|
from ereuse_devicehub.ereuse_utils import text
|
||||||
|
|
||||||
|
|
||||||
|
def run(
    *cmd: Any,
    out=subprocess.PIPE,
    err=subprocess.DEVNULL,
    to_string=True,
    check=True,
    shell=False,
    **kwargs,
) -> subprocess.CompletedProcess:
    """subprocess.run with a better API.

    :param cmd: A list of commands to execute as parameters.
                Parameters will be passed-in to ``str()`` so they
                can be any object that can handle str().
    :param out: As ``subprocess.run.stdout``.
    :param err: As ``subprocess.run.stderr``.
    :param to_string: As ``subprocess.run.universal_newlines``.
    :param check: As ``subprocess.run.check``.
    :param shell: As ``subprocess.run.shell``; when true the command
                  is joined into a single shell string.
    :param kwargs: Any other parameters that ``subprocess.run``
                   accepts.
    :return: The result of executing ``subprocess.run``.
    """
    args = tuple(str(token) for token in cmd)
    if shell:
        # The shell expects one command string, not an argv list.
        args = ' '.join(args)
    return subprocess.run(
        args,
        stdout=out,
        stderr=err,
        universal_newlines=to_string,
        check=check,
        shell=shell,
        **kwargs,
    )
|
||||||
|
|
||||||
|
|
||||||
|
class ProgressiveCmd:
    """Executes a cmd while interpreting its completion percentage.

    The completion percentage of the cmd is stored in
    :attr:`.percentage` and the user can obtain percentage
    increments by executing :meth:`.increment`.

    This class is useful to use within a child thread, so a main
    thread can request from time to time the percentage / increment
    status of the running command.
    """

    READ_LINE = None  # Sentinel: read the output one line at a time.
    DECIMALS = {4, 5, 6}  # Char counts of a decimal percentage, e.g. '12.34'.
    DECIMAL_NUMBERS = 2
    INT = {1, 2, 3}  # Char counts of an integer percentage, e.g. '7', '100'.

    def __init__(
        self,
        *cmd: Any,
        stdout=subprocess.DEVNULL,
        number_chars: Set[int] = INT,
        decimal_numbers: int = None,
        read: int = READ_LINE,
        callback=None,
        check=True,
    ):
        """
        :param cmd: The command to execute.
        :param stderr: the stderr passed-in to Popen.
        :param stdout: the stdout passed-in to Popen
        :param number_chars: The number of chars used to represent
                             the percentage. Normalized cases are
                             :attr:`.DECIMALS` and :attr:`.INT`.
        :param decimal_numbers: How many decimal digits the reported
                                percentage carries.
        :param read: For commands that do not print lines, how many
                     characters we should read between updates.
                     The percentage should be between those
                     characters.
        :param callback: If passed in, this method is executed every time
                         run gets an update from the command, passing
                         in the increment from the last execution.
                         If not passed-in, you can get such increment
                         by executing manually the ``increment`` method.
        :param check: Raise error if subprocess return code is non-zero.
        """
        self.cmd = tuple(str(c) for c in cmd)
        self.read = read
        self.step = 0
        self.check = check
        self.number_chars = number_chars
        self.decimal_numbers = decimal_numbers
        # We call subprocess in the main thread so the main thread
        # can react on ``CalledProcessError`` exceptions
        self.conn = conn = subprocess.Popen(
            self.cmd, universal_newlines=True, stderr=subprocess.PIPE, stdout=stdout
        )
        # Progress is parsed from stdout only when it is piped;
        # otherwise it is parsed from stderr.
        self.out = conn.stdout if stdout == subprocess.PIPE else conn.stderr
        self._callback = callback
        self.last_update_percentage = 0
        self.percentage = 0

    @property
    def percentage(self):
        # Latest completion percentage parsed from the cmd's output.
        return self._percentage

    @percentage.setter
    def percentage(self, v):
        # Setting the percentage also notifies the callback (if any)
        # with the increment accumulated since the last notification.
        self._percentage = v
        if self._callback and self._percentage > 0:
            increment = self.increment()
            if (
                increment > 0
            ):  # Do not bother calling if there has not been any increment
                self._callback(increment, self._percentage)

    def run(self) -> None:
        """Processes the output."""
        while True:
            # Read a fixed chunk (self.read) or one whole line.
            out = self.out.read(self.read) if self.read else self.out.readline()
            if out:
                # No percentage in this chunk: keep the previous value.
                with suppress(StopIteration):
                    self.percentage = next(
                        text.positive_percentages(
                            out, self.number_chars, self.decimal_numbers
                        )
                    )
            else:  # No more output
                break
        return_code = self.conn.wait()  # wait until cmd ends
        if self.check and return_code != 0:
            raise subprocess.CalledProcessError(
                self.conn.returncode, self.conn.args, stderr=self.conn.stderr.read()
            )

    def increment(self):
        """Returns the increment of progression from
        the last time this method is executed.
        """
        # for cmd badblocks the increment can be negative at the
        # beginning of the second step where last_percentage
        # is 100 and percentage is 0. By using max we
        # kind-of reset the increment and start counting for
        # the second step
        increment = max(self.percentage - self.last_update_percentage, 0)
        self.last_update_percentage = self.percentage
        return increment
|
|
@ -0,0 +1,171 @@
|
||||||
|
"""Functions to get values from dictionaries and list encoded key-value
|
||||||
|
strings with meaningful indentations.
|
||||||
|
|
||||||
|
Values obtained from these functions are sanitized and automatically
|
||||||
|
(or explicitly set) casted. Sanitization includes removing unnecessary
|
||||||
|
whitespaces and removing useless keywords (in the context of
|
||||||
|
computer hardware) from the texts.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from itertools import chain
|
||||||
|
from typing import Any, Iterable, Set, Type, Union
|
||||||
|
from unittest.mock import DEFAULT
|
||||||
|
|
||||||
|
import boltons.iterutils
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from ereuse_devicehub.ereuse_utils.text import clean
|
||||||
|
|
||||||
|
|
||||||
|
def dict(
    d: dict,
    path: Union[str, tuple],
    remove: Set[str] = set(),
    default: Any = DEFAULT,
    type: Type = None,
):
    """Gets a value from the dictionary and sanitizes it.

    Values are patterned and compared against sets
    of meaningless characters for device hardware.

    :param d: A dictionary potentially containing the value.
    :param path: The key or a tuple-path where the value should be.
    :param remove: Remove these words if found.
    :param default: A default value to return if not found. If not set,
                    an exception is raised.
    :param type: Enforce a type on the value (like ``int``). By default
                 dict tries to guess the correct type.
    """
    keys = (path,) if isinstance(path, str) else path
    try:
        raw = boltons.iterutils.get_path(d, keys)
    except KeyError:
        return _default(path, default)
    return sanitize(raw, remove, type=type)
|
||||||
|
|
||||||
|
|
||||||
|
def kv(
    iterable: Iterable[str],
    key: str,
    default: Any = DEFAULT,
    sep=':',
    type: Type = None,
) -> Any:
    """Key-value. Gets a value from an iterable representing key values in the
    form of a list of strings lines, for example an ``.ini`` or yaml file,
    if they are opened with ``.splitlines()``.

    :param iterable: An iterable of strings.
    :param key: The key where the value should be.
    :param default: A default value to return if not found. If not set,
                    an exception is raised.
    :param sep: What separates the key from the value in the line.
                Usually ``:`` or ``=``.
    :param type: Enforce a type on the value (like ``int``). By default
                 dict tries to guess the correct type.
    """
    for line in iterable:
        try:
            # Fix: split only on the first separator so values that
            # themselves contain ``sep`` (e.g. "time: 12:30") are kept
            # whole; the original split(sep) silently truncated them.
            k, value = line.strip().split(sep, 1)
        except ValueError:
            continue  # Line has no separator; skip it.
        if key == k:
            return sanitize(value, type=type)
    return _default(key, default)
|
||||||
|
|
||||||
|
|
||||||
|
def indents(iterable: Iterable[str], keyword: str, indent='  '):
    """For a given iterable of strings, returns blocks of the same
    left indentation.

    For example:
      foo1
        bar1
        bar2
      foo2
      foo2

    For that text, this method would yield the block starting at
    ``foo1`` (the keyword line plus its indented lines).

    :param iterable: A list of strings representing lines.
    :param keyword: The title preceding the indentation.
    :param indent: Which characters makes the indentation.
    """
    section_pos = None
    for i, line in enumerate(iterable):
        if not line.startswith(indent):
            if keyword in line:
                # Start (or restart) a section at the keyword line.
                section_pos = i
            elif section_pos is not None:
                yield iterable[section_pos:i]
                section_pos = None
                return
    # Fix: a section that runs to the end of the input used to be
    # silently dropped; yield it before finishing.
    if section_pos is not None:
        yield iterable[section_pos:]
|
||||||
|
|
||||||
|
|
||||||
|
def _default(key, default):
|
||||||
|
if default is DEFAULT:
|
||||||
|
raise IndexError('Value {} not found.'.format(key))
|
||||||
|
else:
|
||||||
|
return default
|
||||||
|
|
||||||
|
|
||||||
|
"""Gets"""
|
||||||
|
TO_REMOVE = {'none', 'prod', 'o.e.m', 'oem', r'n/a', 'atapi', 'pc', 'unknown'}
|
||||||
|
"""Delete those *words* from the value"""
|
||||||
|
assert all(v.lower() == v for v in TO_REMOVE), 'All words need to be lower-case'
|
||||||
|
|
||||||
|
REMOVE_CHARS_BETWEEN = '(){}[]'
|
||||||
|
"""
|
||||||
|
Remove those *characters* from the value.
|
||||||
|
All chars inside those are removed. Ex: foo (bar) => foo
|
||||||
|
"""
|
||||||
|
CHARS_TO_REMOVE = '*'
|
||||||
|
"""Remove the characters.
|
||||||
|
|
||||||
|
'*' Needs to be removed or otherwise it is interpreted
|
||||||
|
as a glob expression by regexes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
MEANINGLESS = {
|
||||||
|
'to be filled',
|
||||||
|
'system manufacturer',
|
||||||
|
'system product',
|
||||||
|
'sernum',
|
||||||
|
'xxxxx',
|
||||||
|
'system name',
|
||||||
|
'not specified',
|
||||||
|
'modulepartnumber',
|
||||||
|
'system serial',
|
||||||
|
'0001-067a-0000',
|
||||||
|
'partnum',
|
||||||
|
'manufacturer',
|
||||||
|
'0000000',
|
||||||
|
'fffff',
|
||||||
|
'jedec id:ad 00 00 00 00 00 00 00',
|
||||||
|
'012000',
|
||||||
|
'x.x',
|
||||||
|
'sku',
|
||||||
|
}
|
||||||
|
"""Discard a value if any of these values are inside it. """
|
||||||
|
assert all(v.lower() == v for v in MEANINGLESS), 'All values need to be lower-case'
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize(value, remove=set(), type=None):
    """Clean *value* of meaningless hardware words and characters,
    then cast it.

    :param value: The raw value; ``None`` passes through unchanged.
    :param remove: Extra lower-case words to strip besides TO_REMOVE.
    :param type: Explicit cast; when None the cleaned text is parsed
                 with yaml's SafeLoader to guess the type.
    :return: The cleaned, casted value, or None when nothing
             meaningful remains.
    """
    if value is None:
        return None
    remove = remove | TO_REMOVE
    # Fix: escape each word so literals like 'o.e.m' do not act as
    # regex wildcards (an unescaped '.' matches any character).
    regex = r'({})\W'.format('|'.join(re.escape(s) for s in remove))
    val = re.sub(regex, '', value, flags=re.IGNORECASE)
    val = '' if val.lower() in remove else val  # regex's `\W` != whole string
    val = re.sub(r'\([^)]*\)', '', val)  # Remove everything between parens
    for char_to_remove in chain(REMOVE_CHARS_BETWEEN, CHARS_TO_REMOVE):
        val = val.replace(char_to_remove, '')
    val = clean(val)
    if val and not any(meaningless in val.lower() for meaningless in MEANINGLESS):
        return type(val) if type else yaml.load(val, Loader=yaml.SafeLoader)
    return None
|
|
@ -0,0 +1,143 @@
|
||||||
|
from inflection import (
|
||||||
|
camelize,
|
||||||
|
dasherize,
|
||||||
|
parameterize,
|
||||||
|
pluralize,
|
||||||
|
singularize,
|
||||||
|
underscore,
|
||||||
|
)
|
||||||
|
|
||||||
|
HID_CONVERSION_DOC = """
|
||||||
|
The HID is the result of concatenating,
|
||||||
|
in the following order: the type of device (ex. Computer),
|
||||||
|
the manufacturer name, the model name, and the S/N. It is joined
|
||||||
|
with hyphens, and adapted to comply with the URI specification, so
|
||||||
|
it can be used in the URI identifying the device on the Internet.
|
||||||
|
The conversion is done as follows:
|
||||||
|
|
||||||
|
1. non-ASCII characters are converted to their ASCII equivalent or
|
||||||
|
removed.
|
||||||
|
2. Characterst that are not letters or numbers are converted to
|
||||||
|
underscores, in a way that there are no trailing underscores
|
||||||
|
and no underscores together, and they are set to lowercase.
|
||||||
|
|
||||||
|
Ex. ``laptop-acer-aod270-lusga_0d0242201212c7614``
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class Naming:
    """
    In DeviceHub there are many ways to name the same resource (yay!), this is because of all the different
    types of schemas we work with. But no worries, we offer easy ways to change between naming conventions.

    - TypeCase (or resource-type) is the one represented with '@type' and follow PascalCase and always singular.
      This is the standard preferred one.
    - resource-case is the eve naming, using the standard URI conventions. This one is tricky, as although the types
      are represented in singular, the URI convention is to be plural (Event vs events), however just few of them
      follow this rule (Snapshot [type] to snapshot [resource]). You can set which ones you want to change their
      number.
    - python_case is the one used by python for its folders and modules. It is underscored and always singular.
    """

    TYPE_PREFIX = ':'
    RESOURCE_PREFIX = '_'

    @staticmethod
    def resource(string: str):
        """Convert to resource-case (dasherized, pluralized).

        :param string: String can be type, resource or python case
        """
        try:
            prefix, resulting_type = Naming.pop_prefix(string)
            prefix += Naming.RESOURCE_PREFIX
        except IndexError:
            prefix = ''
            resulting_type = string
        resulting_type = dasherize(underscore(resulting_type))
        return prefix + pluralize(resulting_type)

    @staticmethod
    def python(string: str):
        """Convert to python_case (underscored, singular).

        :param string: String can be type, resource or python case
        """
        return underscore(singularize(string))

    @staticmethod
    def type(string: str):
        """Convert to TypeCase (PascalCase, singular), keeping any
        prefix.
        """
        try:
            prefix, resulting_type = Naming.pop_prefix(string)
            prefix += Naming.TYPE_PREFIX
        except IndexError:
            prefix = ''
            resulting_type = string
        resulting_type = singularize(resulting_type)
        resulting_type = resulting_type.replace(
            '-', '_'
        )  # camelize does not convert '-' but '_'
        return prefix + camelize(resulting_type)

    @staticmethod
    def url_word(word: str):
        """
        Normalizes a full word to be inserted to an url. If the word has spaces, etc, is used '_' and not '-'
        """
        return parameterize(word, '_')

    @staticmethod
    def pop_prefix(string: str):
        """Erases the prefix and returns it.
        :throws IndexError: There is no prefix.
        :return A set with two elements: 1- the prefix, 2- the type without it.
        """
        result = string.split(Naming.TYPE_PREFIX)
        if len(result) == 1:
            result = string.split(Naming.RESOURCE_PREFIX)
            if len(result) == 1:
                raise IndexError()
        return result

    @staticmethod
    def new_type(type_name: str, prefix: str or None = None) -> str:
        """
        Creates a resource type with optionally a prefix.

        Using the rules of JSON-LD, we use prefixes to disambiguate between different types with the same name:
        one can Accept a device or a project. In eReuse.org there are different events with the same names, in
        linked-data terms they have different URI. In eReuse.org, we solve this with the following:

        "@type": "devices:Accept" // the URI for these events is 'devices/events/accept'
        "@type": "projects:Accept" // the URI for these events is 'projects/events/accept
        ...

        Type is only used in events, when there are ambiguities. The rest of

        "@type": "devices:Accept"
        "@type": "Accept"

        But these not:

        "@type": "projects:Accept" // it is an event from a project
        "@type": "Accept" // it is an event from a device
        """
        if Naming.TYPE_PREFIX in type_name:
            raise TypeError(
                'Cannot create new type: type {} is already prefixed.'.format(type_name)
            )
        prefix = (prefix + Naming.TYPE_PREFIX) if prefix is not None else ''
        return prefix + type_name

    @staticmethod
    def hid(type: str, manufacturer: str, model: str, serial_number: str) -> str:
        """Computes the HID for the given properties of a device.
        The HID is suitable to use to an URI.
        """
        # Fix: the original built ``docstring + HID_CONVERSION_DOC`` as
        # a dead expression on every call; see HID_CONVERSION_DOC for
        # the full conversion rules.
        return '{type}-{mn}-{ml}-{sn}'.format(
            type=Naming.url_word(type),
            mn=Naming.url_word(manufacturer),
            ml=Naming.url_word(model),
            sn=Naming.url_word(serial_number),
        )
|
|
@ -0,0 +1,85 @@
|
||||||
|
class NestedLookup:
    @staticmethod
    def __new__(cls, document, references, operation):
        """Lookup a key in a nested document, return a list of values.

        Every matching ``(containing_dict, key)`` pair is appended to
        *references* as a side effect.

        From https://github.com/russellballestrini/nested-lookup/ but in python 3
        """
        matches = NestedLookup._nested_lookup(document, references, operation)
        return list(matches)

    @staticmethod
    def key_equality_factory(key_to_find):
        """Build a predicate matching entries whose key equals *key_to_find*."""

        def _predicate(key, _):
            return key == key_to_find

        return _predicate

    @staticmethod
    def is_sub_type_factory(type):
        """Build a predicate matching entries whose value is a *type* subtype."""

        def _predicate(_, value):
            return is_sub_type(value, type)

        return _predicate

    @staticmethod
    def key_value_equality_factory(key_to_find, value_to_find):
        """Build a predicate matching entries equal to (key, value)."""

        def _predicate(key, value):
            return key == key_to_find and value == value_to_find

        return _predicate

    @staticmethod
    def key_value_containing_value_factory(key_to_find, value_to_find):
        """Build a predicate matching the key whose value contains *value_to_find*."""

        def _predicate(key, value):
            return key == key_to_find and value_to_find in value

        return _predicate

    @staticmethod
    def _nested_lookup(document, references, operation):  # noqa: C901
        """Lookup a key in a nested document, yield a value."""
        if isinstance(document, list):
            # Recurse into every element of a list document.
            for element in document:
                yield from NestedLookup._nested_lookup(element, references, operation)

        if isinstance(document, dict):
            for key, value in document.items():
                if operation(key, value):
                    # Record where the match lives so callers can mutate it.
                    references.append((document, key))
                    yield value
                elif isinstance(value, dict):
                    yield from NestedLookup._nested_lookup(value, references, operation)
                elif isinstance(value, list):
                    for element in value:
                        yield from NestedLookup._nested_lookup(
                            element, references, operation
                        )
|
||||||
|
|
||||||
|
|
||||||
|
def is_sub_type(value, resource_type):
    """Return whether *value* — a class, or otherwise an instance — is a
    subclass (or instance of a subclass) of *resource_type*.
    """
    try:
        result = issubclass(value, resource_type)
    except TypeError:
        # *value* was not a class; test its class instead.
        result = issubclass(value.__class__, resource_type)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_nested_dicts_with_key_value(parent_dict: dict, key, value):
    """Return all nested dictionaries that contain a key with a specific value. A sub-case of NestedLookup."""
    found = []
    predicate = NestedLookup.key_value_equality_factory(key, value)
    NestedLookup(parent_dict, found, predicate)
    # Yield only the containing dicts, dropping the matched keys.
    return (containing_dict for containing_dict, _ in found)
|
||||||
|
|
||||||
|
|
||||||
|
def get_nested_dicts_with_key_containing_value(parent_dict: dict, key, value):
    """Return all nested dictionaries that contain a key with a specific value. A sub-case of NestedLookup."""
    found = []
    predicate = NestedLookup.key_value_containing_value_factory(key, value)
    NestedLookup(parent_dict, found, predicate)
    # Yield only the containing dicts, dropping the matched keys.
    return (containing_dict for containing_dict, _ in found)
|
|
@ -0,0 +1,285 @@
|
||||||
|
import base64
|
||||||
|
import json
|
||||||
|
from typing import Any, Dict, Iterable, Tuple, TypeVar, Union
|
||||||
|
|
||||||
|
import boltons.urlutils
|
||||||
|
from requests import Response
|
||||||
|
from requests_toolbelt.sessions import BaseUrlSession
|
||||||
|
from urllib3 import Retry
|
||||||
|
|
||||||
|
from ereuse_devicehub import ereuse_utils
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
Query = Iterable[Tuple[str, Any]]
|
||||||
|
|
||||||
|
Status = Union[int]
|
||||||
|
|
||||||
|
# Widen ``Status`` so it accepts either a plain int status code or any
# object exposing a ``status`` attribute, when Protocol is available.
try:
    from typing import Protocol  # Only py 3.6+
except ImportError:
    # Older Pythons: Protocol is unavailable, keep the narrower alias.
    pass
else:
    class HasStatusProperty(Protocol):
        def __init__(self, *args, **kwargs) -> None:
            self.status = ...  # type: int

    Status = Union[int, HasStatusProperty]
|
||||||
|
|
||||||
|
JSON = 'application/json'
|
||||||
|
ANY = '*/*'
|
||||||
|
AUTH = 'Authorization'
|
||||||
|
BASIC = 'Basic {}'
|
||||||
|
URL = Union[str, boltons.urlutils.URL]
|
||||||
|
Data = Union[str, dict, ereuse_utils.Dumpeable]
|
||||||
|
Res = Tuple[Union[Dict[str, Any], str], Response]
|
||||||
|
|
||||||
|
|
||||||
|
# actual code
|
||||||
|
|
||||||
|
|
||||||
|
class Session(BaseUrlSession):
    """A BaseUrlSession that always raises for status and sets a
    timeout for all requests by default.
    """

    def __init__(self, base_url=None, timeout=15):
        """
        :param base_url: Prefix prepended to every request url.
        :param timeout: Time requests will wait to receive the first
                        response bytes (not the whole) from the server.
                        In seconds.
        """
        super().__init__(base_url)
        self.timeout = timeout

        # Raise on 4xx/5xx for every response of this session.
        def _raise_for_status(response, *args, **kwargs):
            return response.raise_for_status()

        self.hooks['response'] = _raise_for_status

    def request(self, method, url, *args, **kwargs):
        # Apply the session-wide timeout unless the caller set its own.
        kwargs.setdefault('timeout', self.timeout)
        return super().request(method, url, *args, **kwargs)

    def __repr__(self):
        return '<{} base={}>.'.format(self.__class__.__name__, self.base_url)
|
||||||
|
|
||||||
|
|
||||||
|
class DevicehubClient(Session):
    """A Session pre-configured to connect to Devicehub-like APIs."""

    def __init__(self, base_url: URL = None,
                 token: str = None,
                 inventory: Union[str, bool] = False,
                 **kwargs):
        """Initializes a session pointing to a Devicehub endpoint.

        Authentication can be passed-in as a token for endpoints
        that require them, now at init, after when executing the method,
        or in between with ``set_auth``.

        :param base_url: An url pointing to a endpoint.
        :param token: A Base64 encoded token, as given by a devicehub.
                      You can encode tokens by executing `encode_token`.
        :param inventory: If True, use the default inventory of the user.
                          If False, do not use inventories (single-inventory
                          database, this is the option by default).
                          If a string, always use the set inventory.
        """
        if isinstance(base_url, boltons.urlutils.URL):
            base_url = base_url.to_text()
        else:
            # NOTE(review): a ``None`` base_url becomes the literal string
            # 'None' here — presumably callers always pass one; confirm.
            base_url = str(base_url)
        super().__init__(base_url, **kwargs)
        assert base_url[-1] != '/', 'Do not provide a final slash to the URL'
        if token:
            self.set_auth(token)
        self.inventory = inventory
        self.user = None  # type: Dict[str, object]

    def set_auth(self, token):
        # The token is expected to be already Base64-encoded; see encode_token.
        self.headers['Authorization'] = 'Basic {}'.format(token)

    @classmethod
    def encode_token(cls, token: str):
        """Encodes a token suitable for a Devicehub endpoint."""
        # Basic credentials are 'user:password'; the token acts as the
        # user with an empty password, then the pair is Base64-encoded.
        return base64.b64encode(str.encode(str(token) + ':')).decode()

    def login(self, email: str, password: str) -> Dict[str, Any]:
        """Performs login, authenticating future requests.

        :return: The logged-in user.
        """
        user, _ = self.post('/users/login/', {'email': email, 'password': password}, status=200)
        self.set_auth(user['token'])
        self.user = user
        # Default to the user's first inventory for subsequent requests.
        self.inventory = user['inventories'][0]['id']
        return user

    # The get/post/delete/patch wrappers below only re-declare defaults
    # (e.g. expected status codes); requests' verb helpers dispatch back
    # to this class' ``request`` method, which does the real work.

    def get(self,
            base_url: URL,
            uri=None,
            status: Status = 200,
            query: Query = tuple(),
            accept=JSON,
            content_type=JSON,
            headers: dict = None,
            token=None,
            **kwargs) -> Res:
        return super().get(base_url,
                           uri=uri,
                           status=status,
                           query=query,
                           accept=accept,
                           content_type=content_type,
                           headers=headers,
                           token=token, **kwargs)

    def post(self, base_url: URL,
             data: Data,
             uri=None,
             status: Status = 201,
             query: Query = tuple(),
             accept=JSON,
             content_type=JSON,
             headers: dict = None,
             token=None,
             **kwargs) -> Res:
        return super().post(base_url,
                            data=data,
                            uri=uri,
                            status=status,
                            query=query,
                            accept=accept,
                            content_type=content_type,
                            headers=headers,
                            token=token, **kwargs)

    def delete(self,
               base_url: URL,
               uri=None,
               status: Status = 204,
               query: Query = tuple(),
               accept=JSON,
               content_type=JSON,
               headers: dict = None,
               token=None,
               **kwargs) -> Res:
        return super().delete(base_url,
                              uri=uri,
                              status=status,
                              query=query,
                              accept=accept,
                              content_type=content_type,
                              headers=headers,
                              token=token, **kwargs)

    def patch(self, base_url: URL,
              data: Data,
              uri=None,
              status: Status = 201,
              query: Query = tuple(),
              accept=JSON,
              content_type=JSON,
              headers: dict = None,
              token=None,
              **kwargs) -> Res:
        return super().patch(base_url,
                             data=data,
                             uri=uri,
                             status=status,
                             query=query,
                             accept=accept,
                             content_type=content_type,
                             headers=headers,
                             token=token, **kwargs)

    def request(self,
                method,
                base_url: URL,
                uri=None,
                status: Status = 200,
                query: Query = tuple(),
                accept=JSON,
                content_type=JSON,
                data=None,
                headers: dict = None,
                token=None,
                **kw) -> Res:
        """Builds the final url, performs the request and asserts the
        response status.

        :raise WrongStatus: When the response status code differs from
                            the expected *status*.
        :return: A tuple of (parsed JSON or raw content, Response).
        """
        assert not kw.get('json', None), 'Do not use json; use data.'
        # We allow uris without slashes for item endpoints
        uri = str(uri) if uri else None
        headers = headers or {}
        headers['Accept'] = accept
        headers['Content-Type'] = content_type
        if token:
            # A per-request token overrides the session-wide auth header.
            headers['Authorization'] = 'Basic {}'.format(token)
        if data and content_type == JSON:
            # sort_keys keeps the serialized payload deterministic.
            data = json.dumps(data, cls=ereuse_utils.JSONEncoder, sort_keys=True)
        url = base_url if not isinstance(base_url, boltons.urlutils.URL) else base_url.to_text()
        assert url[-1] == '/', 'base_url should end with a slash'
        if self.inventory and not isinstance(self.inventory, bool):
            # Prefix the concrete inventory id to the path.
            # NOTE(review): this formats ``base_url`` (not ``url``) — fine
            # when base_url is a str, but an URL object would need to_text;
            # confirm against callers.
            url = '{}/{}'.format(self.inventory, base_url)
        assert url[-1] == '/', 'base_url should end with a slash'
        if uri:
            url = self.parse_uri(url, uri)
        if query:
            url = self.parse_query(url, query)
        response = super().request(method, url, data=data, headers=headers, **kw)
        if status:
            # ``status`` may be an int or an object with a ``code`` attr.
            _status = getattr(status, 'code', status)
            if _status != response.status_code:
                raise WrongStatus('Req to {} failed bc the status is {} but it should have been {}'
                                  .format(url, response.status_code, _status))
        data = response.content if not accept == JSON or not response.content else response.json()
        return data, response

    @staticmethod
    def parse_uri(base_url, uri):
        # Joins *uri* onto *base_url* following URL navigation rules.
        return boltons.urlutils.URL(base_url).navigate(uri).to_text()

    @staticmethod
    def parse_query(uri, query):
        # list/dict query values are serialized to JSON before encoding.
        url = boltons.urlutils.URL(uri)
        url.query_params = boltons.urlutils.QueryParamDict([
            (k, json.dumps(v, cls=ereuse_utils.JSONEncoder) if isinstance(v, (list, dict)) else v)
            for k, v in query
        ])
        return url.to_text()

    def __repr__(self):
        return '<{} base={} inv={} user={}>.'.format(self.__class__.__name__, self.base_url,
                                                     self.inventory, self.user)
|
||||||
|
|
||||||
|
|
||||||
|
class WrongStatus(Exception):
    """Raised by DevicehubClient.request when the response status code
    differs from the expected ``status`` argument.
    """

    pass
|
||||||
|
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from requests.adapters import HTTPAdapter
|
||||||
|
|
||||||
|
T = TypeVar('T', bound=requests.Session)
|
||||||
|
|
||||||
|
|
||||||
|
def retry(session: T,
          retries=3,
          backoff_factor=1,
          status_to_retry=(500, 502, 504)) -> T:
    """Configures requests from the given session to retry in
    failed requests due to connection errors, HTTP response codes
    with ``status_to_retry`` and 30X redirections.

    Mutates *session* by mounting a retrying adapter for both
    ``http://`` and ``https://`` and returns the same session.

    :param session: The session to configure. If falsy, a fresh
                    ``requests.Session`` is created and returned.
    :param retries: Maximum number of total/read/connect retries.
    :param backoff_factor: urllib3 backoff multiplier between attempts.
    :param status_to_retry: HTTP status codes that trigger a retry.
    """
    # From https://www.peterbe.com/plog/best-practice-with-retries-with-requests
    # Doc in https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html#module-urllib3.util.retry
    session = session or requests.Session()
    # NOTE(review): ``method_whitelist`` was deprecated in urllib3 1.26
    # and removed in 2.0 (renamed ``allowed_methods``); this call will
    # fail under urllib3 >= 2 — confirm the pinned urllib3 version.
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_to_retry,
        method_whitelist=False  # Retry too in non-idempotent methods like POST
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session
|
|
@ -0,0 +1,165 @@
|
||||||
|
from contextlib import suppress
|
||||||
|
from typing import Dict, Tuple, Union
|
||||||
|
|
||||||
|
from flask import json
|
||||||
|
from flask.testing import FlaskClient
|
||||||
|
from werkzeug.wrappers import Response
|
||||||
|
|
||||||
|
from ereuse_devicehub.ereuse_utils.session import ANY, AUTH, BASIC, DevicehubClient, JSON, Query, Status
|
||||||
|
|
||||||
|
ANY = ANY
|
||||||
|
AUTH = AUTH
|
||||||
|
BASIC = BASIC
|
||||||
|
|
||||||
|
Res = Tuple[Union[Dict[str, object], str], Response]
|
||||||
|
|
||||||
|
|
||||||
|
class Client(FlaskClient):
    """
    A client for the REST servers of DeviceHub and WorkbenchServer.

    - JSON first. By default it sends and expects receiving JSON files.
    - Assert regular status responses, like 200 for GET.
    - Auto-parses a nested dictionary of URL query params to the
      URL version with nested properties to JSON.
    - Meaningful headers format: a dictionary of name-values.
    """

    def open(self,
             uri: str,
             status: Status = 200,
             query: Query = tuple(),
             accept=JSON,
             content_type=JSON,
             item=None,
             headers: dict = None,
             **kw) -> Res:
        """
        :param uri: The URI without basename and query.
        :param status: Assert the response for specified status. Set
                       None to avoid.
        :param query: The query of the URL in the form of
                      [(key1, value1), (key2, value2), (key1, value3)].
                      If value is a list or a dict, they will be
                      converted to JSON.
                      Please, see :class:`boltons.urlutils.
                      QueryParamDict` for more info.
        :param accept: The Accept header. If 'application/json'
                       (default) then it will parse incoming JSON.
        :param item: The last part of the path. Useful to do something
                     like ``get('db/accounts', item='24')``. If you
                     use ``item``, you can't set a final backslash into
                     ``uri`` (or the parse will fail).
        :param headers: A dictionary of headers, where keys are header
                        names and values their values.
                        Ex: {'Accept', 'application/json'}.
        :param kw: Kwargs passed into parent ``open``.
        :return: A tuple with: 1. response data, as a string or JSON
                 depending of Accept, and 2. the Response object.
        """
        j_encoder = self.application.json_encoder
        headers = headers or {}
        headers['Accept'] = accept
        headers['Content-Type'] = content_type
        # Werkzeug's test client takes headers as a list of pairs.
        headers = [(k, v) for k, v in headers.items()]
        if 'data' in kw and content_type == JSON:
            kw['data'] = json.dumps(kw['data'], cls=j_encoder)
        if item:
            uri = DevicehubClient.parse_uri(uri, item)
        if query:
            uri = DevicehubClient.parse_query(uri, query)
        response = super().open(uri, headers=headers, **kw)
        if status:
            # ``status`` may be an int or an object with a ``code`` attr.
            _status = getattr(status, 'code', status)
            assert response.status_code == _status, \
                'Expected status code {} but got {}. Returned data is:\n' \
                '{}'.format(_status, response.status_code, response.get_data().decode())

        data = response.get_data()
        with suppress(UnicodeDecodeError):
            # Binary payloads that can't be decoded stay as bytes.
            data = data.decode()
        if accept == JSON:
            data = json.loads(data) if data else {}
        return data, response

    def get(self,
            uri: str,
            query: Query = tuple(),
            item: str = None,
            status: Status = 200,
            accept: str = JSON,
            headers: dict = None,
            **kw) -> Res:
        """
        Performs a GET.

        See the parameters in :meth:`ereuse_utils.test.Client.open`.
        Moreover:

        :param query: A dictionary of query params. If a parameter is a
                      dict or a list, it will be parsed to JSON, then
                      all params are encoded with ``urlencode``.
        :param kw: Kwargs passed into parent ``open``.
        """
        return super().get(uri, item=item, status=status, accept=accept, headers=headers,
                           query=query, **kw)

    def post(self,
             uri: str,
             data: str or dict,
             query: Query = tuple(),
             status: Status = 201,
             content_type: str = JSON,
             accept: str = JSON,
             headers: dict = None,
             **kw) -> Res:
        """
        Performs a POST.

        See the parameters in :meth:`ereuse_utils.test.Client.open`.
        """
        return super().post(uri, data=data, status=status, content_type=content_type,
                            accept=accept, headers=headers, query=query, **kw)

    def patch(self,
              uri: str,
              data: str or dict,
              query: Query = tuple(),
              status: Status = 200,
              content_type: str = JSON,
              item: str = None,
              accept: str = JSON,
              headers: dict = None,
              **kw) -> Res:
        """
        Performs a PATCH.

        See the parameters in :meth:`ereuse_utils.test.Client.open`.
        """
        return super().patch(uri, item=item, data=data, status=status, content_type=content_type,
                             accept=accept, headers=headers, query=query, **kw)

    def put(self,
            uri: str,
            data: str or dict,
            query: Query = tuple(),
            status: Status = 201,
            content_type: str = JSON,
            item: str = None,
            accept: str = JSON,
            headers: dict = None,
            **kw) -> Res:
        """Performs a PUT. See :meth:`ereuse_utils.test.Client.open`."""
        return super().put(uri, item=item, data=data, status=status, content_type=content_type,
                           accept=accept, headers=headers, query=query, **kw)

    def delete(self,
               uri: str,
               query: Query = tuple(),
               item: str = None,
               status: Status = 204,
               accept: str = JSON,
               headers: dict = None,
               **kw) -> Res:
        """Performs a DELETE. See :meth:`ereuse_utils.test.Client.open`."""
        return super().delete(uri, query=query, item=item, status=status, accept=accept,
                              headers=headers, **kw)
|
|
@ -0,0 +1,72 @@
|
||||||
|
import ast
|
||||||
|
import re
|
||||||
|
from typing import Iterator, Set, Union
|
||||||
|
|
||||||
|
|
||||||
|
def grep(text: str, value: str):
    """Yield the lines of *text* that contain *value*.

    Like ``grep`` but case-sensitive (``in`` does an exact substring test).
    """
    return (line for line in text.splitlines() if value in line)
|
||||||
|
|
||||||
|
|
||||||
|
def between(text: str, begin='(', end=')'):
    """Dead easy text between two characters.

    Takes the text after the *last* ``begin`` and before the first
    ``end`` that follows it. Not recursive or repetitions.
    """
    _, _, tail = text.rpartition(begin)
    inner, _, _ = tail.partition(end)
    return inner
|
||||||
|
|
||||||
|
|
||||||
|
def numbers(text: str) -> Iterator[Union[int, float]]:
    """Gets numbers in strings with other characters.

    Integer Numbers: 1 2 3 987 +4 -8
    Decimal Numbers: 0.1 2. .3 .987 +4.0 -0.8
    Scientific Notation: 1e2 0.2e2 3.e2 .987e2 +4e-1 -8.e+2
    Numbers with percentages: 49% 32.39%

    This returns int or float.
    """
    # From https://regexr.com/33jqd
    pattern = r'[+-]?(?=\.\d|\d)(?:\d+)?(?:\.?\d*)(?:[eE][+-]?\d+)?'
    for match in re.finditer(pattern, text):
        # literal_eval turns '3' into int and '3.0'/'3e1' into float.
        yield ast.literal_eval(match.group())
|
||||||
|
|
||||||
|
|
||||||
|
def positive_percentages(
    text: str, lengths: Set[int] = None, decimal_numbers: int = None
) -> Iterator[Union[int, float]]:
    """Gets numbers postfixed with a '%' in strings with other characters.

    1)100% 2)56.78% 3)56 78.90% 4)34.6789% some text

    :param text: The text to search for.
    :param lengths: A set of lengths that the percentage
                    number should have to be considered valid.
                    Ex. {5,6} would validate '90.32' and '100.00'
    :param decimal_numbers: Exact count of digits required after the
                            last '.' for the number to be yielded;
                            numbers without a '.' are skipped.
    """
    # From https://regexr.com/3aumh
    # Fixed character class: the original pattern was [\d|\.]+% which
    # also matched a literal '|' (inside a class '|' is not alternation),
    # making float() raise ValueError on inputs like '1|50%'.
    for x in re.finditer(r'[\d.]+%', text):
        num = x.group()[:-1]  # drop the trailing '%'
        if lengths:
            if len(num) not in lengths:
                continue
        if decimal_numbers:
            try:
                pos = num.rindex('.')
            except ValueError:
                continue  # no decimal point at all
            else:
                if len(num) - pos - 1 != decimal_numbers:
                    continue
        yield float(num)
|
||||||
|
|
||||||
|
|
||||||
|
def macs(text: str) -> Iterator[str]:
    """Find MACs in strings with other characters."""
    # Six colon-separated groups of hex-ish characters.
    octet = r'[a-fA-F0-9.+_-]+'
    pattern = ':'.join([octet] * 6)
    return (match.group() for match in re.finditer(pattern, text))
|
||||||
|
|
||||||
|
|
||||||
|
def clean(text: str) -> str:
    """Trim *text* and collapse every whitespace run into one space."""
    words = text.split()
    return ' '.join(words)
|
|
@ -0,0 +1,80 @@
|
||||||
|
import usb.core
|
||||||
|
import usb.util
|
||||||
|
from usb import CLASS_MASS_STORAGE
|
||||||
|
|
||||||
|
from ereuse_devicehub.ereuse_utils.naming import Naming
|
||||||
|
|
||||||
|
|
||||||
|
def plugged_usbs(multiple=True) -> map or dict:  # noqa: C901
    """
    Gets the plugged-in USB Flash drives (pen-drives).

    If multiple is true, it returns a map, and a dict otherwise.

    If multiple is false, this method will raise a :class:`.NoUSBFound` if no USB is found.

    :raise UsbDoesNotHaveHid: When a found pen-drive lacks any of
                              manufacturer, product or serial number.
    :raise OSError: When the USB string descriptors cannot be read
                    (usually a permissions problem).
    """

    class FindPenDrives(object):
        """pyusb custom_match callable selecting mass-storage devices
        that do not look like card readers.
        """

        # From https://github.com/pyusb/pyusb/blob/master/docs/tutorial.rst
        def __init__(self, class_):
            self._class = class_

        def __call__(self, device):
            # first, let's check the device
            if device.bDeviceClass == self._class:
                return True
            # ok, transverse all devices to find an
            # interface that matches our class
            for cfg in device:
                # find_descriptor: what's it?
                intf = usb.util.find_descriptor(cfg, bInterfaceClass=self._class)
                # We don't want Card readers
                if intf is not None:
                    try:
                        product = intf.device.product.lower()
                    except ValueError as e:
                        # pyusb raises ValueError mentioning 'langid' when
                        # string descriptors can't be read without privileges.
                        if 'langid' in str(e):
                            raise OSError(
                                'Cannot get "langid". Do you have permissions?'
                            )
                        else:
                            raise e
                    if 'crw' not in product and 'reader' not in product:
                        return True
            return False

    def get_pendrive(pen: usb.Device) -> dict:
        # Build the plain-dict description of one pen-drive.
        if not pen.manufacturer or not pen.product or not pen.serial_number:
            raise UsbDoesNotHaveHid()
        # NOTE(review): the ``or`` fallbacks below look unreachable given
        # the guard above rejects falsy manufacturer/product — confirm
        # whether whitespace-only strings were the intended case.
        manufacturer = pen.manufacturer.strip() or str(pen.idVendor)
        model = pen.product.strip() or str(pen.idProduct)
        serial_number = pen.serial_number.strip()
        hid = Naming.hid('USBFlashDrive', manufacturer, model, serial_number)
        return {
            'id': hid,  # Make live easier to DeviceHubClient by using _id
            'hid': hid,
            'type': 'USBFlashDrive',
            'serialNumber': serial_number,
            'model': model,
            'manufacturer': manufacturer,
            'vendorId': pen.idVendor,
            'productId': pen.idProduct,
        }

    result = usb.core.find(
        find_all=multiple, custom_match=FindPenDrives(CLASS_MASS_STORAGE)
    )
    if multiple:
        # Lazy: devices are only queried when the map is consumed.
        return map(get_pendrive, result)
    else:
        if not result:
            raise NoUSBFound()
        return get_pendrive(result)
|
||||||
|
|
||||||
|
|
||||||
|
class NoUSBFound(Exception):
    """Raised by plugged_usbs(multiple=False) when no USB flash drive
    is plugged in.
    """

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class UsbDoesNotHaveHid(Exception):
    """Raised when a pen-drive lacks manufacturer, product or serial
    number, so no HID can be computed for it.
    """

    pass
|
|
@ -1,10 +1,19 @@
|
||||||
from flask import g
|
from boltons.urlutils import URL
|
||||||
|
from flask import current_app as app
|
||||||
|
from flask import g, session
|
||||||
from flask_wtf import FlaskForm
|
from flask_wtf import FlaskForm
|
||||||
from werkzeug.security import generate_password_hash
|
from werkzeug.security import generate_password_hash
|
||||||
from wtforms import BooleanField, EmailField, PasswordField, validators
|
from wtforms import (
|
||||||
|
BooleanField,
|
||||||
|
EmailField,
|
||||||
|
PasswordField,
|
||||||
|
StringField,
|
||||||
|
URLField,
|
||||||
|
validators,
|
||||||
|
)
|
||||||
|
|
||||||
from ereuse_devicehub.db import db
|
from ereuse_devicehub.db import db
|
||||||
from ereuse_devicehub.resources.user.models import User
|
from ereuse_devicehub.resources.user.models import SanitizationEntity, User
|
||||||
|
|
||||||
|
|
||||||
class LoginForm(FlaskForm):
|
class LoginForm(FlaskForm):
|
||||||
|
@ -60,6 +69,17 @@ class LoginForm(FlaskForm):
|
||||||
if not user.is_active:
|
if not user.is_active:
|
||||||
self.form_errors.append(self.error_messages['inactive'])
|
self.form_errors.append(self.error_messages['inactive'])
|
||||||
|
|
||||||
|
if 'dpp' in app.blueprints.keys():
|
||||||
|
dlt_keys = user.get_dlt_keys(
|
||||||
|
self.password.data
|
||||||
|
).get('data', {})
|
||||||
|
|
||||||
|
token_dlt = dlt_keys.get('api_token')
|
||||||
|
eth_pub_key = dlt_keys.get('eth_pub_key')
|
||||||
|
session['token_dlt'] = token_dlt
|
||||||
|
session['eth_pub_key'] = eth_pub_key
|
||||||
|
session['rols'] = user.get_rols()
|
||||||
|
|
||||||
return user.is_active
|
return user.is_active
|
||||||
|
|
||||||
|
|
||||||
|
@ -95,9 +115,78 @@ class PasswordForm(FlaskForm):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def save(self, commit=True):
|
def save(self, commit=True):
|
||||||
|
if 'dpp' in app.blueprints.keys():
|
||||||
|
keys_dlt = g.user.get_dlt_keys(self.password.data)
|
||||||
|
g.user.reset_dlt_keys(self.newpassword.data, keys_dlt)
|
||||||
|
|
||||||
|
token_dlt = (
|
||||||
|
g.user.get_dlt_keys(self.newpassword.data)
|
||||||
|
.get('data', {})
|
||||||
|
.get('api_token')
|
||||||
|
)
|
||||||
|
session['token_dlt'] = token_dlt
|
||||||
|
|
||||||
g.user.password = self.newpassword.data
|
g.user.password = self.newpassword.data
|
||||||
|
|
||||||
db.session.add(g.user)
|
db.session.add(g.user)
|
||||||
if commit:
|
if commit:
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
|
class SanitizationEntityForm(FlaskForm):
    """Form to create or update the current user's sanitization entity
    (logo, company and responsible/supervisor names).
    """

    logo = URLField(
        'Logo',
        [validators.Optional(), validators.URL()],
        render_kw={
            'class': "form-control",
            # Typo fixed ('acceptd' -> 'accepted').
            "placeholder": "Url where is the logo - accepted only .png, .jpg, .gif, svg",
        },
    )
    company_name = StringField('Company Name', render_kw={'class': "form-control"})
    location = StringField('Location', render_kw={'class': "form-control"})
    responsable_person = StringField(
        'Responsable person', render_kw={'class': "form-control"}
    )
    supervisor_person = StringField(
        'Supervisor person', render_kw={'class': "form-control"}
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The model stores the logo as a boltons URL; render it as text.
        if isinstance(self.logo.data, URL):
            self.logo.data = self.logo.data.to_text()

    def validate(self, extra_validators=None):
        """Run the standard validators, then check the logo extension."""
        is_valid = super().validate(extra_validators)

        if not is_valid:
            return False

        if not self.logo.data:
            # Logo is optional; nothing more to check.
            return True

        extensions = ["jpg", "jpeg", "png", "gif", "svg"]
        if self.logo.data.lower().split(".")[-1] not in extensions:
            # Message fixed: it previously claimed only .PNG, .JPG and
            # .GIF were accepted, contradicting the list above.
            txt = "Error in Url field - accepted only .png, .jpg, .jpeg, .gif and .svg extensions"
            self.logo.errors = [txt]
            return False

        return True

    def save(self, commit=True):
        """Persist the form into the user's SanitizationEntity row,
        creating it when it does not exist yet.
        """
        if isinstance(self.logo.data, str):
            # Store the logo back as a boltons URL, as the model expects.
            self.logo.data = URL(self.logo.data)

        sanitation_data = SanitizationEntity.query.filter_by(user_id=g.user.id).first()

        if not sanitation_data:
            sanitation_data = SanitizationEntity(user_id=g.user.id)
            self.populate_obj(sanitation_data)
            db.session.add(sanitation_data)
        else:
            self.populate_obj(sanitation_data)

        if commit:
            db.session.commit()
        return
|
|
@ -29,12 +29,21 @@ from wtforms import (
|
||||||
)
|
)
|
||||||
from wtforms.fields import FormField
|
from wtforms.fields import FormField
|
||||||
|
|
||||||
|
from ereuse_devicehub import messages
|
||||||
from ereuse_devicehub.db import db
|
from ereuse_devicehub.db import db
|
||||||
from ereuse_devicehub.inventory.models import DeliveryNote, ReceiverNote, Transfer
|
from ereuse_devicehub.inventory.models import (
|
||||||
from ereuse_devicehub.parser.models import PlaceholdersLog
|
DeliveryNote,
|
||||||
|
DeviceDocument,
|
||||||
|
ReceiverNote,
|
||||||
|
Transfer,
|
||||||
|
TransferCustomerDetails,
|
||||||
|
)
|
||||||
|
from ereuse_devicehub.parser.models import PlaceholdersLog, SnapshotsLog
|
||||||
from ereuse_devicehub.parser.parser import ParseSnapshotLsHw
|
from ereuse_devicehub.parser.parser import ParseSnapshotLsHw
|
||||||
from ereuse_devicehub.parser.schemas import Snapshot_lite
|
from ereuse_devicehub.parser.schemas import Snapshot_lite
|
||||||
from ereuse_devicehub.resources.action.models import Snapshot, Trade
|
from ereuse_devicehub.resources.action.models import Snapshot, Trade
|
||||||
|
from ereuse_devicehub.resources.action.schemas import EWaste as EWasteSchema
|
||||||
|
from ereuse_devicehub.resources.action.schemas import Recycled as RecycledSchema
|
||||||
from ereuse_devicehub.resources.action.schemas import Snapshot as SnapshotSchema
|
from ereuse_devicehub.resources.action.schemas import Snapshot as SnapshotSchema
|
||||||
from ereuse_devicehub.resources.action.views.snapshot import (
|
from ereuse_devicehub.resources.action.views.snapshot import (
|
||||||
SnapshotMixin,
|
SnapshotMixin,
|
||||||
|
@ -44,22 +53,31 @@ from ereuse_devicehub.resources.action.views.snapshot import (
|
||||||
from ereuse_devicehub.resources.device.models import (
|
from ereuse_devicehub.resources.device.models import (
|
||||||
SAI,
|
SAI,
|
||||||
Cellphone,
|
Cellphone,
|
||||||
|
Computer,
|
||||||
ComputerMonitor,
|
ComputerMonitor,
|
||||||
|
DataStorage,
|
||||||
Desktop,
|
Desktop,
|
||||||
Device,
|
Device,
|
||||||
|
HardDrive,
|
||||||
Keyboard,
|
Keyboard,
|
||||||
Laptop,
|
Laptop,
|
||||||
MemoryCardReader,
|
MemoryCardReader,
|
||||||
|
Mobile,
|
||||||
|
Monitor,
|
||||||
Mouse,
|
Mouse,
|
||||||
|
Other,
|
||||||
Placeholder,
|
Placeholder,
|
||||||
|
Projector,
|
||||||
Server,
|
Server,
|
||||||
Smartphone,
|
Smartphone,
|
||||||
|
SolidStateDrive,
|
||||||
Tablet,
|
Tablet,
|
||||||
|
TelevisionSet,
|
||||||
)
|
)
|
||||||
from ereuse_devicehub.resources.documents.models import DataWipeDocument
|
from ereuse_devicehub.resources.documents.models import DataWipeDocument
|
||||||
from ereuse_devicehub.resources.enums import Severity
|
from ereuse_devicehub.resources.enums import Severity
|
||||||
from ereuse_devicehub.resources.hash_reports import insert_hash
|
from ereuse_devicehub.resources.hash_reports import insert_hash
|
||||||
from ereuse_devicehub.resources.lot.models import Lot
|
from ereuse_devicehub.resources.lot.models import Lot, ShareLot
|
||||||
from ereuse_devicehub.resources.tag.model import Tag
|
from ereuse_devicehub.resources.tag.model import Tag
|
||||||
from ereuse_devicehub.resources.tradedocument.models import TradeDocument
|
from ereuse_devicehub.resources.tradedocument.models import TradeDocument
|
||||||
from ereuse_devicehub.resources.user.models import User
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
@ -81,17 +99,42 @@ DEVICES = {
|
||||||
],
|
],
|
||||||
"Mobile, tablet & smartphone": [
|
"Mobile, tablet & smartphone": [
|
||||||
"All Mobile",
|
"All Mobile",
|
||||||
"Mobile",
|
|
||||||
"Tablet",
|
"Tablet",
|
||||||
"Smartphone",
|
"Smartphone",
|
||||||
"Cellphone",
|
"Cellphone",
|
||||||
],
|
],
|
||||||
|
"Drives & Storage": [
|
||||||
|
"All DataStorage",
|
||||||
|
"HardDrive",
|
||||||
|
"SolidStateDrive",
|
||||||
|
],
|
||||||
|
"Accessories": [
|
||||||
|
"All Accessories",
|
||||||
|
"Mouse",
|
||||||
|
"MemoryCardReader",
|
||||||
|
"SAI",
|
||||||
|
"Keyboard",
|
||||||
|
],
|
||||||
|
"Other Devices": ["Other"],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
TYPES_DOCUMENTS = [
|
||||||
|
("", ""),
|
||||||
|
("image", "Image"),
|
||||||
|
("main_image", "Main Image"),
|
||||||
|
("functionality_report", "Functionality Report"),
|
||||||
|
("data_sanitization_report", "Data Sanitization Report"),
|
||||||
|
("disposition_report", "Disposition Report"),
|
||||||
|
]
|
||||||
|
|
||||||
COMPUTERS = ['Desktop', 'Laptop', 'Server', 'Computer']
|
COMPUTERS = ['Desktop', 'Laptop', 'Server', 'Computer']
|
||||||
|
|
||||||
MONITORS = ["ComputerMonitor", "Monitor", "TelevisionSet", "Projector"]
|
MONITORS = ["ComputerMonitor", "Monitor", "TelevisionSet", "Projector"]
|
||||||
MOBILE = ["Mobile", "Tablet", "Smartphone", "Cellphone"]
|
MOBILE = ["Mobile", "Tablet", "Smartphone", "Cellphone"]
|
||||||
|
STORAGE = ["HardDrive", "SolidStateDrive"]
|
||||||
|
ACCESSORIES = ["Mouse", "MemoryCardReader", "SAI", "Keyboard"]
|
||||||
|
OTHERS = ["Other"]
|
||||||
|
DATASTORAGE = ['HardDrive', 'SolidStateDrive']
|
||||||
|
|
||||||
|
|
||||||
class AdvancedSearchForm(FlaskForm):
|
class AdvancedSearchForm(FlaskForm):
|
||||||
|
@ -125,11 +168,14 @@ class FilterForm(FlaskForm):
|
||||||
'', choices=DEVICES, default="All Computers", render_kw={'class': "form-select"}
|
'', choices=DEVICES, default="All Computers", render_kw={'class': "form-select"}
|
||||||
)
|
)
|
||||||
|
|
||||||
def __init__(self, lots, lot_id, *args, **kwargs):
|
def __init__(self, lots, lot, lot_id, *args, **kwargs):
|
||||||
self.all_devices = kwargs.pop('all_devices', False)
|
self.all_devices = kwargs.pop('all_devices', False)
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
self.lots = lots
|
self.lots = lots
|
||||||
|
self.lot = lot
|
||||||
self.lot_id = lot_id
|
self.lot_id = lot_id
|
||||||
|
if self.lot_id and not self.lot:
|
||||||
|
self.lot = self.lots.filter(Lot.id == self.lot_id).one()
|
||||||
self._get_types()
|
self._get_types()
|
||||||
|
|
||||||
def _get_types(self):
|
def _get_types(self):
|
||||||
|
@ -140,8 +186,7 @@ class FilterForm(FlaskForm):
|
||||||
self.filter.data = self.device_type
|
self.filter.data = self.device_type
|
||||||
|
|
||||||
def filter_from_lots(self):
|
def filter_from_lots(self):
|
||||||
if self.lot_id:
|
if self.lot:
|
||||||
self.lot = self.lots.filter(Lot.id == self.lot_id).one()
|
|
||||||
device_ids = (d.id for d in self.lot.devices)
|
device_ids = (d.id for d in self.lot.devices)
|
||||||
self.devices = Device.query.filter(Device.id.in_(device_ids)).filter(
|
self.devices = Device.query.filter(Device.id.in_(device_ids)).filter(
|
||||||
Device.binding == None # noqa: E711
|
Device.binding == None # noqa: E711
|
||||||
|
@ -164,7 +209,7 @@ class FilterForm(FlaskForm):
|
||||||
|
|
||||||
# Generic Filters
|
# Generic Filters
|
||||||
if "All Devices" == self.device_type:
|
if "All Devices" == self.device_type:
|
||||||
filter_type = COMPUTERS + MONITORS + MOBILE
|
filter_type = COMPUTERS + MONITORS + MOBILE + DATASTORAGE + OTHERS
|
||||||
|
|
||||||
elif "All Computers" == self.device_type:
|
elif "All Computers" == self.device_type:
|
||||||
filter_type = COMPUTERS
|
filter_type = COMPUTERS
|
||||||
|
@ -175,10 +220,18 @@ class FilterForm(FlaskForm):
|
||||||
elif "All Mobile" == self.device_type:
|
elif "All Mobile" == self.device_type:
|
||||||
filter_type = MOBILE
|
filter_type = MOBILE
|
||||||
|
|
||||||
|
elif "All DataStorage" == self.device_type:
|
||||||
|
filter_type = STORAGE
|
||||||
|
|
||||||
|
elif "All Accessories" == self.device_type:
|
||||||
|
filter_type = ACCESSORIES
|
||||||
|
|
||||||
if filter_type:
|
if filter_type:
|
||||||
self.devices = self.devices.filter(Device.type.in_(filter_type))
|
self.devices = self.devices.filter(Device.type.in_(filter_type))
|
||||||
|
|
||||||
return self.devices.order_by(Device.updated.desc())
|
return self.devices.filter(Device.active.is_(True)).order_by(
|
||||||
|
Device.updated.desc()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class LotForm(FlaskForm):
|
class LotForm(FlaskForm):
|
||||||
|
@ -213,7 +266,8 @@ class LotForm(FlaskForm):
|
||||||
return self.id
|
return self.id
|
||||||
|
|
||||||
def remove(self):
|
def remove(self):
|
||||||
if self.instance and not self.instance.trade:
|
shared = ShareLot.query.filter_by(lot=self.instance).first()
|
||||||
|
if self.instance and not self.instance.trade and not shared:
|
||||||
self.instance.delete()
|
self.instance.delete()
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
return self.instance
|
return self.instance
|
||||||
|
@ -222,6 +276,10 @@ class LotForm(FlaskForm):
|
||||||
class UploadSnapshotForm(SnapshotMixin, FlaskForm):
|
class UploadSnapshotForm(SnapshotMixin, FlaskForm):
|
||||||
snapshot = MultipleFileField('Select a Snapshot File', [validators.DataRequired()])
|
snapshot = MultipleFileField('Select a Snapshot File', [validators.DataRequired()])
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
self.create_new_devices = kwargs.pop('create_new_devices', False)
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def validate(self, extra_validators=None):
|
def validate(self, extra_validators=None):
|
||||||
is_valid = super().validate(extra_validators)
|
is_valid = super().validate(extra_validators)
|
||||||
|
|
||||||
|
@ -266,7 +324,7 @@ class UploadSnapshotForm(SnapshotMixin, FlaskForm):
|
||||||
|
|
||||||
return is_lite
|
return is_lite
|
||||||
|
|
||||||
def save(self, commit=True):
|
def save(self, commit=True, user_trusts=True):
|
||||||
if any([x == 'Error' for x in self.result.values()]):
|
if any([x == 'Error' for x in self.result.values()]):
|
||||||
return
|
return
|
||||||
schema = SnapshotSchema()
|
schema = SnapshotSchema()
|
||||||
|
@ -274,6 +332,7 @@ class UploadSnapshotForm(SnapshotMixin, FlaskForm):
|
||||||
devices = []
|
devices = []
|
||||||
self.tmp_snapshots = app.config['TMP_SNAPSHOTS']
|
self.tmp_snapshots = app.config['TMP_SNAPSHOTS']
|
||||||
for filename, snapshot_json in self.snapshots:
|
for filename, snapshot_json in self.snapshots:
|
||||||
|
self.json_wb = copy.copy(snapshot_json)
|
||||||
path_snapshot = save_json(snapshot_json, self.tmp_snapshots, g.user.email)
|
path_snapshot = save_json(snapshot_json, self.tmp_snapshots, g.user.email)
|
||||||
debug = snapshot_json.pop('debug', None)
|
debug = snapshot_json.pop('debug', None)
|
||||||
self.version = snapshot_json.get('schema_api')
|
self.version = snapshot_json.get('schema_api')
|
||||||
|
@ -288,16 +347,21 @@ class UploadSnapshotForm(SnapshotMixin, FlaskForm):
|
||||||
system_uuid = self.get_uuid(debug)
|
system_uuid = self.get_uuid(debug)
|
||||||
if system_uuid:
|
if system_uuid:
|
||||||
snapshot_json['device']['system_uuid'] = system_uuid
|
snapshot_json['device']['system_uuid'] = system_uuid
|
||||||
|
self.get_fields_extra(debug, snapshot_json)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
snapshot_json = schema.load(snapshot_json)
|
snapshot_json = schema.load(snapshot_json)
|
||||||
|
response = self.build(
|
||||||
|
snapshot_json, create_new_device=self.create_new_devices
|
||||||
|
)
|
||||||
except ValidationError as err:
|
except ValidationError as err:
|
||||||
txt = "{}".format(err)
|
txt = "{}".format(err)
|
||||||
self.errors(txt=txt)
|
self.errors(txt=txt)
|
||||||
self.result[filename] = 'Error'
|
self.result[filename] = 'Error'
|
||||||
continue
|
continue
|
||||||
|
|
||||||
response = self.build(snapshot_json)
|
if isinstance(response.device, Computer):
|
||||||
|
response.device.user_trusts = user_trusts
|
||||||
db.session.add(response)
|
db.session.add(response)
|
||||||
devices.append(response.device.binding.device)
|
devices.append(response.device.binding.device)
|
||||||
|
|
||||||
|
@ -340,8 +404,9 @@ class NewDeviceForm(FlaskForm):
|
||||||
depth = FloatField('Depth', [validators.Optional()])
|
depth = FloatField('Depth', [validators.Optional()])
|
||||||
variant = StringField('Variant', [validators.Optional()])
|
variant = StringField('Variant', [validators.Optional()])
|
||||||
sku = StringField('SKU', [validators.Optional()])
|
sku = StringField('SKU', [validators.Optional()])
|
||||||
image = StringField('Image', [validators.Optional(), validators.URL()])
|
image = URLField('Image', [validators.Optional(), validators.URL()])
|
||||||
imei = IntegerField('IMEI', [validators.Optional()])
|
imei = IntegerField('IMEI', [validators.Optional()])
|
||||||
|
data_storage_size = FloatField('Storage Size', [validators.Optional()])
|
||||||
meid = StringField('MEID', [validators.Optional()])
|
meid = StringField('MEID', [validators.Optional()])
|
||||||
resolution = IntegerField('Resolution width', [validators.Optional()])
|
resolution = IntegerField('Resolution width', [validators.Optional()])
|
||||||
screen = FloatField('Screen size', [validators.Optional()])
|
screen = FloatField('Screen size', [validators.Optional()])
|
||||||
|
@ -361,14 +426,20 @@ class NewDeviceForm(FlaskForm):
|
||||||
"Smartphone": Smartphone,
|
"Smartphone": Smartphone,
|
||||||
"Tablet": Tablet,
|
"Tablet": Tablet,
|
||||||
"Cellphone": Cellphone,
|
"Cellphone": Cellphone,
|
||||||
|
"HardDrive": HardDrive,
|
||||||
|
"SolidStateDrive": SolidStateDrive,
|
||||||
"ComputerMonitor": ComputerMonitor,
|
"ComputerMonitor": ComputerMonitor,
|
||||||
|
"Monitor": Monitor,
|
||||||
|
"TelevisionSet": TelevisionSet,
|
||||||
|
"Projector": Projector,
|
||||||
"Mouse": Mouse,
|
"Mouse": Mouse,
|
||||||
"Keyboard": Keyboard,
|
"Keyboard": Keyboard,
|
||||||
"SAI": SAI,
|
"SAI": SAI,
|
||||||
"MemoryCardReader": MemoryCardReader,
|
"MemoryCardReader": MemoryCardReader,
|
||||||
|
"Other": Other,
|
||||||
}
|
}
|
||||||
|
|
||||||
def reset_from_obj(self):
|
def reset_from_obj(self): # noqa: C901
|
||||||
if not self._obj:
|
if not self._obj:
|
||||||
return
|
return
|
||||||
disabled = {'disabled': "disabled"}
|
disabled = {'disabled': "disabled"}
|
||||||
|
@ -400,19 +471,22 @@ class NewDeviceForm(FlaskForm):
|
||||||
self.depth.data = self._obj.depth
|
self.depth.data = self._obj.depth
|
||||||
self.variant.data = self._obj.variant
|
self.variant.data = self._obj.variant
|
||||||
self.sku.data = self._obj.sku
|
self.sku.data = self._obj.sku
|
||||||
self.image.data = self._obj.image
|
if self._obj.image:
|
||||||
|
self.image.data = self._obj.image.to_text()
|
||||||
if self._obj.type in ['Smartphone', 'Tablet', 'Cellphone']:
|
if self._obj.type in ['Smartphone', 'Tablet', 'Cellphone']:
|
||||||
self.imei.data = self._obj.imei
|
self.imei.data = self._obj.imei
|
||||||
self.meid.data = self._obj.meid
|
self.meid.data = self._obj.meid
|
||||||
|
self.data_storage_size.data = self._obj.data_storage_size
|
||||||
if self._obj.type == 'ComputerMonitor':
|
if self._obj.type == 'ComputerMonitor':
|
||||||
self.resolution.data = self._obj.resolution_width
|
self.resolution.data = self._obj.resolution_width
|
||||||
self.screen.data = self._obj.size
|
self.screen.data = self._obj.size
|
||||||
|
if self._obj.type in ['HardDrive', 'SolidStateDrive']:
|
||||||
|
if self._obj.size:
|
||||||
|
self.data_storage_size.data = self._obj.size / 1000
|
||||||
|
|
||||||
if self._obj.placeholder.is_abstract:
|
if self._obj.placeholder.is_abstract:
|
||||||
self.type.render_kw = disabled
|
self.type.render_kw = disabled
|
||||||
self.amount.render_kw = disabled
|
self.amount.render_kw = disabled
|
||||||
# self.id_device_supplier.render_kw = disabled
|
|
||||||
self.pallet.render_kw = disabled
|
|
||||||
self.info.render_kw = disabled
|
self.info.render_kw = disabled
|
||||||
self.components.render_kw = disabled
|
self.components.render_kw = disabled
|
||||||
self.serial_number.render_kw = disabled
|
self.serial_number.render_kw = disabled
|
||||||
|
@ -433,6 +507,9 @@ class NewDeviceForm(FlaskForm):
|
||||||
if self._obj.type in ['Smartphone', 'Tablet', 'Cellphone']:
|
if self._obj.type in ['Smartphone', 'Tablet', 'Cellphone']:
|
||||||
self.imei.render_kw = disabled
|
self.imei.render_kw = disabled
|
||||||
self.meid.render_kw = disabled
|
self.meid.render_kw = disabled
|
||||||
|
self.data_storage_size.render_kw = disabled
|
||||||
|
if self._obj.type in ['HardDrive', 'SolidStateDrive']:
|
||||||
|
self.data_storage_size.render_kw = disabled
|
||||||
if self._obj.type == 'ComputerMonitor':
|
if self._obj.type == 'ComputerMonitor':
|
||||||
self.resolution.render_kw = disabled
|
self.resolution.render_kw = disabled
|
||||||
self.screen.render_kw = disabled
|
self.screen.render_kw = disabled
|
||||||
|
@ -461,10 +538,10 @@ class NewDeviceForm(FlaskForm):
|
||||||
self.depth.errors = txt
|
self.depth.errors = txt
|
||||||
is_valid = False
|
is_valid = False
|
||||||
|
|
||||||
if self.imei.data and self.amount.data == 1:
|
# if self.imei.data and self.amount.data == 1:
|
||||||
if not 13 < len(str(self.imei.data)) < 17:
|
# if not 13 < len(str(self.imei.data)) < 17:
|
||||||
self.imei.errors = error
|
# self.imei.errors = error
|
||||||
is_valid = False
|
# is_valid = False
|
||||||
|
|
||||||
if self.meid.data and self.amount.data == 1:
|
if self.meid.data and self.amount.data == 1:
|
||||||
meid = self.meid.data
|
meid = self.meid.data
|
||||||
|
@ -545,8 +622,16 @@ class NewDeviceForm(FlaskForm):
|
||||||
if self.type.data in ['Smartphone', 'Tablet', 'Cellphone']:
|
if self.type.data in ['Smartphone', 'Tablet', 'Cellphone']:
|
||||||
device.imei = self.imei.data
|
device.imei = self.imei.data
|
||||||
device.meid = self.meid.data
|
device.meid = self.meid.data
|
||||||
|
device.data_storage_size = self.data_storage_size.data
|
||||||
|
|
||||||
|
if self.type.data in ['HardDrive', 'SolidStateDrive']:
|
||||||
|
if self.data_storage_size.data:
|
||||||
|
device.size = self.data_storage_size.data * 1000
|
||||||
|
|
||||||
|
device.image = URL(self.image.data)
|
||||||
|
|
||||||
device.placeholder = self.get_placeholder()
|
device.placeholder = self.get_placeholder()
|
||||||
|
device.set_hid()
|
||||||
db.session.add(device)
|
db.session.add(device)
|
||||||
|
|
||||||
placeholder_log = PlaceholdersLog(
|
placeholder_log = PlaceholdersLog(
|
||||||
|
@ -601,7 +686,7 @@ class NewDeviceForm(FlaskForm):
|
||||||
self._obj.height = self.height.data
|
self._obj.height = self.height.data
|
||||||
self._obj.depth = self.depth.data
|
self._obj.depth = self.depth.data
|
||||||
self._obj.variant = self.variant.data
|
self._obj.variant = self.variant.data
|
||||||
self._obj.image = self.image.data
|
self._obj.image = URL(self.image.data)
|
||||||
|
|
||||||
if self._obj.type == 'ComputerMonitor':
|
if self._obj.type == 'ComputerMonitor':
|
||||||
self._obj.resolution_width = self.resolution.data
|
self._obj.resolution_width = self.resolution.data
|
||||||
|
@ -610,6 +695,11 @@ class NewDeviceForm(FlaskForm):
|
||||||
if self._obj.type in ['Smartphone', 'Tablet', 'Cellphone']:
|
if self._obj.type in ['Smartphone', 'Tablet', 'Cellphone']:
|
||||||
self._obj.imei = self.imei.data
|
self._obj.imei = self.imei.data
|
||||||
self._obj.meid = self.meid.data
|
self._obj.meid = self.meid.data
|
||||||
|
self._obj.data_storage_size = self.data_storage_size.data
|
||||||
|
|
||||||
|
if self.type.data in ['HardDrive', 'SolidStateDrive']:
|
||||||
|
if self.data_storage_size.data:
|
||||||
|
self._obj.size = self.data_storage_size.data * 1000
|
||||||
|
|
||||||
if (
|
if (
|
||||||
self.appearance.data
|
self.appearance.data
|
||||||
|
@ -623,6 +713,14 @@ class NewDeviceForm(FlaskForm):
|
||||||
):
|
):
|
||||||
self._obj.set_functionality(self.functionality.data)
|
self._obj.set_functionality(self.functionality.data)
|
||||||
|
|
||||||
|
else:
|
||||||
|
self._obj.placeholder.id_device_supplier = (
|
||||||
|
self.id_device_supplier.data or None
|
||||||
|
)
|
||||||
|
self._obj.placeholder.id_device_internal = (
|
||||||
|
self.id_device_internal.data or None
|
||||||
|
)
|
||||||
|
self._obj.placeholder.pallet = self.pallet.data or None
|
||||||
placeholder_log = PlaceholdersLog(
|
placeholder_log = PlaceholdersLog(
|
||||||
type="Update", source='Web form', placeholder=self._obj.placeholder
|
type="Update", source='Web form', placeholder=self._obj.placeholder
|
||||||
)
|
)
|
||||||
|
@ -744,6 +842,9 @@ class ActionFormMixin(FlaskForm):
|
||||||
if not self._devices:
|
if not self._devices:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
if len(devices) > 1 and self.type.data == 'EWaste':
|
||||||
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def generic_validation(self, extra_validators=None):
|
def generic_validation(self, extra_validators=None):
|
||||||
|
@ -760,6 +861,17 @@ class ActionFormMixin(FlaskForm):
|
||||||
|
|
||||||
self.populate_obj(self.instance)
|
self.populate_obj(self.instance)
|
||||||
db.session.add(self.instance)
|
db.session.add(self.instance)
|
||||||
|
|
||||||
|
if self.instance.type == 'EWaste':
|
||||||
|
ewaste = EWasteSchema().dump(self.instance)
|
||||||
|
doc = "{}".format(ewaste)
|
||||||
|
self.instance.register_proof(doc)
|
||||||
|
|
||||||
|
if self.instance.type == 'Recycled':
|
||||||
|
recycled = RecycledSchema().dump(self.instance)
|
||||||
|
doc = "{}".format(recycled)
|
||||||
|
self.instance.register_proof(doc)
|
||||||
|
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
|
|
||||||
self.devices.data = devices
|
self.devices.data = devices
|
||||||
|
@ -782,7 +894,13 @@ class NewActionForm(ActionFormMixin):
|
||||||
if not is_valid:
|
if not is_valid:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if self.type.data in ['Allocate', 'Deallocate', 'Trade', 'DataWipe']:
|
if self.type.data in [
|
||||||
|
'Allocate',
|
||||||
|
'Deallocate',
|
||||||
|
'Trade',
|
||||||
|
'DataWipe',
|
||||||
|
'EraseDataWipe',
|
||||||
|
]:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
@ -992,21 +1110,55 @@ class DataWipeDocumentForm(Form):
|
||||||
class DataWipeForm(ActionFormMixin):
|
class DataWipeForm(ActionFormMixin):
|
||||||
document = FormField(DataWipeDocumentForm)
|
document = FormField(DataWipeDocumentForm)
|
||||||
|
|
||||||
|
def validate(self, extra_validators=None):
|
||||||
|
is_valid = super().validate(extra_validators)
|
||||||
|
if not is_valid:
|
||||||
|
return False
|
||||||
|
|
||||||
|
txt = "Error: Only Data Sanitization actions are "
|
||||||
|
txt += "allowed on Placeholders that are of the Data Storage type."
|
||||||
|
for dev in self._devices:
|
||||||
|
if dev.is_abstract() == 'Placeholder':
|
||||||
|
if not (isinstance(dev, DataStorage) or isinstance(dev, Mobile)):
|
||||||
|
messages.error(txt)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return is_valid
|
||||||
|
|
||||||
def save(self):
|
def save(self):
|
||||||
self.document.form.save(commit=False)
|
self.document.form.save(commit=False)
|
||||||
|
|
||||||
Model = db.Model._decl_class_registry.data[self.type.data]()
|
Model = db.Model._decl_class_registry.data[self.type.data]()
|
||||||
self.instance = Model()
|
self.instance = Model()
|
||||||
devices = self.devices.data
|
devices = self.devices.data
|
||||||
|
if not self.document.success.data:
|
||||||
|
self.severity.data = Severity.Error.name
|
||||||
severity = self.severity.data
|
severity = self.severity.data
|
||||||
self.devices.data = self._devices
|
self.devices.data = self._devices
|
||||||
self.severity.data = Severity[self.severity.data]
|
self.severity.data = Severity[self.severity.data]
|
||||||
|
|
||||||
document = copy.copy(self.document)
|
document = copy.copy(self.document)
|
||||||
del self.document
|
del self.document
|
||||||
self.populate_obj(self.instance)
|
for dev in self._devices:
|
||||||
self.instance.document = document.form._obj
|
ac = None
|
||||||
db.session.add(self.instance)
|
if isinstance(dev, Mobile) or isinstance(dev, DataStorage):
|
||||||
|
ac = Model()
|
||||||
|
self.populate_obj(ac)
|
||||||
|
ac.device_id = dev.id
|
||||||
|
ac.document = document.form._obj
|
||||||
|
db.session.add(ac)
|
||||||
|
continue
|
||||||
|
|
||||||
|
for hd in dev.components:
|
||||||
|
if not isinstance(hd, DataStorage):
|
||||||
|
continue
|
||||||
|
ac = Model()
|
||||||
|
self.populate_obj(ac)
|
||||||
|
ac.parent = dev
|
||||||
|
ac.device = hd
|
||||||
|
ac.device_id = hd.id
|
||||||
|
ac.document = document.form._obj
|
||||||
|
db.session.add(ac)
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
|
|
||||||
self.devices.data = devices
|
self.devices.data = devices
|
||||||
|
@ -1076,7 +1228,6 @@ class TradeForm(ActionFormMixin):
|
||||||
or email_to == email_from
|
or email_to == email_from
|
||||||
or g.user.email not in [email_from, email_to]
|
or g.user.email not in [email_from, email_to]
|
||||||
):
|
):
|
||||||
|
|
||||||
errors = ["If you want confirm, you need a correct email"]
|
errors = ["If you want confirm, you need a correct email"]
|
||||||
self.user_to.errors = errors
|
self.user_to.errors = errors
|
||||||
self.user_from.errors = errors
|
self.user_from.errors = errors
|
||||||
|
@ -1224,8 +1375,24 @@ class TradeDocumentForm(FlaskForm):
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
lot_id = kwargs.pop('lot')
|
lot_id = kwargs.pop('lot')
|
||||||
super().__init__(*args, **kwargs)
|
doc_id = kwargs.pop('document', None)
|
||||||
self._lot = Lot.query.filter(Lot.id == lot_id).one()
|
self._lot = Lot.query.filter(Lot.id == lot_id).one()
|
||||||
|
self._obj = None
|
||||||
|
if doc_id:
|
||||||
|
self._obj = TradeDocument.query.filter_by(
|
||||||
|
id=doc_id, lot=self._lot, owner=g.user
|
||||||
|
).one()
|
||||||
|
kwargs['obj'] = self._obj
|
||||||
|
|
||||||
|
if not self.file_name.args:
|
||||||
|
self.file_name.args = ("File", [validators.DataRequired()])
|
||||||
|
if doc_id:
|
||||||
|
self.file_name.args = ()
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
if self._obj:
|
||||||
|
if isinstance(self.url.data, URL):
|
||||||
|
self.url.data = self.url.data.to_text()
|
||||||
|
|
||||||
if not self._lot.transfer:
|
if not self._lot.transfer:
|
||||||
self.form_errors = ['Error, this lot is not a transfer lot.']
|
self.form_errors = ['Error, this lot is not a transfer lot.']
|
||||||
|
@ -1241,22 +1408,143 @@ class TradeDocumentForm(FlaskForm):
|
||||||
def save(self, commit=True):
|
def save(self, commit=True):
|
||||||
file_name = ''
|
file_name = ''
|
||||||
file_hash = ''
|
file_hash = ''
|
||||||
|
if self._obj:
|
||||||
|
file_name = self._obj.file_name
|
||||||
|
file_hash = self._obj.file_hash
|
||||||
|
|
||||||
if self.file_name.data:
|
if self.file_name.data:
|
||||||
file_name = self.file_name.data.filename
|
file_name = self.file_name.data.filename
|
||||||
file_hash = insert_hash(self.file_name.data.read(), commit=False)
|
file_hash = insert_hash(self.file_name.data.read(), commit=False)
|
||||||
|
|
||||||
self.url.data = URL(self.url.data)
|
self.url.data = URL(self.url.data)
|
||||||
|
if not self._obj:
|
||||||
self._obj = TradeDocument(lot_id=self._lot.id)
|
self._obj = TradeDocument(lot_id=self._lot.id)
|
||||||
|
|
||||||
self.populate_obj(self._obj)
|
self.populate_obj(self._obj)
|
||||||
|
|
||||||
self._obj.file_name = file_name
|
self._obj.file_name = file_name
|
||||||
self._obj.file_hash = file_hash
|
self._obj.file_hash = file_hash
|
||||||
|
|
||||||
|
if not self._obj.id:
|
||||||
db.session.add(self._obj)
|
db.session.add(self._obj)
|
||||||
self._lot.documents.add(self._obj)
|
self._lot.documents.add(self._obj)
|
||||||
|
|
||||||
if commit:
|
if commit:
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
|
|
||||||
return self._obj
|
return self._obj
|
||||||
|
|
||||||
|
def remove(self):
|
||||||
|
if self._obj:
|
||||||
|
self._obj.delete()
|
||||||
|
db.session.commit()
|
||||||
|
return self._obj
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceDocumentForm(FlaskForm):
|
||||||
|
url = URLField(
|
||||||
|
'Url',
|
||||||
|
[validators.Optional()],
|
||||||
|
render_kw={'class': "form-control"},
|
||||||
|
description="Url where the document resides",
|
||||||
|
)
|
||||||
|
description = StringField(
|
||||||
|
'Description',
|
||||||
|
[validators.Optional()],
|
||||||
|
render_kw={'class': "form-control"},
|
||||||
|
description="",
|
||||||
|
)
|
||||||
|
id_document = StringField(
|
||||||
|
'Document Id',
|
||||||
|
[validators.Optional()],
|
||||||
|
render_kw={'class': "form-control"},
|
||||||
|
description="Identification number of document",
|
||||||
|
)
|
||||||
|
type = SelectField(
|
||||||
|
'Type',
|
||||||
|
[validators.Optional()],
|
||||||
|
choices=TYPES_DOCUMENTS,
|
||||||
|
default="",
|
||||||
|
render_kw={'class': "form-select"},
|
||||||
|
)
|
||||||
|
date = DateField(
|
||||||
|
'Date',
|
||||||
|
[validators.Optional()],
|
||||||
|
render_kw={'class': "form-control"},
|
||||||
|
description="",
|
||||||
|
)
|
||||||
|
file_name = FileField(
|
||||||
|
'File',
|
||||||
|
[validators.DataRequired()],
|
||||||
|
render_kw={'class': "form-control"},
|
||||||
|
description="""This file is not stored on our servers, it is only used to
|
||||||
|
generate a digital signature and obtain the name of the file.""",
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
id = kwargs.pop('dhid')
|
||||||
|
doc_id = kwargs.pop('document', None)
|
||||||
|
self._device = Device.query.filter(Device.devicehub_id == id).first()
|
||||||
|
self._obj = None
|
||||||
|
if doc_id:
|
||||||
|
self._obj = DeviceDocument.query.filter_by(
|
||||||
|
id=doc_id, device=self._device, owner=g.user
|
||||||
|
).one()
|
||||||
|
kwargs['obj'] = self._obj
|
||||||
|
|
||||||
|
if not self.file_name.args:
|
||||||
|
self.file_name.args = ("File", [validators.DataRequired()])
|
||||||
|
if doc_id:
|
||||||
|
self.file_name.args = ()
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
if self._obj:
|
||||||
|
if isinstance(self.url.data, URL):
|
||||||
|
self.url.data = self.url.data.to_text()
|
||||||
|
|
||||||
|
def validate(self, extra_validators=None):
|
||||||
|
is_valid = super().validate(extra_validators)
|
||||||
|
|
||||||
|
if g.user != self._device.owner:
|
||||||
|
is_valid = False
|
||||||
|
|
||||||
|
return is_valid
|
||||||
|
|
||||||
|
def save(self, commit=True):
|
||||||
|
file_name = ''
|
||||||
|
file_hash = ''
|
||||||
|
if self._obj:
|
||||||
|
file_name = self._obj.file_name
|
||||||
|
file_hash = self._obj.file_hash
|
||||||
|
|
||||||
|
if self.file_name.data:
|
||||||
|
file_name = self.file_name.data.filename
|
||||||
|
file_hash = insert_hash(self.file_name.data.read(), commit=False)
|
||||||
|
|
||||||
|
self.url.data = URL(self.url.data)
|
||||||
|
if not self._obj:
|
||||||
|
self._obj = DeviceDocument(device_id=self._device.id)
|
||||||
|
|
||||||
|
self.populate_obj(self._obj)
|
||||||
|
|
||||||
|
self._obj.file_name = file_name
|
||||||
|
self._obj.file_hash = file_hash
|
||||||
|
|
||||||
|
if not self._obj.id:
|
||||||
|
db.session.add(self._obj)
|
||||||
|
# self._device.documents.add(self._obj)
|
||||||
|
|
||||||
|
if commit:
|
||||||
|
db.session.commit()
|
||||||
|
|
||||||
|
return self._obj
|
||||||
|
|
||||||
|
def remove(self):
|
||||||
|
if self._obj:
|
||||||
|
self._obj.delete()
|
||||||
|
db.session.commit()
|
||||||
|
return self._obj
|
||||||
|
|
||||||
|
|
||||||
class TransferForm(FlaskForm):
|
class TransferForm(FlaskForm):
|
||||||
lot_name = StringField(
|
lot_name = StringField(
|
||||||
|
@ -1473,6 +1761,70 @@ class NotesForm(FlaskForm):
|
||||||
return self._obj
|
return self._obj
|
||||||
|
|
||||||
|
|
||||||
|
class CustomerDetailsForm(FlaskForm):
|
||||||
|
company_name = StringField(
|
||||||
|
'Company name',
|
||||||
|
[validators.Optional()],
|
||||||
|
render_kw={'class': "form-control"},
|
||||||
|
description="Name of the company",
|
||||||
|
)
|
||||||
|
location = StringField(
|
||||||
|
'Location',
|
||||||
|
[validators.Optional()],
|
||||||
|
render_kw={'class': "form-control"},
|
||||||
|
description="""Location where is the company""",
|
||||||
|
)
|
||||||
|
logo = URLField(
|
||||||
|
'Logo',
|
||||||
|
[validators.Optional()],
|
||||||
|
render_kw={
|
||||||
|
'class': "form-control",
|
||||||
|
"placeholder": "Url where is the logo - acceptd only .png, .jpg, .gif, svg",
|
||||||
|
},
|
||||||
|
description="Url where is the logo",
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
lot_id = kwargs.pop('lot_id', None)
|
||||||
|
self._tmp_lot = Lot.query.filter(Lot.id == lot_id).one()
|
||||||
|
self._obj = self._tmp_lot.transfer.customer_details
|
||||||
|
if self._obj:
|
||||||
|
kwargs['obj'] = self._obj
|
||||||
|
if not self._obj:
|
||||||
|
self._obj = TransferCustomerDetails(transfer_id=self._tmp_lot.transfer.id)
|
||||||
|
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
if isinstance(self.logo.data, URL):
|
||||||
|
self.logo.data = URL(self.logo.data).to_text()
|
||||||
|
|
||||||
|
def validate(self, extra_validators=None):
|
||||||
|
is_valid = super().validate(extra_validators)
|
||||||
|
|
||||||
|
if not is_valid:
|
||||||
|
return is_valid
|
||||||
|
|
||||||
|
if not self.logo.data:
|
||||||
|
return True
|
||||||
|
|
||||||
|
extensions = ["jpg", "jpeg", "png", "gif", "svg"]
|
||||||
|
if self.logo.data.lower().split(".")[-1] not in extensions:
|
||||||
|
txt = "Error in Url field - accepted only .PNG, .JPG and .GIF. extensions"
|
||||||
|
self.logo.errors = [txt]
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def save(self, commit=True):
|
||||||
|
self.populate_obj(self._obj)
|
||||||
|
self._obj.logo = URL(self._obj.logo)
|
||||||
|
db.session.add(self._obj)
|
||||||
|
|
||||||
|
if commit:
|
||||||
|
db.session.commit()
|
||||||
|
|
||||||
|
return self._obj
|
||||||
|
|
||||||
|
|
||||||
class UploadPlaceholderForm(FlaskForm):
|
class UploadPlaceholderForm(FlaskForm):
|
||||||
type = StringField('Type', [validators.DataRequired()])
|
type = StringField('Type', [validators.DataRequired()])
|
||||||
placeholder_file = FileField(
|
placeholder_file = FileField(
|
||||||
|
@ -1581,7 +1933,6 @@ class UploadPlaceholderForm(FlaskForm):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def save(self, commit=True):
|
def save(self, commit=True):
|
||||||
|
|
||||||
for device, placeholder_log in self.placeholders:
|
for device, placeholder_log in self.placeholders:
|
||||||
db.session.add(device)
|
db.session.add(device)
|
||||||
db.session.add(placeholder_log)
|
db.session.add(placeholder_log)
|
||||||
|
@ -1610,7 +1961,6 @@ class EditPlaceholderForm(FlaskForm):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def save(self, commit=True):
|
def save(self, commit=True):
|
||||||
|
|
||||||
for device in self.placeholders:
|
for device in self.placeholders:
|
||||||
db.session.add(device)
|
db.session.add(device)
|
||||||
|
|
||||||
|
@ -1658,3 +2008,118 @@ class BindingForm(FlaskForm):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class UserTrustsForm(FlaskForm):
|
||||||
|
snapshot_type = SelectField(
|
||||||
|
'',
|
||||||
|
[validators.DataRequired()],
|
||||||
|
choices=[("new_device", "New Device"), ("update", "Update")],
|
||||||
|
default="new_device",
|
||||||
|
render_kw={'class': "form-select"},
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, snapshot_uuid, *args, **kwargs):
|
||||||
|
self.snapshot = Snapshot.query.filter_by(uuid=snapshot_uuid).one()
|
||||||
|
self.device = None
|
||||||
|
if self.snapshot.device:
|
||||||
|
self.device = self.snapshot.device
|
||||||
|
|
||||||
|
self.snapshot_type.kwargs['default'] = self.snapshot.get_new_device()
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def validate(self, extra_validators=None):
|
||||||
|
is_valid = super().validate(extra_validators)
|
||||||
|
|
||||||
|
if not is_valid:
|
||||||
|
txt = ""
|
||||||
|
self.snapthot_type.errors = [txt]
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def unic(self):
|
||||||
|
try:
|
||||||
|
return self._unic
|
||||||
|
except Exception:
|
||||||
|
self._devices = (
|
||||||
|
Device.query.filter_by(
|
||||||
|
hid=self.device.hid, owner=g.user, placeholder=None, active=True
|
||||||
|
)
|
||||||
|
.order_by(Device.updated.asc())
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
|
||||||
|
self._unic = len(self._devices) < 2
|
||||||
|
return self._unic
|
||||||
|
|
||||||
|
def dhids_all_devices(self):
|
||||||
|
self.unic()
|
||||||
|
return ", ".join([x.dhid for x in self._devices][1:])
|
||||||
|
|
||||||
|
def dhid_base(self):
|
||||||
|
self.unic()
|
||||||
|
if not self._devices:
|
||||||
|
return ''
|
||||||
|
return self._devices[0].dhid
|
||||||
|
|
||||||
|
def show(self):
|
||||||
|
if not self.snapshot or not self.device:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not hasattr(self.device, 'system_uuid'):
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not self.device.system_uuid:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if self.snapshot.get_new_device() == 'update':
|
||||||
|
# To do Split
|
||||||
|
return True
|
||||||
|
|
||||||
|
if not self.unic():
|
||||||
|
if self.device == self._devices[0]:
|
||||||
|
return False
|
||||||
|
# To do merge
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def save(self, commit=True):
|
||||||
|
if not self.show():
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.snapshot_type.data == self.snapshot.get_new_device():
|
||||||
|
return
|
||||||
|
|
||||||
|
if self.snapshot_type.data == 'update' and not self.unic():
|
||||||
|
self.device.reliable()
|
||||||
|
|
||||||
|
if self.snapshot_type.data == 'new_device' and self.unic():
|
||||||
|
self.device.unreliable()
|
||||||
|
txt = "This devices is assigned as unreliable for the user "
|
||||||
|
txt += "and never is possible to do an update of this device."
|
||||||
|
self.error_log(txt)
|
||||||
|
|
||||||
|
if commit:
|
||||||
|
db.session.commit()
|
||||||
|
|
||||||
|
return self.snapshot
|
||||||
|
|
||||||
|
def error_log(self, txt):
|
||||||
|
snapshot = self.get_first_snapshot()
|
||||||
|
error = SnapshotsLog(
|
||||||
|
description=txt,
|
||||||
|
snapshot=snapshot,
|
||||||
|
snapshot_uuid=snapshot.uuid,
|
||||||
|
severity=Severity.Error,
|
||||||
|
sid=snapshot.sid,
|
||||||
|
version="{}".format(snapshot.version),
|
||||||
|
)
|
||||||
|
db.session.add(error)
|
||||||
|
|
||||||
|
def get_first_snapshot(self):
|
||||||
|
device = self.snapshot.device
|
||||||
|
for ac in device.actions:
|
||||||
|
if ac.type == 'Snapshot':
|
||||||
|
return ac
|
||||||
|
|
|
@ -1,15 +1,17 @@
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
|
||||||
from citext import CIText
|
from citext import CIText
|
||||||
|
from dateutil.tz import tzutc
|
||||||
from flask import g
|
from flask import g
|
||||||
from sqlalchemy import Column, Integer
|
from sortedcontainers import SortedSet
|
||||||
|
from sqlalchemy import BigInteger, Column, Integer
|
||||||
from sqlalchemy.dialects.postgresql import UUID
|
from sqlalchemy.dialects.postgresql import UUID
|
||||||
from sqlalchemy.orm import backref, relationship
|
from sqlalchemy.orm import backref, relationship
|
||||||
from teal.db import CASCADE_OWN
|
|
||||||
|
|
||||||
from ereuse_devicehub.db import db
|
from ereuse_devicehub.db import db
|
||||||
from ereuse_devicehub.resources.models import Thing
|
from ereuse_devicehub.resources.models import Thing
|
||||||
from ereuse_devicehub.resources.user.models import User
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
from ereuse_devicehub.teal.db import CASCADE_OWN, URL
|
||||||
|
|
||||||
|
|
||||||
class Transfer(Thing):
|
class Transfer(Thing):
|
||||||
|
@ -90,3 +92,70 @@ class ReceiverNote(Thing):
|
||||||
backref=backref('receiver_note', lazy=True, uselist=False, cascade=CASCADE_OWN),
|
backref=backref('receiver_note', lazy=True, uselist=False, cascade=CASCADE_OWN),
|
||||||
primaryjoin='ReceiverNote.transfer_id == Transfer.id',
|
primaryjoin='ReceiverNote.transfer_id == Transfer.id',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TransferCustomerDetails(Thing):
|
||||||
|
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
|
||||||
|
company_name = Column(CIText(), nullable=True)
|
||||||
|
location = Column(CIText(), nullable=True)
|
||||||
|
logo = Column(URL(), nullable=True)
|
||||||
|
|
||||||
|
transfer_id = db.Column(
|
||||||
|
UUID(as_uuid=True),
|
||||||
|
db.ForeignKey('transfer.id'),
|
||||||
|
nullable=False,
|
||||||
|
)
|
||||||
|
transfer = relationship(
|
||||||
|
'Transfer',
|
||||||
|
backref=backref(
|
||||||
|
'customer_details', lazy=True, uselist=False, cascade=CASCADE_OWN
|
||||||
|
),
|
||||||
|
primaryjoin='TransferCustomerDetails.transfer_id == Transfer.id',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
_sorted_documents = {
|
||||||
|
'order_by': lambda: DeviceDocument.created,
|
||||||
|
'collection_class': SortedSet,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceDocument(Thing):
|
||||||
|
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
|
||||||
|
type = Column(db.CIText(), nullable=True)
|
||||||
|
date = Column(db.DateTime, nullable=True)
|
||||||
|
id_document = Column(db.CIText(), nullable=True)
|
||||||
|
description = Column(db.CIText(), nullable=True)
|
||||||
|
owner_id = db.Column(
|
||||||
|
UUID(as_uuid=True),
|
||||||
|
db.ForeignKey(User.id),
|
||||||
|
nullable=False,
|
||||||
|
default=lambda: g.user.id,
|
||||||
|
)
|
||||||
|
owner = db.relationship(User, primaryjoin=owner_id == User.id)
|
||||||
|
device_id = db.Column(BigInteger, db.ForeignKey('device.id'), nullable=False)
|
||||||
|
device = db.relationship(
|
||||||
|
'Device',
|
||||||
|
primaryjoin='DeviceDocument.device_id == Device.id',
|
||||||
|
backref=backref(
|
||||||
|
'documents', lazy=True, cascade=CASCADE_OWN, **_sorted_documents
|
||||||
|
),
|
||||||
|
)
|
||||||
|
file_name = Column(db.CIText(), nullable=True)
|
||||||
|
file_hash = Column(db.CIText(), nullable=True)
|
||||||
|
url = db.Column(URL(), nullable=True)
|
||||||
|
|
||||||
|
# __table_args__ = (
|
||||||
|
# db.Index('document_id', id, postgresql_using='hash'),
|
||||||
|
# db.Index('type_doc', type, postgresql_using='hash')
|
||||||
|
# )
|
||||||
|
|
||||||
|
def get_url(self) -> str:
|
||||||
|
if self.url:
|
||||||
|
return self.url.to_text()
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def __lt__(self, other):
|
||||||
|
return self.created.replace(tzinfo=tzutc()) < other.created.replace(
|
||||||
|
tzinfo=tzutc()
|
||||||
|
)
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
import copy
|
import copy
|
||||||
import csv
|
import csv
|
||||||
|
import datetime
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import uuid
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
@ -12,6 +14,7 @@ from flask import current_app as app
|
||||||
from flask import g, make_response, request, url_for
|
from flask import g, make_response, request, url_for
|
||||||
from flask.views import View
|
from flask.views import View
|
||||||
from flask_login import current_user, login_required
|
from flask_login import current_user, login_required
|
||||||
|
from sqlalchemy import or_
|
||||||
from werkzeug.exceptions import NotFound
|
from werkzeug.exceptions import NotFound
|
||||||
|
|
||||||
from ereuse_devicehub import messages
|
from ereuse_devicehub import messages
|
||||||
|
@ -20,7 +23,9 @@ from ereuse_devicehub.inventory.forms import (
|
||||||
AdvancedSearchForm,
|
AdvancedSearchForm,
|
||||||
AllocateForm,
|
AllocateForm,
|
||||||
BindingForm,
|
BindingForm,
|
||||||
|
CustomerDetailsForm,
|
||||||
DataWipeForm,
|
DataWipeForm,
|
||||||
|
DeviceDocumentForm,
|
||||||
EditTransferForm,
|
EditTransferForm,
|
||||||
FilterForm,
|
FilterForm,
|
||||||
LotForm,
|
LotForm,
|
||||||
|
@ -33,6 +38,7 @@ from ereuse_devicehub.inventory.forms import (
|
||||||
TransferForm,
|
TransferForm,
|
||||||
UploadPlaceholderForm,
|
UploadPlaceholderForm,
|
||||||
UploadSnapshotForm,
|
UploadSnapshotForm,
|
||||||
|
UserTrustsForm,
|
||||||
)
|
)
|
||||||
from ereuse_devicehub.labels.forms import PrintLabelsForm
|
from ereuse_devicehub.labels.forms import PrintLabelsForm
|
||||||
from ereuse_devicehub.parser.models import PlaceholdersLog, SnapshotsLog
|
from ereuse_devicehub.parser.models import PlaceholdersLog, SnapshotsLog
|
||||||
|
@ -41,12 +47,13 @@ from ereuse_devicehub.resources.device.models import (
|
||||||
Computer,
|
Computer,
|
||||||
DataStorage,
|
DataStorage,
|
||||||
Device,
|
Device,
|
||||||
|
Mobile,
|
||||||
Placeholder,
|
Placeholder,
|
||||||
)
|
)
|
||||||
from ereuse_devicehub.resources.documents.device_row import ActionRow, DeviceRow
|
from ereuse_devicehub.resources.documents.device_row import ActionRow, DeviceRow
|
||||||
from ereuse_devicehub.resources.enums import SnapshotSoftware
|
from ereuse_devicehub.resources.enums import SnapshotSoftware
|
||||||
from ereuse_devicehub.resources.hash_reports import insert_hash
|
from ereuse_devicehub.resources.hash_reports import insert_hash
|
||||||
from ereuse_devicehub.resources.lot.models import Lot
|
from ereuse_devicehub.resources.lot.models import Lot, ShareLot
|
||||||
from ereuse_devicehub.resources.tag.model import Tag
|
from ereuse_devicehub.resources.tag.model import Tag
|
||||||
from ereuse_devicehub.views import GenericMixin
|
from ereuse_devicehub.views import GenericMixin
|
||||||
|
|
||||||
|
@ -55,26 +62,44 @@ devices = Blueprint('inventory', __name__, url_prefix='/inventory')
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
PER_PAGE = 20
|
||||||
|
|
||||||
|
|
||||||
class DeviceListMixin(GenericMixin):
|
class DeviceListMixin(GenericMixin):
|
||||||
template_name = 'inventory/device_list.html'
|
template_name = 'inventory/device_list.html'
|
||||||
|
|
||||||
def get_context(self, lot_id=None, all_devices=False):
|
def get_context(self, lot_id=None, all_devices=False):
|
||||||
super().get_context()
|
super().get_context()
|
||||||
|
|
||||||
lots = self.context['lots']
|
page = int(request.args.get('page', 1))
|
||||||
form_filter = FilterForm(lots, lot_id, all_devices=all_devices)
|
per_page = int(request.args.get('per_page', PER_PAGE))
|
||||||
devices = form_filter.search()
|
filter = request.args.get('filter', "All+Computers")
|
||||||
|
|
||||||
lot = None
|
lot = None
|
||||||
|
|
||||||
|
share_lots = self.context['share_lots']
|
||||||
|
share_lot = share_lots.filter_by(lot_id=lot_id).first()
|
||||||
|
if share_lot:
|
||||||
|
lot = share_lot.lot
|
||||||
|
|
||||||
|
lots = self.context['lots']
|
||||||
|
form_filter = FilterForm(lots, lot, lot_id, all_devices=all_devices)
|
||||||
|
devices = form_filter.search().paginate(page=page, per_page=per_page)
|
||||||
|
devices.first = per_page * devices.page - per_page + 1
|
||||||
|
devices.last = len(devices.items) + devices.first - 1
|
||||||
|
|
||||||
form_transfer = ''
|
form_transfer = ''
|
||||||
form_delivery = ''
|
form_delivery = ''
|
||||||
form_receiver = ''
|
form_receiver = ''
|
||||||
|
form_customer_details = ''
|
||||||
|
|
||||||
if lot_id:
|
if lot_id and not lot:
|
||||||
lot = lots.filter(Lot.id == lot_id).one()
|
lot = lots.filter(Lot.id == lot_id).one()
|
||||||
if not lot.is_temporary and lot.transfer:
|
if not lot.is_temporary and lot.transfer:
|
||||||
form_transfer = EditTransferForm(lot_id=lot.id)
|
form_transfer = EditTransferForm(lot_id=lot.id)
|
||||||
form_delivery = NotesForm(lot_id=lot.id, type='Delivery')
|
form_delivery = NotesForm(lot_id=lot.id, type='Delivery')
|
||||||
form_receiver = NotesForm(lot_id=lot.id, type='Receiver')
|
form_receiver = NotesForm(lot_id=lot.id, type='Receiver')
|
||||||
|
form_customer_details = CustomerDetailsForm(lot_id=lot.id)
|
||||||
|
|
||||||
form_new_action = NewActionForm(lot=lot_id)
|
form_new_action = NewActionForm(lot=lot_id)
|
||||||
self.context.update(
|
self.context.update(
|
||||||
|
@ -86,12 +111,15 @@ class DeviceListMixin(GenericMixin):
|
||||||
'form_transfer': form_transfer,
|
'form_transfer': form_transfer,
|
||||||
'form_delivery': form_delivery,
|
'form_delivery': form_delivery,
|
||||||
'form_receiver': form_receiver,
|
'form_receiver': form_receiver,
|
||||||
|
'form_customer_details': form_customer_details,
|
||||||
'form_filter': form_filter,
|
'form_filter': form_filter,
|
||||||
'form_print_labels': PrintLabelsForm(),
|
'form_print_labels': PrintLabelsForm(),
|
||||||
'lot': lot,
|
'lot': lot,
|
||||||
'tags': self.get_user_tags(),
|
'tags': self.get_user_tags(),
|
||||||
'list_devices': self.get_selected_devices(form_new_action),
|
'list_devices': self.get_selected_devices(form_new_action),
|
||||||
'all_devices': all_devices,
|
'all_devices': all_devices,
|
||||||
|
'filter': filter,
|
||||||
|
'share_lots': share_lots,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -115,15 +143,43 @@ class DeviceListMixin(GenericMixin):
|
||||||
class ErasureListView(DeviceListMixin):
|
class ErasureListView(DeviceListMixin):
|
||||||
template_name = 'inventory/erasure_list.html'
|
template_name = 'inventory/erasure_list.html'
|
||||||
|
|
||||||
def dispatch_request(self):
|
def dispatch_request(self, orphans=0):
|
||||||
self.get_context()
|
self.get_context()
|
||||||
self.get_devices()
|
self.get_devices(orphans)
|
||||||
return flask.render_template(self.template_name, **self.context)
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
def get_devices(self):
|
def get_devices(self, orphans):
|
||||||
|
page = int(request.args.get('page', 1))
|
||||||
|
per_page = int(request.args.get('per_page', PER_PAGE))
|
||||||
|
|
||||||
erasure = EraseBasic.query.filter_by(author=g.user).order_by(
|
erasure = EraseBasic.query.filter_by(author=g.user).order_by(
|
||||||
EraseBasic.created.desc()
|
EraseBasic.created.desc()
|
||||||
)
|
)
|
||||||
|
if orphans:
|
||||||
|
schema = app.config.get('SCHEMA')
|
||||||
|
_user = g.user.id
|
||||||
|
sql = f"""
|
||||||
|
select action.id from {schema}.action as action
|
||||||
|
inner join {schema}.erase_basic as erase
|
||||||
|
on action.id=erase.id
|
||||||
|
inner join {schema}.device as device
|
||||||
|
on device.id=action.parent_id
|
||||||
|
inner join {schema}.placeholder as placeholder
|
||||||
|
on placeholder.binding_id=device.id
|
||||||
|
where (action.parent_id is null or placeholder.kangaroo=true)
|
||||||
|
and action.author_id='{_user}'
|
||||||
|
"""
|
||||||
|
ids = (e[0] for e in db.session.execute(sql))
|
||||||
|
erasure = (
|
||||||
|
EraseBasic.query.filter(EraseBasic.id.in_(ids))
|
||||||
|
.filter_by(author=g.user)
|
||||||
|
.order_by(EraseBasic.created.desc())
|
||||||
|
)
|
||||||
|
self.context['orphans'] = True
|
||||||
|
|
||||||
|
erasure = erasure.paginate(page=page, per_page=per_page)
|
||||||
|
erasure.first = per_page * erasure.page - per_page + 1
|
||||||
|
erasure.last = len(erasure.items) + erasure.first - 1
|
||||||
self.context['erasure'] = erasure
|
self.context['erasure'] = erasure
|
||||||
|
|
||||||
|
|
||||||
|
@ -166,10 +222,14 @@ class DeviceDetailView(GenericMixin):
|
||||||
)
|
)
|
||||||
|
|
||||||
form_tags = TagDeviceForm(dhid=id)
|
form_tags = TagDeviceForm(dhid=id)
|
||||||
|
placeholder = device.binding or device.placeholder
|
||||||
|
if not placeholder:
|
||||||
|
return NotFound()
|
||||||
|
|
||||||
self.context.update(
|
self.context.update(
|
||||||
{
|
{
|
||||||
'device': device,
|
'device': device,
|
||||||
'placeholder': device.binding or device.placeholder,
|
'placeholder': placeholder,
|
||||||
'page_title': 'Device {}'.format(device.devicehub_id),
|
'page_title': 'Device {}'.format(device.devicehub_id),
|
||||||
'form_tag_device': form_tags,
|
'form_tag_device': form_tags,
|
||||||
}
|
}
|
||||||
|
@ -296,6 +356,8 @@ class BindingView(GenericMixin):
|
||||||
self.real_phid = self.new_placeholder.phid
|
self.real_phid = self.new_placeholder.phid
|
||||||
self.abstract_dhid = self.old_device.devicehub_id
|
self.abstract_dhid = self.old_device.devicehub_id
|
||||||
self.abstract_phid = self.old_placeholder.phid
|
self.abstract_phid = self.old_placeholder.phid
|
||||||
|
if self.old_placeholder.kangaroo:
|
||||||
|
self.new_placeholder.kangaroo = True
|
||||||
|
|
||||||
# to do a backup of abstract_dhid and abstract_phid in
|
# to do a backup of abstract_dhid and abstract_phid in
|
||||||
# workbench device
|
# workbench device
|
||||||
|
@ -380,9 +442,14 @@ class UnBindingView(GenericMixin):
|
||||||
return flask.render_template(self.template_name, **self.context)
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
def clone_device(self, device):
|
def clone_device(self, device):
|
||||||
if device.binding.is_abstract:
|
if device.binding and device.binding.is_abstract:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
kangaroo = False
|
||||||
|
if device.binding:
|
||||||
|
kangaroo = device.binding.kangaroo
|
||||||
|
device.binding.kangaroo = False
|
||||||
|
|
||||||
dict_device = copy.copy(device.__dict__)
|
dict_device = copy.copy(device.__dict__)
|
||||||
dict_device.pop('_sa_instance_state')
|
dict_device.pop('_sa_instance_state')
|
||||||
dict_device.pop('id', None)
|
dict_device.pop('id', None)
|
||||||
|
@ -401,8 +468,14 @@ class UnBindingView(GenericMixin):
|
||||||
for c in device.components:
|
for c in device.components:
|
||||||
if c.binding:
|
if c.binding:
|
||||||
c.binding.device.parent = new_device
|
c.binding.device.parent = new_device
|
||||||
|
else:
|
||||||
|
new_c = self.clone_device(c)
|
||||||
|
new_c.parent = new_device
|
||||||
|
|
||||||
|
placeholder = Placeholder(
|
||||||
|
device=new_device, binding=device, is_abstract=True, kangaroo=kangaroo
|
||||||
|
)
|
||||||
|
|
||||||
placeholder = Placeholder(device=new_device, binding=device, is_abstract=True)
|
|
||||||
if (
|
if (
|
||||||
device.dhid_bk
|
device.dhid_bk
|
||||||
and not Device.query.filter_by(devicehub_id=device.dhid_bk).first()
|
and not Device.query.filter_by(devicehub_id=device.dhid_bk).first()
|
||||||
|
@ -473,8 +546,9 @@ class LotDeleteView(View):
|
||||||
|
|
||||||
def dispatch_request(self, id):
|
def dispatch_request(self, id):
|
||||||
form = LotForm(id=id)
|
form = LotForm(id=id)
|
||||||
if form.instance.trade:
|
shared = ShareLot.query.filter_by(lot=form.instance).first()
|
||||||
msg = "Sorry, the lot cannot be deleted because have a trade action "
|
if form.instance.trade or shared:
|
||||||
|
msg = "Sorry, the lot cannot be deleted because this lot is share"
|
||||||
messages.error(msg)
|
messages.error(msg)
|
||||||
next_url = url_for('inventory.lotdevicelist', lot_id=id)
|
next_url = url_for('inventory.lotdevicelist', lot_id=id)
|
||||||
return flask.redirect(next_url)
|
return flask.redirect(next_url)
|
||||||
|
@ -484,6 +558,27 @@ class LotDeleteView(View):
|
||||||
return flask.redirect(next_url)
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
|
||||||
|
class DocumentDeleteView(View):
|
||||||
|
methods = ['GET']
|
||||||
|
decorators = [login_required]
|
||||||
|
template_name = 'inventory/device_list.html'
|
||||||
|
form_class = TradeDocumentForm
|
||||||
|
|
||||||
|
def dispatch_request(self, lot_id, doc_id):
|
||||||
|
next_url = url_for('inventory.lotdevicelist', lot_id=lot_id)
|
||||||
|
form = self.form_class(lot=lot_id, document=doc_id)
|
||||||
|
try:
|
||||||
|
form.remove()
|
||||||
|
except Exception as err:
|
||||||
|
msg = "{}".format(err)
|
||||||
|
messages.error(msg)
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
msg = "Document removed successfully."
|
||||||
|
messages.success(msg)
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
|
||||||
class UploadSnapshotView(GenericMixin):
|
class UploadSnapshotView(GenericMixin):
|
||||||
methods = ['GET', 'POST']
|
methods = ['GET', 'POST']
|
||||||
decorators = [login_required]
|
decorators = [login_required]
|
||||||
|
@ -726,6 +821,69 @@ class NewTradeView(DeviceListMixin, NewActionView):
|
||||||
return flask.redirect(next_url)
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
|
||||||
|
class NewDeviceDocumentView(GenericMixin):
|
||||||
|
methods = ['POST', 'GET']
|
||||||
|
decorators = [login_required]
|
||||||
|
template_name = 'inventory/device_document.html'
|
||||||
|
form_class = DeviceDocumentForm
|
||||||
|
title = "Add new document"
|
||||||
|
|
||||||
|
def dispatch_request(self, dhid):
|
||||||
|
self.form = self.form_class(dhid=dhid)
|
||||||
|
self.get_context()
|
||||||
|
|
||||||
|
if self.form.validate_on_submit():
|
||||||
|
self.form.save()
|
||||||
|
messages.success('Document created successfully!')
|
||||||
|
next_url = url_for('inventory.device_details', id=dhid)
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
self.context.update({'form': self.form, 'title': self.title})
|
||||||
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
|
|
||||||
|
class EditDeviceDocumentView(GenericMixin):
|
||||||
|
decorators = [login_required]
|
||||||
|
methods = ['POST', 'GET']
|
||||||
|
template_name = 'inventory/device_document.html'
|
||||||
|
form_class = DeviceDocumentForm
|
||||||
|
title = "Edit document"
|
||||||
|
|
||||||
|
def dispatch_request(self, dhid, doc_id):
|
||||||
|
self.form = self.form_class(dhid=dhid, document=doc_id)
|
||||||
|
self.get_context()
|
||||||
|
|
||||||
|
if self.form.validate_on_submit():
|
||||||
|
self.form.save()
|
||||||
|
messages.success('Edit document successfully!')
|
||||||
|
next_url = url_for('inventory.device_details', id=dhid)
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
self.context.update({'form': self.form, 'title': self.title})
|
||||||
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceDocumentDeleteView(View):
|
||||||
|
methods = ['GET']
|
||||||
|
decorators = [login_required]
|
||||||
|
template_name = 'inventory/device_detail.html'
|
||||||
|
form_class = DeviceDocumentForm
|
||||||
|
|
||||||
|
def dispatch_request(self, dhid, doc_id):
|
||||||
|
self.form = self.form_class(dhid=dhid, document=doc_id)
|
||||||
|
next_url = url_for('inventory.device_details', id=dhid)
|
||||||
|
try:
|
||||||
|
self.form.remove()
|
||||||
|
except Exception as err:
|
||||||
|
msg = "{}".format(err)
|
||||||
|
messages.error(msg)
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
msg = "Document removed successfully."
|
||||||
|
messages.success(msg)
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
|
||||||
class NewTradeDocumentView(GenericMixin):
|
class NewTradeDocumentView(GenericMixin):
|
||||||
methods = ['POST', 'GET']
|
methods = ['POST', 'GET']
|
||||||
decorators = [login_required]
|
decorators = [login_required]
|
||||||
|
@ -747,6 +905,27 @@ class NewTradeDocumentView(GenericMixin):
|
||||||
return flask.render_template(self.template_name, **self.context)
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
|
|
||||||
|
class EditTransferDocumentView(GenericMixin):
|
||||||
|
decorators = [login_required]
|
||||||
|
methods = ['POST', 'GET']
|
||||||
|
template_name = 'inventory/trade_document.html'
|
||||||
|
form_class = TradeDocumentForm
|
||||||
|
title = "Edit document"
|
||||||
|
|
||||||
|
def dispatch_request(self, lot_id, doc_id):
|
||||||
|
self.form = self.form_class(lot=lot_id, document=doc_id)
|
||||||
|
self.get_context()
|
||||||
|
|
||||||
|
if self.form.validate_on_submit():
|
||||||
|
self.form.save()
|
||||||
|
messages.success('Edit document successfully!')
|
||||||
|
next_url = url_for('inventory.lotdevicelist', lot_id=lot_id)
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
self.context.update({'form': self.form, 'title': self.title})
|
||||||
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
|
|
||||||
class NewTransferView(GenericMixin):
|
class NewTransferView(GenericMixin):
|
||||||
methods = ['POST', 'GET']
|
methods = ['POST', 'GET']
|
||||||
template_name = 'inventory/new_transfer.html'
|
template_name = 'inventory/new_transfer.html'
|
||||||
|
@ -778,6 +957,21 @@ class NewTransferView(GenericMixin):
|
||||||
return flask.render_template(self.template_name, **self.context)
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
|
|
||||||
|
class OpenTransferView(GenericMixin):
|
||||||
|
methods = ['GET']
|
||||||
|
|
||||||
|
def dispatch_request(self, lot_id=None):
|
||||||
|
lot = Lot.query.filter_by(id=lot_id).one()
|
||||||
|
next_url = url_for('inventory.lotdevicelist', lot_id=str(lot_id))
|
||||||
|
|
||||||
|
if hasattr(lot, 'transfer'):
|
||||||
|
lot.transfer.date = None
|
||||||
|
db.session.commit()
|
||||||
|
messages.success('Transfer was reopen successfully!')
|
||||||
|
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
|
||||||
class EditTransferView(GenericMixin):
|
class EditTransferView(GenericMixin):
|
||||||
methods = ['POST']
|
methods = ['POST']
|
||||||
form_class = EditTransferForm
|
form_class = EditTransferForm
|
||||||
|
@ -812,6 +1006,7 @@ class ExportsView(View):
|
||||||
'certificates': self.erasure,
|
'certificates': self.erasure,
|
||||||
'lots': self.lots_export,
|
'lots': self.lots_export,
|
||||||
'devices_lots': self.devices_lots_export,
|
'devices_lots': self.devices_lots_export,
|
||||||
|
'obada_standard': self.obada_standard_export,
|
||||||
'snapshot': self.snapshot,
|
'snapshot': self.snapshot,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -820,9 +1015,20 @@ class ExportsView(View):
|
||||||
return export_ids[export_id]()
|
return export_ids[export_id]()
|
||||||
|
|
||||||
def find_devices(self):
|
def find_devices(self):
|
||||||
|
sql = """
|
||||||
|
select lot_device.device_id as id from {schema}.share_lot as share
|
||||||
|
inner join {schema}.lot_device as lot_device
|
||||||
|
on share.lot_id=lot_device.lot_id
|
||||||
|
where share.user_to_id='{user_id}'
|
||||||
|
""".format(
|
||||||
|
schema=app.config.get('SCHEMA'), user_id=g.user.id
|
||||||
|
)
|
||||||
|
|
||||||
|
shared = (x[0] for x in db.session.execute(sql))
|
||||||
|
|
||||||
args = request.args.get('ids')
|
args = request.args.get('ids')
|
||||||
ids = args.split(',') if args else []
|
ids = args.split(',') if args else []
|
||||||
query = Device.query.filter(Device.owner == g.user)
|
query = Device.query.filter(or_(Device.owner == g.user, Device.id.in_(shared)))
|
||||||
return query.filter(Device.devicehub_id.in_(ids))
|
return query.filter(Device.devicehub_id.in_(ids))
|
||||||
|
|
||||||
def response_csv(self, data, name):
|
def response_csv(self, data, name):
|
||||||
|
@ -855,6 +1061,34 @@ class ExportsView(View):
|
||||||
|
|
||||||
return self.response_csv(data, "export.csv")
|
return self.response_csv(data, "export.csv")
|
||||||
|
|
||||||
|
def obada_standard_export(self):
|
||||||
|
"""Get device information for Obada Standard."""
|
||||||
|
data = StringIO()
|
||||||
|
cw = csv.writer(
|
||||||
|
data,
|
||||||
|
delimiter=',',
|
||||||
|
lineterminator="\n",
|
||||||
|
quotechar='',
|
||||||
|
quoting=csv.QUOTE_NONE,
|
||||||
|
)
|
||||||
|
|
||||||
|
cw.writerow(['Manufacturer', 'Model', 'Serial Number'])
|
||||||
|
|
||||||
|
for device in self.find_devices():
|
||||||
|
if device.placeholder:
|
||||||
|
if not device.placeholder.binding:
|
||||||
|
continue
|
||||||
|
device = device.placeholder.binding
|
||||||
|
|
||||||
|
d = [
|
||||||
|
device.manufacturer,
|
||||||
|
device.model,
|
||||||
|
device.serial_number,
|
||||||
|
]
|
||||||
|
cw.writerow(d)
|
||||||
|
|
||||||
|
return self.response_csv(data, "obada_standard.csv")
|
||||||
|
|
||||||
def metrics(self):
|
def metrics(self):
|
||||||
"""Get device query and put information in csv format."""
|
"""Get device query and put information in csv format."""
|
||||||
data = StringIO()
|
data = StringIO()
|
||||||
|
@ -922,6 +1156,7 @@ class ExportsView(View):
|
||||||
cw.writerow(
|
cw.writerow(
|
||||||
[
|
[
|
||||||
'Data Storage Serial',
|
'Data Storage Serial',
|
||||||
|
'DHID',
|
||||||
'Snapshot ID',
|
'Snapshot ID',
|
||||||
'Type of Erasure',
|
'Type of Erasure',
|
||||||
'PHID Erasure Host',
|
'PHID Erasure Host',
|
||||||
|
@ -941,9 +1176,10 @@ class ExportsView(View):
|
||||||
for ac in query:
|
for ac in query:
|
||||||
row = [
|
row = [
|
||||||
ac.device.serial_number.upper(),
|
ac.device.serial_number.upper(),
|
||||||
ac.snapshot.uuid,
|
ac.device.dhid,
|
||||||
|
ac.snapshot.uuid if ac.snapshot else '',
|
||||||
ac.type,
|
ac.type,
|
||||||
ac.get_phid(),
|
ac.parent.phid() if ac.parent else '',
|
||||||
ac.severity,
|
ac.severity,
|
||||||
ac.created.strftime('%Y-%m-%d %H:%M:%S'),
|
ac.created.strftime('%Y-%m-%d %H:%M:%S'),
|
||||||
]
|
]
|
||||||
|
@ -951,22 +1187,126 @@ class ExportsView(View):
|
||||||
|
|
||||||
return self.response_csv(data, "Erasures.csv")
|
return self.response_csv(data, "Erasures.csv")
|
||||||
|
|
||||||
def build_erasure_certificate(self):
|
def get_datastorages(self):
|
||||||
erasures = []
|
erasures = []
|
||||||
for device in self.find_devices():
|
for device in self.find_devices():
|
||||||
if device.placeholder and device.placeholder.binding:
|
if device.placeholder and device.placeholder.binding:
|
||||||
device = device.placeholder.binding
|
device = device.placeholder.binding
|
||||||
if isinstance(device, Computer):
|
if isinstance(device, Computer):
|
||||||
for privacy in device.privacy:
|
for ac in device.last_erase_action:
|
||||||
erasures.append(privacy)
|
erasures.append(ac)
|
||||||
elif isinstance(device, DataStorage):
|
elif isinstance(device, DataStorage):
|
||||||
if device.privacy:
|
ac = device.last_erase_action
|
||||||
erasures.append(device.privacy)
|
if ac:
|
||||||
|
erasures.append(ac)
|
||||||
|
elif isinstance(device, Mobile):
|
||||||
|
ac = device.last_erase_action
|
||||||
|
if ac:
|
||||||
|
erasures.append(ac)
|
||||||
|
return erasures
|
||||||
|
|
||||||
|
def get_costum_details(self, erasures):
|
||||||
|
my_data = None
|
||||||
|
customer_details = None
|
||||||
|
lot = None
|
||||||
|
|
||||||
|
if hasattr(g.user, 'sanitization_entity'):
|
||||||
|
my_data = g.user.sanitization_entity
|
||||||
|
|
||||||
|
customer_details = self.get_customer_details_from_request()
|
||||||
|
|
||||||
|
if not erasures or customer_details:
|
||||||
|
return my_data, customer_details
|
||||||
|
|
||||||
|
lots = {erasures[0].device.get_last_incoming_lot()}
|
||||||
|
for e in erasures[1:]:
|
||||||
|
lots.add(e.device.get_last_incoming_lot())
|
||||||
|
|
||||||
|
if len(lots) != 1:
|
||||||
|
return my_data, customer_details
|
||||||
|
|
||||||
|
lot = lots.pop()
|
||||||
|
try:
|
||||||
|
customer_details = lot.transfer.customer_details
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return my_data, customer_details
|
||||||
|
|
||||||
|
def get_customer_details_from_request(self):
|
||||||
|
try:
|
||||||
|
if len(request.referrer.split('/lot/')) < 2:
|
||||||
|
return
|
||||||
|
|
||||||
|
lot_id = request.referrer.split('/lot/')[-1].split('/')[0]
|
||||||
|
lot = Lot.query.filter_by(owner=g.user).filter_by(id=lot_id).first()
|
||||||
|
return lot.transfer.customer_details
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_server_erasure_hosts(self, erasures):
|
||||||
|
erasures_host = []
|
||||||
|
erasures_mobile = []
|
||||||
|
erasures_on_server = []
|
||||||
|
for erase in erasures:
|
||||||
|
try:
|
||||||
|
if isinstance(erase.device, Mobile):
|
||||||
|
erasures_mobile.append(erase.device)
|
||||||
|
continue
|
||||||
|
if erase.parent.binding.kangaroo:
|
||||||
|
erasures_host.append(erase.parent)
|
||||||
|
erasures_on_server.append(erase)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return erasures_host, erasures_on_server, erasures_mobile
|
||||||
|
|
||||||
|
def build_erasure_certificate(self):
|
||||||
|
erasures = self.get_datastorages()
|
||||||
|
software = 'USODY DRIVE ERASURE'
|
||||||
|
if erasures and erasures[0].snapshot:
|
||||||
|
software += ' {}'.format(
|
||||||
|
erasures[0].snapshot.version,
|
||||||
|
)
|
||||||
|
|
||||||
|
my_data, customer_details = self.get_costum_details(erasures)
|
||||||
|
|
||||||
|
a, b, c = self.get_server_erasure_hosts(erasures)
|
||||||
|
erasures_host, erasures_on_server, erasures_mobile = a, b, c
|
||||||
|
erasures_host = set(erasures_host)
|
||||||
|
erasures_mobile = set(erasures_mobile)
|
||||||
|
|
||||||
|
result_success = 0
|
||||||
|
result_failed = 0
|
||||||
|
for e in erasures:
|
||||||
|
result = e.severity.get_public_name()
|
||||||
|
if "Failed" == result:
|
||||||
|
result_failed += 1
|
||||||
|
if "Success" == result:
|
||||||
|
result_success += 1
|
||||||
|
|
||||||
|
erasures = sorted(erasures, key=lambda x: x.end_time)
|
||||||
|
erasures_on_server = sorted(erasures_on_server, key=lambda x: x.end_time)
|
||||||
|
erasures_normal = list(set(erasures) - set(erasures_on_server))
|
||||||
|
erasures_normal = sorted(erasures_normal, key=lambda x: x.end_time)
|
||||||
|
n_computers = len({x.parent for x in erasures if x.parent} - erasures_host)
|
||||||
|
n_mobiles = len(erasures_mobile)
|
||||||
|
|
||||||
params = {
|
params = {
|
||||||
'title': 'Erasure Certificate',
|
'title': 'Device Sanitization',
|
||||||
'erasures': tuple(erasures),
|
'erasures': tuple(erasures),
|
||||||
'url_pdf': '',
|
'url_pdf': '',
|
||||||
|
'date_report': '{:%c}'.format(datetime.datetime.now()),
|
||||||
|
'uuid_report': '{}'.format(uuid.uuid4()),
|
||||||
|
'software': software,
|
||||||
|
'my_data': my_data,
|
||||||
|
'n_computers': n_computers,
|
||||||
|
'n_mobiles': n_mobiles,
|
||||||
|
'result_success': result_success,
|
||||||
|
'result_failed': result_failed,
|
||||||
|
'customer_details': customer_details,
|
||||||
|
'erasure_hosts': erasures_host,
|
||||||
|
'erasure_mobiles': erasures_mobile,
|
||||||
|
'erasures_normal': erasures_normal,
|
||||||
}
|
}
|
||||||
return flask.render_template('inventory/erasure.html', **params)
|
return flask.render_template('inventory/erasure.html', **params)
|
||||||
|
|
||||||
|
@ -1002,12 +1342,18 @@ class ExportsView(View):
|
||||||
'Receiver Note Date',
|
'Receiver Note Date',
|
||||||
'Receiver Note Units',
|
'Receiver Note Units',
|
||||||
'Receiver Note Weight',
|
'Receiver Note Weight',
|
||||||
|
'Customer Company Name',
|
||||||
|
'Customer Location',
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
for lot in Lot.query.filter_by(owner=g.user):
|
all_lots = set(Lot.query.filter_by(owner=g.user).all())
|
||||||
|
share_lots = [s.lot for s in ShareLot.query.filter_by(user_to=g.user)]
|
||||||
|
all_lots = all_lots.union(share_lots)
|
||||||
|
for lot in all_lots:
|
||||||
delivery_note = lot.transfer and lot.transfer.delivery_note or ''
|
delivery_note = lot.transfer and lot.transfer.delivery_note or ''
|
||||||
receiver_note = lot.transfer and lot.transfer.receiver_note or ''
|
receiver_note = lot.transfer and lot.transfer.receiver_note or ''
|
||||||
|
customer = lot.transfer and lot.transfer.customer_details or ''
|
||||||
wb_devs = 0
|
wb_devs = 0
|
||||||
placeholders = 0
|
placeholders = 0
|
||||||
|
|
||||||
|
@ -1020,10 +1366,13 @@ class ExportsView(View):
|
||||||
elif snapshots[-1].software in [SnapshotSoftware.Workbench]:
|
elif snapshots[-1].software in [SnapshotSoftware.Workbench]:
|
||||||
wb_devs += 1
|
wb_devs += 1
|
||||||
|
|
||||||
|
type_lot = lot.type_transfer()
|
||||||
|
if lot in share_lots:
|
||||||
|
type_lot = "Shared"
|
||||||
row = [
|
row = [
|
||||||
lot.id,
|
lot.id,
|
||||||
lot.name,
|
lot.name,
|
||||||
lot.type_transfer(),
|
type_lot,
|
||||||
lot.transfer and (lot.transfer.closed and 'Closed' or 'Open') or '',
|
lot.transfer and (lot.transfer.closed and 'Closed' or 'Open') or '',
|
||||||
lot.transfer and lot.transfer.code or '',
|
lot.transfer and lot.transfer.code or '',
|
||||||
lot.transfer and lot.transfer.date or '',
|
lot.transfer and lot.transfer.date or '',
|
||||||
|
@ -1041,6 +1390,8 @@ class ExportsView(View):
|
||||||
receiver_note and receiver_note.date or '',
|
receiver_note and receiver_note.date or '',
|
||||||
receiver_note and receiver_note.units or '',
|
receiver_note and receiver_note.units or '',
|
||||||
receiver_note and receiver_note.weight or '',
|
receiver_note and receiver_note.weight or '',
|
||||||
|
customer and customer.company_name or '',
|
||||||
|
customer and customer.location or '',
|
||||||
]
|
]
|
||||||
cw.writerow(row)
|
cw.writerow(row)
|
||||||
|
|
||||||
|
@ -1070,11 +1421,14 @@ class ExportsView(View):
|
||||||
|
|
||||||
for dev in self.find_devices():
|
for dev in self.find_devices():
|
||||||
for lot in dev.lots:
|
for lot in dev.lots:
|
||||||
|
type_lot = lot.type_transfer()
|
||||||
|
if lot.is_shared:
|
||||||
|
type_lot = "Shared"
|
||||||
row = [
|
row = [
|
||||||
dev.devicehub_id,
|
dev.devicehub_id,
|
||||||
lot.id,
|
lot.id,
|
||||||
lot.name,
|
lot.name,
|
||||||
lot.type_transfer(),
|
type_lot,
|
||||||
lot.transfer and (lot.transfer.closed and 'Closed' or 'Open') or '',
|
lot.transfer and (lot.transfer.closed and 'Closed' or 'Open') or '',
|
||||||
lot.transfer and lot.transfer.code or '',
|
lot.transfer and lot.transfer.code or '',
|
||||||
lot.transfer and lot.transfer.date or '',
|
lot.transfer and lot.transfer.date or '',
|
||||||
|
@ -1126,50 +1480,27 @@ class SnapshotListView(GenericMixin):
|
||||||
return flask.render_template(self.template_name, **self.context)
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
def get_snapshots_log(self):
|
def get_snapshots_log(self):
|
||||||
|
page = int(request.args.get('page', 1))
|
||||||
|
per_page = int(request.args.get('per_page', PER_PAGE))
|
||||||
|
|
||||||
snapshots_log = SnapshotsLog.query.filter(
|
snapshots_log = SnapshotsLog.query.filter(
|
||||||
SnapshotsLog.owner == g.user
|
SnapshotsLog.owner == g.user
|
||||||
).order_by(SnapshotsLog.created.desc())
|
).order_by(SnapshotsLog.created.desc())
|
||||||
logs = {}
|
|
||||||
for snap in snapshots_log:
|
|
||||||
try:
|
|
||||||
system_uuid = snap.snapshot.device.system_uuid or ''
|
|
||||||
except AttributeError:
|
|
||||||
system_uuid = ''
|
|
||||||
|
|
||||||
if snap.snapshot_uuid not in logs:
|
snapshots_log = snapshots_log.paginate(page=page, per_page=per_page)
|
||||||
logs[snap.snapshot_uuid] = {
|
snapshots_log.first = per_page * snapshots_log.page - per_page + 1
|
||||||
'sid': snap.sid,
|
snapshots_log.last = len(snapshots_log.items) + snapshots_log.first - 1
|
||||||
'snapshot_uuid': snap.snapshot_uuid,
|
return snapshots_log
|
||||||
'version': snap.version,
|
|
||||||
'device': snap.get_device(),
|
|
||||||
'system_uuid': system_uuid,
|
|
||||||
'status': snap.get_status(),
|
|
||||||
'severity': snap.severity,
|
|
||||||
'created': snap.created,
|
|
||||||
'type_device': snap.get_type_device(),
|
|
||||||
'original_dhid': snap.get_original_dhid(),
|
|
||||||
'new_device': snap.get_new_device(),
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
|
|
||||||
if snap.created > logs[snap.snapshot_uuid]['created']:
|
|
||||||
logs[snap.snapshot_uuid]['created'] = snap.created
|
|
||||||
|
|
||||||
if snap.severity > logs[snap.snapshot_uuid]['severity']:
|
|
||||||
logs[snap.snapshot_uuid]['severity'] = snap.severity
|
|
||||||
logs[snap.snapshot_uuid]['status'] = snap.get_status()
|
|
||||||
|
|
||||||
result = sorted(logs.values(), key=lambda d: d['created'])
|
|
||||||
result.reverse()
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class SnapshotDetailView(GenericMixin):
|
class SnapshotDetailView(GenericMixin):
|
||||||
template_name = 'inventory/snapshot_detail.html'
|
template_name = 'inventory/snapshot_detail.html'
|
||||||
|
methods = ['GET', 'POST']
|
||||||
|
form_class = UserTrustsForm
|
||||||
|
|
||||||
def dispatch_request(self, snapshot_uuid):
|
def dispatch_request(self, snapshot_uuid):
|
||||||
self.snapshot_uuid = snapshot_uuid
|
self.snapshot_uuid = snapshot_uuid
|
||||||
|
form = self.form_class(snapshot_uuid)
|
||||||
self.get_context()
|
self.get_context()
|
||||||
self.context['page_title'] = "Snapshot Detail"
|
self.context['page_title'] = "Snapshot Detail"
|
||||||
self.context['snapshots_log'] = self.get_snapshots_log()
|
self.context['snapshots_log'] = self.get_snapshots_log()
|
||||||
|
@ -1177,6 +1508,10 @@ class SnapshotDetailView(GenericMixin):
|
||||||
self.context['snapshot_sid'] = ''
|
self.context['snapshot_sid'] = ''
|
||||||
if self.context['snapshots_log'].count():
|
if self.context['snapshots_log'].count():
|
||||||
self.context['snapshot_sid'] = self.context['snapshots_log'][0].sid
|
self.context['snapshot_sid'] = self.context['snapshots_log'][0].sid
|
||||||
|
self.context['form'] = form
|
||||||
|
|
||||||
|
if form.validate_on_submit():
|
||||||
|
form.save()
|
||||||
|
|
||||||
return flask.render_template(self.template_name, **self.context)
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
|
@ -1188,6 +1523,28 @@ class SnapshotDetailView(GenericMixin):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class CustomerDetailsView(GenericMixin):
|
||||||
|
methods = ['POST']
|
||||||
|
form_class = CustomerDetailsForm
|
||||||
|
|
||||||
|
def dispatch_request(self, lot_id):
|
||||||
|
self.get_context()
|
||||||
|
form = self.form_class(request.form, lot_id=lot_id)
|
||||||
|
next_url = url_for('inventory.lotdevicelist', lot_id=lot_id)
|
||||||
|
|
||||||
|
if form.validate_on_submit():
|
||||||
|
form.save()
|
||||||
|
messages.success('Customer details updated successfully!')
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
messages.error('Customer details updated error!')
|
||||||
|
for k, v in form.errors.items():
|
||||||
|
value = ';'.join(v)
|
||||||
|
key = form[k].label.text
|
||||||
|
messages.error('Error {key}: {value}!'.format(key=key, value=value))
|
||||||
|
return flask.redirect(next_url)
|
||||||
|
|
||||||
|
|
||||||
class DeliveryNoteView(GenericMixin):
|
class DeliveryNoteView(GenericMixin):
|
||||||
methods = ['POST']
|
methods = ['POST']
|
||||||
form_class = NotesForm
|
form_class = NotesForm
|
||||||
|
@ -1292,10 +1649,17 @@ class PlaceholderLogListView(GenericMixin):
|
||||||
return flask.render_template(self.template_name, **self.context)
|
return flask.render_template(self.template_name, **self.context)
|
||||||
|
|
||||||
def get_placeholders_log(self):
|
def get_placeholders_log(self):
|
||||||
|
page = int(request.args.get('page', 1))
|
||||||
|
per_page = int(request.args.get('per_page', PER_PAGE))
|
||||||
|
|
||||||
placeholder_log = PlaceholdersLog.query.filter(
|
placeholder_log = PlaceholdersLog.query.filter(
|
||||||
PlaceholdersLog.owner == g.user
|
PlaceholdersLog.owner == g.user
|
||||||
).order_by(PlaceholdersLog.created.desc())
|
).order_by(PlaceholdersLog.created.desc())
|
||||||
|
|
||||||
|
placeholder_log = placeholder_log.paginate(page=page, per_page=per_page)
|
||||||
|
placeholder_log.first = per_page * placeholder_log.page - per_page + 1
|
||||||
|
placeholder_log.last = len(placeholder_log.items) + placeholder_log.first - 1
|
||||||
|
|
||||||
return placeholder_log
|
return placeholder_log
|
||||||
|
|
||||||
|
|
||||||
|
@ -1308,8 +1672,28 @@ devices.add_url_rule(
|
||||||
'/action/datawipe/add/', view_func=NewDataWipeView.as_view('datawipe_add')
|
'/action/datawipe/add/', view_func=NewDataWipeView.as_view('datawipe_add')
|
||||||
)
|
)
|
||||||
devices.add_url_rule(
|
devices.add_url_rule(
|
||||||
'/lot/<string:lot_id>/trade-document/add/',
|
'/device/<string:dhid>/document/add/',
|
||||||
view_func=NewTradeDocumentView.as_view('trade_document_add'),
|
view_func=NewDeviceDocumentView.as_view('device_document_add'),
|
||||||
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/device/<string:dhid>/document/edit/<string:doc_id>',
|
||||||
|
view_func=EditDeviceDocumentView.as_view('device_document_edit'),
|
||||||
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/device/<string:dhid>/document/del/<string:doc_id>',
|
||||||
|
view_func=DeviceDocumentDeleteView.as_view('device_document_del'),
|
||||||
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/lot/<string:lot_id>/transfer-document/add/',
|
||||||
|
view_func=NewTradeDocumentView.as_view('transfer_document_add'),
|
||||||
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/lot/<string:lot_id>/document/edit/<string:doc_id>',
|
||||||
|
view_func=EditTransferDocumentView.as_view('transfer_document_edit'),
|
||||||
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/lot/<string:lot_id>/document/del/<string:doc_id>',
|
||||||
|
view_func=DocumentDeleteView.as_view('document_del'),
|
||||||
)
|
)
|
||||||
devices.add_url_rule('/device/', view_func=DeviceListView.as_view('devicelist'))
|
devices.add_url_rule('/device/', view_func=DeviceListView.as_view('devicelist'))
|
||||||
devices.add_url_rule(
|
devices.add_url_rule(
|
||||||
|
@ -1372,6 +1756,10 @@ devices.add_url_rule(
|
||||||
'/lot/<string:lot_id>/transfer/',
|
'/lot/<string:lot_id>/transfer/',
|
||||||
view_func=EditTransferView.as_view('edit_transfer'),
|
view_func=EditTransferView.as_view('edit_transfer'),
|
||||||
)
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/lot/<string:lot_id>/customerdetails/',
|
||||||
|
view_func=CustomerDetailsView.as_view('customer_details'),
|
||||||
|
)
|
||||||
devices.add_url_rule(
|
devices.add_url_rule(
|
||||||
'/lot/<string:lot_id>/deliverynote/',
|
'/lot/<string:lot_id>/deliverynote/',
|
||||||
view_func=DeliveryNoteView.as_view('delivery_note'),
|
view_func=DeliveryNoteView.as_view('delivery_note'),
|
||||||
|
@ -1404,3 +1792,11 @@ devices.add_url_rule(
|
||||||
devices.add_url_rule(
|
devices.add_url_rule(
|
||||||
'/device/erasure/', view_func=ErasureListView.as_view('device_erasure_list')
|
'/device/erasure/', view_func=ErasureListView.as_view('device_erasure_list')
|
||||||
)
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/device/erasure/<int:orphans>/',
|
||||||
|
view_func=ErasureListView.as_view('device_erasure_list_orphans'),
|
||||||
|
)
|
||||||
|
devices.add_url_rule(
|
||||||
|
'/lot/<string:lot_id>/opentransfer/',
|
||||||
|
view_func=OpenTransferView.as_view('open_transfer'),
|
||||||
|
)
|
||||||
|
|
|
@ -8,7 +8,7 @@ from requests.exceptions import ConnectionError
|
||||||
|
|
||||||
from ereuse_devicehub import __version__, messages
|
from ereuse_devicehub import __version__, messages
|
||||||
from ereuse_devicehub.labels.forms import PrintLabelsForm, TagForm, TagUnnamedForm
|
from ereuse_devicehub.labels.forms import PrintLabelsForm, TagForm, TagUnnamedForm
|
||||||
from ereuse_devicehub.resources.lot.models import Lot
|
from ereuse_devicehub.resources.lot.models import Lot, ShareLot
|
||||||
from ereuse_devicehub.resources.tag.model import Tag
|
from ereuse_devicehub.resources.tag.model import Tag
|
||||||
|
|
||||||
labels = Blueprint('labels', __name__, url_prefix='/labels')
|
labels = Blueprint('labels', __name__, url_prefix='/labels')
|
||||||
|
@ -23,6 +23,7 @@ class TagListView(View):
|
||||||
|
|
||||||
def dispatch_request(self):
|
def dispatch_request(self):
|
||||||
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
||||||
|
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
|
||||||
tags = Tag.query.filter(Tag.owner_id == current_user.id).order_by(
|
tags = Tag.query.filter(Tag.owner_id == current_user.id).order_by(
|
||||||
Tag.created.desc()
|
Tag.created.desc()
|
||||||
)
|
)
|
||||||
|
@ -31,6 +32,7 @@ class TagListView(View):
|
||||||
'tags': tags,
|
'tags': tags,
|
||||||
'page_title': 'Unique Identifiers Management',
|
'page_title': 'Unique Identifiers Management',
|
||||||
'version': __version__,
|
'version': __version__,
|
||||||
|
'share_lots': share_lots,
|
||||||
}
|
}
|
||||||
return flask.render_template(self.template_name, **context)
|
return flask.render_template(self.template_name, **context)
|
||||||
|
|
||||||
|
@ -42,7 +44,13 @@ class TagAddView(View):
|
||||||
|
|
||||||
def dispatch_request(self):
|
def dispatch_request(self):
|
||||||
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
||||||
context = {'page_title': 'New Tag', 'lots': lots, 'version': __version__}
|
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
|
||||||
|
context = {
|
||||||
|
'page_title': 'New Tag',
|
||||||
|
'lots': lots,
|
||||||
|
'version': __version__,
|
||||||
|
'share_lots': share_lots,
|
||||||
|
}
|
||||||
form = TagForm()
|
form = TagForm()
|
||||||
if form.validate_on_submit():
|
if form.validate_on_submit():
|
||||||
form.save()
|
form.save()
|
||||||
|
@ -59,10 +67,12 @@ class TagAddUnnamedView(View):
|
||||||
|
|
||||||
def dispatch_request(self):
|
def dispatch_request(self):
|
||||||
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
||||||
|
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
|
||||||
context = {
|
context = {
|
||||||
'page_title': 'New Unnamed Tag',
|
'page_title': 'New Unnamed Tag',
|
||||||
'lots': lots,
|
'lots': lots,
|
||||||
'version': __version__,
|
'version': __version__,
|
||||||
|
'share_lots': share_lots,
|
||||||
}
|
}
|
||||||
form = TagUnnamedForm()
|
form = TagUnnamedForm()
|
||||||
if form.validate_on_submit():
|
if form.validate_on_submit():
|
||||||
|
@ -94,11 +104,13 @@ class PrintLabelsView(View):
|
||||||
|
|
||||||
def dispatch_request(self):
|
def dispatch_request(self):
|
||||||
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
||||||
|
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
|
||||||
context = {
|
context = {
|
||||||
'lots': lots,
|
'lots': lots,
|
||||||
'page_title': self.title,
|
'page_title': self.title,
|
||||||
'version': __version__,
|
'version': __version__,
|
||||||
'referrer': request.referrer,
|
'referrer': request.referrer,
|
||||||
|
'share_lots': share_lots,
|
||||||
}
|
}
|
||||||
|
|
||||||
form = PrintLabelsForm()
|
form = PrintLabelsForm()
|
||||||
|
@ -123,6 +135,7 @@ class LabelDetailView(View):
|
||||||
|
|
||||||
def dispatch_request(self, id):
|
def dispatch_request(self, id):
|
||||||
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
lots = Lot.query.filter(Lot.owner_id == current_user.id)
|
||||||
|
share_lots = ShareLot.query.filter_by(user_to_id=current_user.id)
|
||||||
tag = (
|
tag = (
|
||||||
Tag.query.filter(Tag.owner_id == current_user.id).filter(Tag.id == id).one()
|
Tag.query.filter(Tag.owner_id == current_user.id).filter(Tag.id == id).one()
|
||||||
)
|
)
|
||||||
|
@ -131,6 +144,7 @@ class LabelDetailView(View):
|
||||||
'page_title': self.title,
|
'page_title': self.title,
|
||||||
'version': __version__,
|
'version': __version__,
|
||||||
'referrer': request.referrer,
|
'referrer': request.referrer,
|
||||||
|
'share_lots': share_lots,
|
||||||
}
|
}
|
||||||
|
|
||||||
devices = []
|
devices = []
|
||||||
|
|
|
@ -0,0 +1,622 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
"""
|
||||||
|
flaskext.mail
|
||||||
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Flask extension for sending email.
|
||||||
|
|
||||||
|
:copyright: (c) 2010 by Dan Jacob.
|
||||||
|
:license: BSD, see LICENSE for more details.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import with_statement
|
||||||
|
|
||||||
|
__version__ = '0.9.1'
|
||||||
|
|
||||||
|
import re
|
||||||
|
import smtplib
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import unicodedata
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from email import charset
|
||||||
|
from email.encoders import encode_base64
|
||||||
|
from email.header import Header
|
||||||
|
from email.mime.base import MIMEBase
|
||||||
|
from email.mime.multipart import MIMEMultipart
|
||||||
|
from email.mime.text import MIMEText
|
||||||
|
from email.utils import formataddr, formatdate, make_msgid, parseaddr
|
||||||
|
|
||||||
|
import blinker
|
||||||
|
from flask import current_app
|
||||||
|
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
|
||||||
|
PY34 = PY3 and sys.version_info[1] >= 4
|
||||||
|
|
||||||
|
basestring = str
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
string_types = (str,)
|
||||||
|
text_type = str
|
||||||
|
from email import policy
|
||||||
|
|
||||||
|
message_policy = policy.SMTP
|
||||||
|
else:
|
||||||
|
string_types = (basestring,)
|
||||||
|
text_type = unicode
|
||||||
|
message_policy = None
|
||||||
|
|
||||||
|
charset.add_charset('utf-8', charset.SHORTEST, None, 'utf-8')
|
||||||
|
|
||||||
|
|
||||||
|
class FlaskMailUnicodeDecodeError(UnicodeDecodeError):
|
||||||
|
def __init__(self, obj, *args):
|
||||||
|
self.obj = obj
|
||||||
|
UnicodeDecodeError.__init__(self, *args)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
original = UnicodeDecodeError.__str__(self)
|
||||||
|
return '%s. You passed in %r (%s)' % (original, self.obj, type(self.obj))
|
||||||
|
|
||||||
|
|
||||||
|
def force_text(s, encoding='utf-8', errors='strict'):
|
||||||
|
"""
|
||||||
|
Similar to smart_text, except that lazy instances are resolved to
|
||||||
|
strings, rather than kept as lazy objects.
|
||||||
|
|
||||||
|
If strings_only is True, don't convert (some) non-string-like objects.
|
||||||
|
"""
|
||||||
|
strings_only = True
|
||||||
|
if isinstance(s, text_type):
|
||||||
|
return s
|
||||||
|
|
||||||
|
try:
|
||||||
|
if not isinstance(s, string_types):
|
||||||
|
if PY3:
|
||||||
|
if isinstance(s, bytes):
|
||||||
|
s = text_type(s, encoding, errors)
|
||||||
|
else:
|
||||||
|
s = text_type(s)
|
||||||
|
elif hasattr(s, '__unicode__'):
|
||||||
|
s = s.__unicode__()
|
||||||
|
else:
|
||||||
|
s = text_type(bytes(s), encoding, errors)
|
||||||
|
else:
|
||||||
|
s = s.decode(encoding, errors)
|
||||||
|
except UnicodeDecodeError as e:
|
||||||
|
if not isinstance(s, Exception):
|
||||||
|
raise FlaskMailUnicodeDecodeError(s, *e.args)
|
||||||
|
else:
|
||||||
|
s = ' '.join([force_text(arg, encoding, strings_only, errors) for arg in s])
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize_subject(subject, encoding='utf-8'):
|
||||||
|
try:
|
||||||
|
subject.encode('ascii')
|
||||||
|
except UnicodeEncodeError:
|
||||||
|
try:
|
||||||
|
subject = Header(subject, encoding).encode()
|
||||||
|
except UnicodeEncodeError:
|
||||||
|
subject = Header(subject, 'utf-8').encode()
|
||||||
|
return subject
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize_address(addr, encoding='utf-8'):
|
||||||
|
if isinstance(addr, string_types):
|
||||||
|
addr = parseaddr(force_text(addr))
|
||||||
|
nm, addr = addr
|
||||||
|
|
||||||
|
try:
|
||||||
|
nm = Header(nm, encoding).encode()
|
||||||
|
except UnicodeEncodeError:
|
||||||
|
nm = Header(nm, 'utf-8').encode()
|
||||||
|
try:
|
||||||
|
addr.encode('ascii')
|
||||||
|
except UnicodeEncodeError: # IDN
|
||||||
|
if '@' in addr:
|
||||||
|
localpart, domain = addr.split('@', 1)
|
||||||
|
localpart = str(Header(localpart, encoding))
|
||||||
|
domain = domain.encode('idna').decode('ascii')
|
||||||
|
addr = '@'.join([localpart, domain])
|
||||||
|
else:
|
||||||
|
addr = Header(addr, encoding).encode()
|
||||||
|
return formataddr((nm, addr))
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize_addresses(addresses, encoding='utf-8'):
|
||||||
|
return map(lambda e: sanitize_address(e, encoding), addresses)
|
||||||
|
|
||||||
|
|
||||||
|
def _has_newline(line):
|
||||||
|
"""Used by has_bad_header to check for \\r or \\n"""
|
||||||
|
if line and ('\r' in line or '\n' in line):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class Connection(object):
|
||||||
|
"""Handles connection to host."""
|
||||||
|
|
||||||
|
def __init__(self, mail):
|
||||||
|
self.mail = mail
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
if self.mail.suppress:
|
||||||
|
self.host = None
|
||||||
|
else:
|
||||||
|
self.host = self.configure_host()
|
||||||
|
|
||||||
|
self.num_emails = 0
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_value, tb):
|
||||||
|
if self.host:
|
||||||
|
self.host.quit()
|
||||||
|
|
||||||
|
def configure_host(self):
|
||||||
|
if self.mail.use_ssl:
|
||||||
|
host = smtplib.SMTP_SSL(self.mail.server, self.mail.port)
|
||||||
|
else:
|
||||||
|
host = smtplib.SMTP(self.mail.server, self.mail.port)
|
||||||
|
|
||||||
|
host.set_debuglevel(int(self.mail.debug))
|
||||||
|
|
||||||
|
if self.mail.use_tls:
|
||||||
|
host.starttls()
|
||||||
|
if self.mail.username and self.mail.password:
|
||||||
|
host.login(self.mail.username, self.mail.password)
|
||||||
|
|
||||||
|
return host
|
||||||
|
|
||||||
|
def send(self, message, envelope_from=None):
|
||||||
|
"""Verifies and sends message.
|
||||||
|
|
||||||
|
:param message: Message instance.
|
||||||
|
:param envelope_from: Email address to be used in MAIL FROM command.
|
||||||
|
"""
|
||||||
|
assert message.send_to, "No recipients have been added"
|
||||||
|
|
||||||
|
assert message.sender, (
|
||||||
|
"The message does not specify a sender and a default sender "
|
||||||
|
"has not been configured"
|
||||||
|
)
|
||||||
|
|
||||||
|
if message.has_bad_headers():
|
||||||
|
raise BadHeaderError
|
||||||
|
|
||||||
|
if message.date is None:
|
||||||
|
message.date = time.time()
|
||||||
|
|
||||||
|
if self.host:
|
||||||
|
self.host.sendmail(
|
||||||
|
sanitize_address(envelope_from or message.sender),
|
||||||
|
list(sanitize_addresses(message.send_to)),
|
||||||
|
message.as_bytes() if PY3 else message.as_string(),
|
||||||
|
message.mail_options,
|
||||||
|
message.rcpt_options,
|
||||||
|
)
|
||||||
|
|
||||||
|
email_dispatched.send(message, app=current_app._get_current_object())
|
||||||
|
|
||||||
|
self.num_emails += 1
|
||||||
|
|
||||||
|
if self.num_emails == self.mail.max_emails:
|
||||||
|
self.num_emails = 0
|
||||||
|
if self.host:
|
||||||
|
self.host.quit()
|
||||||
|
self.host = self.configure_host()
|
||||||
|
|
||||||
|
def send_message(self, *args, **kwargs):
|
||||||
|
"""Shortcut for send(msg).
|
||||||
|
|
||||||
|
Takes same arguments as Message constructor.
|
||||||
|
|
||||||
|
:versionadded: 0.3.5
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.send(Message(*args, **kwargs))
|
||||||
|
|
||||||
|
|
||||||
|
class BadHeaderError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Attachment(object):
|
||||||
|
"""Encapsulates file attachment information.
|
||||||
|
|
||||||
|
:versionadded: 0.3.5
|
||||||
|
|
||||||
|
:param filename: filename of attachment
|
||||||
|
:param content_type: file mimetype
|
||||||
|
:param data: the raw file data
|
||||||
|
:param disposition: content-disposition (if any)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
filename=None,
|
||||||
|
content_type=None,
|
||||||
|
data=None,
|
||||||
|
disposition=None,
|
||||||
|
headers=None,
|
||||||
|
):
|
||||||
|
self.filename = filename
|
||||||
|
self.content_type = content_type
|
||||||
|
self.data = data
|
||||||
|
self.disposition = disposition or 'attachment'
|
||||||
|
self.headers = headers or {}
|
||||||
|
|
||||||
|
|
||||||
|
class Message(object):
|
||||||
|
"""Encapsulates an email message.
|
||||||
|
|
||||||
|
:param subject: email subject header
|
||||||
|
:param recipients: list of email addresses
|
||||||
|
:param body: plain text message
|
||||||
|
:param html: HTML message
|
||||||
|
:param sender: email sender address, or **MAIL_DEFAULT_SENDER** by default
|
||||||
|
:param cc: CC list
|
||||||
|
:param bcc: BCC list
|
||||||
|
:param attachments: list of Attachment instances
|
||||||
|
:param reply_to: reply-to address
|
||||||
|
:param date: send date
|
||||||
|
:param charset: message character set
|
||||||
|
:param extra_headers: A dictionary of additional headers for the message
|
||||||
|
:param mail_options: A list of ESMTP options to be used in MAIL FROM command
|
||||||
|
:param rcpt_options: A list of ESMTP options to be used in RCPT commands
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
subject='',
|
||||||
|
recipients=None,
|
||||||
|
body=None,
|
||||||
|
html=None,
|
||||||
|
sender=None,
|
||||||
|
cc=None,
|
||||||
|
bcc=None,
|
||||||
|
attachments=None,
|
||||||
|
reply_to=None,
|
||||||
|
date=None,
|
||||||
|
charset=None,
|
||||||
|
extra_headers=None,
|
||||||
|
mail_options=None,
|
||||||
|
rcpt_options=None,
|
||||||
|
):
|
||||||
|
|
||||||
|
sender = sender or current_app.extensions['mail'].default_sender
|
||||||
|
|
||||||
|
if isinstance(sender, tuple):
|
||||||
|
sender = "%s <%s>" % sender
|
||||||
|
|
||||||
|
self.recipients = recipients or []
|
||||||
|
self.subject = subject
|
||||||
|
self.sender = sender
|
||||||
|
self.reply_to = reply_to
|
||||||
|
self.cc = cc or []
|
||||||
|
self.bcc = bcc or []
|
||||||
|
self.body = body
|
||||||
|
self.html = html
|
||||||
|
self.date = date
|
||||||
|
self.msgId = make_msgid()
|
||||||
|
self.charset = charset
|
||||||
|
self.extra_headers = extra_headers
|
||||||
|
self.mail_options = mail_options or []
|
||||||
|
self.rcpt_options = rcpt_options or []
|
||||||
|
self.attachments = attachments or []
|
||||||
|
|
||||||
|
@property
|
||||||
|
def send_to(self):
|
||||||
|
return set(self.recipients) | set(self.bcc or ()) | set(self.cc or ())
|
||||||
|
|
||||||
|
def _mimetext(self, text, subtype='plain'):
|
||||||
|
"""Creates a MIMEText object with the given subtype (default: 'plain')
|
||||||
|
If the text is unicode, the utf-8 charset is used.
|
||||||
|
"""
|
||||||
|
charset = self.charset or 'utf-8'
|
||||||
|
return MIMEText(text, _subtype=subtype, _charset=charset)
|
||||||
|
|
||||||
|
def _message(self): # noqa: C901
|
||||||
|
"""Creates the email"""
|
||||||
|
ascii_attachments = current_app.extensions['mail'].ascii_attachments
|
||||||
|
encoding = self.charset or 'utf-8'
|
||||||
|
|
||||||
|
attachments = self.attachments or []
|
||||||
|
|
||||||
|
if len(attachments) == 0 and not self.html:
|
||||||
|
# No html content and zero attachments means plain text
|
||||||
|
msg = self._mimetext(self.body)
|
||||||
|
elif len(attachments) > 0 and not self.html:
|
||||||
|
# No html and at least one attachment means multipart
|
||||||
|
msg = MIMEMultipart()
|
||||||
|
msg.attach(self._mimetext(self.body))
|
||||||
|
else:
|
||||||
|
# Anything else
|
||||||
|
msg = MIMEMultipart()
|
||||||
|
alternative = MIMEMultipart('alternative')
|
||||||
|
alternative.attach(self._mimetext(self.body, 'plain'))
|
||||||
|
alternative.attach(self._mimetext(self.html, 'html'))
|
||||||
|
msg.attach(alternative)
|
||||||
|
|
||||||
|
if self.subject:
|
||||||
|
msg['Subject'] = sanitize_subject(force_text(self.subject), encoding)
|
||||||
|
|
||||||
|
msg['From'] = sanitize_address(self.sender, encoding)
|
||||||
|
msg['To'] = ', '.join(list(set(sanitize_addresses(self.recipients, encoding))))
|
||||||
|
|
||||||
|
msg['Date'] = formatdate(self.date, localtime=True)
|
||||||
|
# see RFC 5322 section 3.6.4.
|
||||||
|
msg['Message-ID'] = self.msgId
|
||||||
|
|
||||||
|
if self.cc:
|
||||||
|
msg['Cc'] = ', '.join(list(set(sanitize_addresses(self.cc, encoding))))
|
||||||
|
|
||||||
|
if self.reply_to:
|
||||||
|
msg['Reply-To'] = sanitize_address(self.reply_to, encoding)
|
||||||
|
|
||||||
|
if self.extra_headers:
|
||||||
|
for k, v in self.extra_headers.items():
|
||||||
|
msg[k] = v
|
||||||
|
|
||||||
|
SPACES = re.compile(r'[\s]+', re.UNICODE)
|
||||||
|
for attachment in attachments:
|
||||||
|
f = MIMEBase(*attachment.content_type.split('/'))
|
||||||
|
f.set_payload(attachment.data)
|
||||||
|
encode_base64(f)
|
||||||
|
|
||||||
|
filename = attachment.filename
|
||||||
|
if filename and ascii_attachments:
|
||||||
|
# force filename to ascii
|
||||||
|
filename = unicodedata.normalize('NFKD', filename)
|
||||||
|
filename = filename.encode('ascii', 'ignore').decode('ascii')
|
||||||
|
filename = SPACES.sub(u' ', filename).strip()
|
||||||
|
|
||||||
|
try:
|
||||||
|
filename and filename.encode('ascii')
|
||||||
|
except UnicodeEncodeError:
|
||||||
|
if not PY3:
|
||||||
|
filename = filename.encode('utf8')
|
||||||
|
filename = ('UTF8', '', filename)
|
||||||
|
|
||||||
|
f.add_header(
|
||||||
|
'Content-Disposition', attachment.disposition, filename=filename
|
||||||
|
)
|
||||||
|
|
||||||
|
for key, value in attachment.headers:
|
||||||
|
f.add_header(key, value)
|
||||||
|
|
||||||
|
msg.attach(f)
|
||||||
|
if message_policy:
|
||||||
|
msg.policy = message_policy
|
||||||
|
|
||||||
|
return msg
|
||||||
|
|
||||||
|
def as_string(self):
|
||||||
|
return self._message().as_string()
|
||||||
|
|
||||||
|
def as_bytes(self):
|
||||||
|
# if PY34:
|
||||||
|
# return self._message().as_bytes()
|
||||||
|
# else: # fallback for old Python (3) versions
|
||||||
|
# return self._message().as_string().encode(self.charset or 'utf-8')
|
||||||
|
return self._message().as_string().encode(self.charset or 'utf-8')
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.as_string()
|
||||||
|
|
||||||
|
def __bytes__(self):
|
||||||
|
return self.as_bytes()
|
||||||
|
|
||||||
|
def has_bad_headers(self):
|
||||||
|
"""Checks for bad headers i.e. newlines in subject, sender or recipients.
|
||||||
|
RFC5322: Allows multiline CRLF with trailing whitespace (FWS) in headers
|
||||||
|
"""
|
||||||
|
|
||||||
|
headers = [self.sender, self.reply_to] + self.recipients
|
||||||
|
for header in headers:
|
||||||
|
if _has_newline(header):
|
||||||
|
return True
|
||||||
|
|
||||||
|
if self.subject:
|
||||||
|
if _has_newline(self.subject):
|
||||||
|
for linenum, line in enumerate(self.subject.split('\r\n')):
|
||||||
|
if not line:
|
||||||
|
return True
|
||||||
|
if linenum > 0 and line[0] not in '\t ':
|
||||||
|
return True
|
||||||
|
if _has_newline(line):
|
||||||
|
return True
|
||||||
|
if len(line.strip()) == 0:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def is_bad_headers(self):
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
|
msg = (
|
||||||
|
'is_bad_headers is deprecated, use the new has_bad_headers method instead.'
|
||||||
|
)
|
||||||
|
warn(DeprecationWarning(msg), stacklevel=1)
|
||||||
|
return self.has_bad_headers()
|
||||||
|
|
||||||
|
def send(self, connection):
|
||||||
|
"""Verifies and sends the message."""
|
||||||
|
|
||||||
|
connection.send(self)
|
||||||
|
|
||||||
|
def add_recipient(self, recipient):
|
||||||
|
"""Adds another recipient to the message.
|
||||||
|
|
||||||
|
:param recipient: email address of recipient.
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.recipients.append(recipient)
|
||||||
|
|
||||||
|
def attach(
|
||||||
|
self,
|
||||||
|
filename=None,
|
||||||
|
content_type=None,
|
||||||
|
data=None,
|
||||||
|
disposition=None,
|
||||||
|
headers=None,
|
||||||
|
):
|
||||||
|
"""Adds an attachment to the message.
|
||||||
|
|
||||||
|
:param filename: filename of attachment
|
||||||
|
:param content_type: file mimetype
|
||||||
|
:param data: the raw file data
|
||||||
|
:param disposition: content-disposition (if any)
|
||||||
|
"""
|
||||||
|
self.attachments.append(
|
||||||
|
Attachment(filename, content_type, data, disposition, headers)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _MailMixin(object):
|
||||||
|
@contextmanager
|
||||||
|
def record_messages(self):
|
||||||
|
"""Records all messages. Use in unit tests for example::
|
||||||
|
|
||||||
|
with mail.record_messages() as outbox:
|
||||||
|
response = app.test_client.get("/email-sending-view/")
|
||||||
|
assert len(outbox) == 1
|
||||||
|
assert outbox[0].subject == "testing"
|
||||||
|
|
||||||
|
You must have blinker installed in order to use this feature.
|
||||||
|
:versionadded: 0.4
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not email_dispatched:
|
||||||
|
raise RuntimeError("blinker must be installed")
|
||||||
|
|
||||||
|
outbox = []
|
||||||
|
|
||||||
|
def _record(message, app):
|
||||||
|
outbox.append(message)
|
||||||
|
|
||||||
|
email_dispatched.connect(_record)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield outbox
|
||||||
|
finally:
|
||||||
|
email_dispatched.disconnect(_record)
|
||||||
|
|
||||||
|
def send(self, message):
|
||||||
|
"""Sends a single message instance. If TESTING is True the message will
|
||||||
|
not actually be sent.
|
||||||
|
|
||||||
|
:param message: a Message instance.
|
||||||
|
"""
|
||||||
|
|
||||||
|
with self.connect() as connection:
|
||||||
|
message.send(connection)
|
||||||
|
|
||||||
|
def send_message(self, *args, **kwargs):
|
||||||
|
"""Shortcut for send(msg).
|
||||||
|
|
||||||
|
Takes same arguments as Message constructor.
|
||||||
|
|
||||||
|
:versionadded: 0.3.5
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.send(Message(*args, **kwargs))
|
||||||
|
|
||||||
|
def connect(self):
|
||||||
|
"""Opens a connection to the mail host."""
|
||||||
|
app = getattr(self, "app", None) or current_app
|
||||||
|
try:
|
||||||
|
return Connection(app.extensions['mail'])
|
||||||
|
except KeyError:
|
||||||
|
raise RuntimeError(
|
||||||
|
"The curent application was not configured with Flask-Mail"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _Mail(_MailMixin):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
server,
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
port,
|
||||||
|
use_tls,
|
||||||
|
use_ssl,
|
||||||
|
default_sender,
|
||||||
|
debug,
|
||||||
|
max_emails,
|
||||||
|
suppress,
|
||||||
|
ascii_attachments=False,
|
||||||
|
):
|
||||||
|
self.server = server
|
||||||
|
self.username = username
|
||||||
|
self.password = password
|
||||||
|
self.port = port
|
||||||
|
self.use_tls = use_tls
|
||||||
|
self.use_ssl = use_ssl
|
||||||
|
self.default_sender = default_sender
|
||||||
|
self.debug = debug
|
||||||
|
self.max_emails = max_emails
|
||||||
|
self.suppress = suppress
|
||||||
|
self.ascii_attachments = ascii_attachments
|
||||||
|
|
||||||
|
|
||||||
|
class Mail(_MailMixin):
|
||||||
|
"""Manages email messaging
|
||||||
|
|
||||||
|
:param app: Flask instance
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, app=None):
|
||||||
|
self.app = app
|
||||||
|
if app is not None:
|
||||||
|
self.state = self.init_app(app)
|
||||||
|
else:
|
||||||
|
self.state = None
|
||||||
|
|
||||||
|
def init_mail(self, config, debug=False, testing=False):
|
||||||
|
return _Mail(
|
||||||
|
config.get('MAIL_SERVER', '127.0.0.1'),
|
||||||
|
config.get('MAIL_USERNAME'),
|
||||||
|
config.get('MAIL_PASSWORD'),
|
||||||
|
config.get('MAIL_PORT', 25),
|
||||||
|
config.get('MAIL_USE_TLS', False),
|
||||||
|
config.get('MAIL_USE_SSL', False),
|
||||||
|
config.get('MAIL_DEFAULT_SENDER'),
|
||||||
|
int(config.get('MAIL_DEBUG', debug)),
|
||||||
|
config.get('MAIL_MAX_EMAILS'),
|
||||||
|
config.get('MAIL_SUPPRESS_SEND', testing),
|
||||||
|
config.get('MAIL_ASCII_ATTACHMENTS', False),
|
||||||
|
)
|
||||||
|
|
||||||
|
def init_app(self, app):
|
||||||
|
"""Initializes your mail settings from the application settings.
|
||||||
|
|
||||||
|
You can use this if you want to set up your Mail instance
|
||||||
|
at configuration time.
|
||||||
|
|
||||||
|
:param app: Flask application instance
|
||||||
|
"""
|
||||||
|
state = self.init_mail(app.config, app.debug, app.testing)
|
||||||
|
|
||||||
|
# register extension with app
|
||||||
|
app.extensions = getattr(app, 'extensions', {})
|
||||||
|
app.extensions['mail'] = state
|
||||||
|
return state
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
return getattr(self.state, name, None)
|
||||||
|
|
||||||
|
|
||||||
|
signals = blinker.Namespace()
|
||||||
|
|
||||||
|
email_dispatched = signals.signal(
|
||||||
|
"email-dispatched",
|
||||||
|
doc="""
|
||||||
|
Signal sent when an email is dispatched. This signal will also be sent
|
||||||
|
in testing mode, even though the email will not actually be sent.
|
||||||
|
""",
|
||||||
|
)
|
|
@ -0,0 +1,31 @@
|
||||||
|
import logging
|
||||||
|
from smtplib import SMTPException
|
||||||
|
from threading import Thread
|
||||||
|
|
||||||
|
from flask import current_app as app
|
||||||
|
|
||||||
|
from ereuse_devicehub.mail.flask_mail import Message
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _send_async_email(app, msg):
|
||||||
|
with app.app_context():
|
||||||
|
try:
|
||||||
|
app.mail.send(msg)
|
||||||
|
except SMTPException:
|
||||||
|
logger.exception("An error occurred while sending the email")
|
||||||
|
|
||||||
|
|
||||||
|
def send_email(
|
||||||
|
subject, recipients, text_body, sender=None, cc=None, bcc=None, html_body=None
|
||||||
|
):
|
||||||
|
|
||||||
|
msg = Message(subject, sender=sender, recipients=recipients, cc=cc, bcc=bcc)
|
||||||
|
|
||||||
|
msg.body = text_body
|
||||||
|
|
||||||
|
if html_body:
|
||||||
|
msg.html = html_body
|
||||||
|
|
||||||
|
Thread(target=_send_async_email, args=(app._get_current_object(), msg)).start()
|
|
@ -1,14 +1,33 @@
|
||||||
from marshmallow.fields import missing_
|
from marshmallow.fields import missing_
|
||||||
from teal.db import SQLAlchemy
|
|
||||||
from teal.marshmallow import NestedOn as TealNestedOn
|
|
||||||
|
|
||||||
from ereuse_devicehub.db import db
|
from ereuse_devicehub.db import db
|
||||||
|
from ereuse_devicehub.teal.db import SQLAlchemy
|
||||||
|
from ereuse_devicehub.teal.marshmallow import NestedOn as TealNestedOn
|
||||||
|
|
||||||
|
|
||||||
class NestedOn(TealNestedOn):
|
class NestedOn(TealNestedOn):
|
||||||
__doc__ = TealNestedOn.__doc__
|
__doc__ = TealNestedOn.__doc__
|
||||||
|
|
||||||
def __init__(self, nested, polymorphic_on='type', db: SQLAlchemy = db, collection_class=list,
|
def __init__(
|
||||||
default=missing_, exclude=tuple(), only_query: str = None, only=None, **kwargs):
|
self,
|
||||||
super().__init__(nested, polymorphic_on, db, collection_class, default, exclude,
|
nested,
|
||||||
only_query, only, **kwargs)
|
polymorphic_on='type',
|
||||||
|
db: SQLAlchemy = db,
|
||||||
|
collection_class=list,
|
||||||
|
default=missing_,
|
||||||
|
exclude=tuple(),
|
||||||
|
only_query: str = None,
|
||||||
|
only=None,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
|
super().__init__(
|
||||||
|
nested,
|
||||||
|
polymorphic_on,
|
||||||
|
db,
|
||||||
|
collection_class,
|
||||||
|
default,
|
||||||
|
exclude,
|
||||||
|
only_query,
|
||||||
|
only,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
|
@ -9,7 +9,7 @@ from alembic import op
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
import citext
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
${imports if imports else ""}
|
${imports if imports else ""}
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
|
|
|
@ -10,7 +10,7 @@ from alembic import op
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
import citext
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
|
@ -26,11 +26,32 @@ def get_inv():
|
||||||
raise ValueError("Inventory value is not specified")
|
raise ValueError("Inventory value is not specified")
|
||||||
return INV
|
return INV
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
op.alter_column('test_data_storage', 'current_pending_sector_count', type_=sa.Integer(), schema=f'{get_inv()}')
|
op.alter_column(
|
||||||
op.alter_column('test_data_storage', 'offline_uncorrectable', type_=sa.Integer(), schema=f'{get_inv()}')
|
'test_data_storage',
|
||||||
|
'current_pending_sector_count',
|
||||||
|
type_=sa.Integer(),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.alter_column(
|
||||||
|
'test_data_storage',
|
||||||
|
'offline_uncorrectable',
|
||||||
|
type_=sa.Integer(),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
def downgrade():
|
||||||
op.alter_column('test_data_storage', 'current_pending_sector_count', type_=sa.SmallInteger(), schema=f'{get_inv()}')
|
op.alter_column(
|
||||||
op.alter_column('test_data_storage', 'offline_uncorrectable', type_=sa.SmallInteger(), schema=f'{get_inv()}')
|
'test_data_storage',
|
||||||
|
'current_pending_sector_count',
|
||||||
|
type_=sa.SmallInteger(),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.alter_column(
|
||||||
|
'test_data_storage',
|
||||||
|
'offline_uncorrectable',
|
||||||
|
type_=sa.SmallInteger(),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
|
@ -11,7 +11,7 @@ from sqlalchemy.dialects import postgresql
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
import citext
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
from ereuse_devicehub.resources.enums import SessionType
|
from ereuse_devicehub.resources.enums import SessionType
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,52 @@
|
||||||
|
"""share lot
|
||||||
|
|
||||||
|
Revision ID: 2f2ef041483a
|
||||||
|
Revises: ac476b60d952
|
||||||
|
Create Date: 2023-04-26 16:04:21.560888
|
||||||
|
|
||||||
|
"""
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '2f2ef041483a'
|
||||||
|
down_revision = 'ac476b60d952'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'share_lot',
|
||||||
|
sa.Column(
|
||||||
|
'created',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'updated',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('user_to_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||||
|
sa.Column('lot_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['user_to_id'], ['common.user.id']),
|
||||||
|
sa.ForeignKeyConstraint(['lot_id'], [f'{get_inv()}.lot.id']),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('share_lot', schema=f'{get_inv()}')
|
|
@ -5,12 +5,12 @@ Revises: bf600ca861a4
|
||||||
Create Date: 2020-12-16 11:45:13.339624
|
Create Date: 2020-12-16 11:45:13.339624
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from alembic import context
|
import citext
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
from alembic import context
|
||||||
import teal
|
from alembic import op
|
||||||
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
|
|
|
@ -5,15 +5,14 @@ Revises: 51439cf24be8
|
||||||
Create Date: 2021-06-15 14:38:59.931818
|
Create Date: 2021-06-15 14:38:59.931818
|
||||||
|
|
||||||
"""
|
"""
|
||||||
import teal
|
|
||||||
import citext
|
import citext
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
|
from ereuse_devicehub import teal
|
||||||
from alembic import op
|
from alembic import op
|
||||||
from alembic import context
|
from alembic import context
|
||||||
from sqlalchemy.dialects import postgresql
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
revision = '3a3601ac8224'
|
revision = '3a3601ac8224'
|
||||||
down_revision = '51439cf24be8'
|
down_revision = '51439cf24be8'
|
||||||
|
@ -27,108 +26,143 @@ def get_inv():
|
||||||
raise ValueError("Inventory value is not specified")
|
raise ValueError("Inventory value is not specified")
|
||||||
return INV
|
return INV
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
op.create_table('trade_document',
|
op.create_table(
|
||||||
|
'trade_document',
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'updated',
|
'updated',
|
||||||
sa.TIMESTAMP(timezone=True),
|
sa.TIMESTAMP(timezone=True),
|
||||||
server_default=sa.text('CURRENT_TIMESTAMP'),
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
nullable=False,
|
nullable=False,
|
||||||
comment='The last time Devicehub recorded a change for \n this thing.\n '
|
comment='The last time Devicehub recorded a change for \n this thing.\n ',
|
||||||
),
|
),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'created',
|
'created',
|
||||||
sa.TIMESTAMP(timezone=True),
|
sa.TIMESTAMP(timezone=True),
|
||||||
server_default=sa.text('CURRENT_TIMESTAMP'),
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
nullable=False,
|
nullable=False,
|
||||||
comment='When Devicehub created this.'
|
comment='When Devicehub created this.',
|
||||||
),
|
),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'id',
|
'id',
|
||||||
sa.BigInteger(),
|
sa.BigInteger(),
|
||||||
nullable=False,
|
nullable=False,
|
||||||
comment='The identifier of the device for this database. Used only\n internally for software; users should not use this.\n '
|
comment='The identifier of the device for this database. Used only\n internally for software; users should not use this.\n ',
|
||||||
),
|
),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'date',
|
'date',
|
||||||
sa.DateTime(),
|
sa.DateTime(),
|
||||||
nullable=True,
|
nullable=True,
|
||||||
comment='The date of document, some documents need to have one date\n '
|
comment='The date of document, some documents need to have one date\n ',
|
||||||
),
|
),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'id_document',
|
'id_document',
|
||||||
citext.CIText(),
|
citext.CIText(),
|
||||||
nullable=True,
|
nullable=True,
|
||||||
comment='The id of one document like invoice so they can be linked.'
|
comment='The id of one document like invoice so they can be linked.',
|
||||||
),
|
),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'description',
|
'description',
|
||||||
citext.CIText(),
|
citext.CIText(),
|
||||||
nullable=True,
|
nullable=True,
|
||||||
comment='A description of document.'
|
comment='A description of document.',
|
||||||
),
|
|
||||||
sa.Column(
|
|
||||||
'owner_id',
|
|
||||||
postgresql.UUID(as_uuid=True),
|
|
||||||
nullable=False
|
|
||||||
),
|
|
||||||
sa.Column(
|
|
||||||
'lot_id',
|
|
||||||
postgresql.UUID(as_uuid=True),
|
|
||||||
nullable=False
|
|
||||||
),
|
),
|
||||||
|
sa.Column('owner_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('lot_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'file_name',
|
'file_name',
|
||||||
citext.CIText(),
|
citext.CIText(),
|
||||||
nullable=True,
|
nullable=True,
|
||||||
comment='This is the name of the file when user up the document.'
|
comment='This is the name of the file when user up the document.',
|
||||||
),
|
),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'file_hash',
|
'file_hash',
|
||||||
citext.CIText(),
|
citext.CIText(),
|
||||||
nullable=True,
|
nullable=True,
|
||||||
comment='This is the hash of the file produced from frontend.'
|
comment='This is the hash of the file produced from frontend.',
|
||||||
),
|
),
|
||||||
sa.Column(
|
sa.Column(
|
||||||
'url',
|
'url',
|
||||||
citext.CIText(),
|
citext.CIText(),
|
||||||
teal.db.URL(),
|
teal.db.URL(),
|
||||||
nullable=True,
|
nullable=True,
|
||||||
comment='This is the url where resides the document.'
|
comment='This is the url where resides the document.',
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['lot_id'],
|
||||||
|
[f'{get_inv()}.lot.id'],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['owner_id'],
|
||||||
|
['common.user.id'],
|
||||||
),
|
),
|
||||||
sa.ForeignKeyConstraint(['lot_id'], [f'{get_inv()}.lot.id'],),
|
|
||||||
sa.ForeignKeyConstraint(['owner_id'], ['common.user.id'],),
|
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
# Action document table
|
# Action document table
|
||||||
op.create_table('action_trade_document',
|
op.create_table(
|
||||||
|
'action_trade_document',
|
||||||
sa.Column('document_id', sa.BigInteger(), nullable=False),
|
sa.Column('document_id', sa.BigInteger(), nullable=False),
|
||||||
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.ForeignKeyConstraint(['action_id'], [f'{get_inv()}.action.id'], ),
|
sa.ForeignKeyConstraint(
|
||||||
sa.ForeignKeyConstraint(['document_id'], [f'{get_inv()}.trade_document.id'], ),
|
['action_id'],
|
||||||
|
[f'{get_inv()}.action.id'],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['document_id'],
|
||||||
|
[f'{get_inv()}.trade_document.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('document_id', 'action_id'),
|
sa.PrimaryKeyConstraint('document_id', 'action_id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
|
|
||||||
op.create_index('document_id', 'trade_document', ['id'], unique=False, postgresql_using='hash', schema=f'{get_inv()}')
|
op.create_index(
|
||||||
op.create_index(op.f('ix_trade_document_created'), 'trade_document', ['created'], unique=False, schema=f'{get_inv()}')
|
'document_id',
|
||||||
op.create_index(op.f('ix_trade_document_updated'), 'trade_document', ['updated'], unique=False, schema=f'{get_inv()}')
|
'trade_document',
|
||||||
|
['id'],
|
||||||
|
unique=False,
|
||||||
|
postgresql_using='hash',
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f('ix_trade_document_created'),
|
||||||
|
'trade_document',
|
||||||
|
['created'],
|
||||||
|
unique=False,
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f('ix_trade_document_updated'),
|
||||||
|
'trade_document',
|
||||||
|
['updated'],
|
||||||
|
unique=False,
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
op.create_table('confirm_document',
|
op.create_table(
|
||||||
|
'confirm_document',
|
||||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
sa.ForeignKeyConstraint(['id'], [f'{get_inv()}.action.id'], ),
|
['id'],
|
||||||
sa.ForeignKeyConstraint(['action_id'], [f'{get_inv()}.action.id'], ),
|
[f'{get_inv()}.action.id'],
|
||||||
sa.ForeignKeyConstraint(['user_id'], ['common.user.id'], ),
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['action_id'],
|
||||||
|
[f'{get_inv()}.action.id'],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['user_id'],
|
||||||
|
['common.user.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
def downgrade():
|
||||||
op.drop_table('action_trade_document', schema=f'{get_inv()}')
|
op.drop_table('action_trade_document', schema=f'{get_inv()}')
|
||||||
op.drop_table('confirm_document', schema=f'{get_inv()}')
|
op.drop_table('confirm_document', schema=f'{get_inv()}')
|
||||||
op.drop_table('trade_document', schema=f'{get_inv()}')
|
op.drop_table('trade_document', schema=f'{get_inv()}')
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,39 @@
|
||||||
|
"""device other
|
||||||
|
|
||||||
|
Revision ID: 410aadae7652
|
||||||
|
Revises: d65745749e34
|
||||||
|
Create Date: 2022-11-29 12:00:40.272121
|
||||||
|
|
||||||
|
"""
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '410aadae7652'
|
||||||
|
down_revision = 'd65745749e34'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'other',
|
||||||
|
sa.Column('id', sa.BigInteger(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['id'],
|
||||||
|
[f'{get_inv()}.device.id'],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('other', schema=f'{get_inv()}')
|
|
@ -0,0 +1,86 @@
|
||||||
|
"""sanitization
|
||||||
|
|
||||||
|
Revision ID: 4f33137586dd
|
||||||
|
Revises: 93daff872771
|
||||||
|
Create Date: 2023-02-13 18:01:00.092527
|
||||||
|
|
||||||
|
"""
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '4f33137586dd'
|
||||||
|
down_revision = '93daff872771'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'sanitization_entity',
|
||||||
|
sa.Column('id', sa.BigInteger(), nullable=False),
|
||||||
|
sa.Column(
|
||||||
|
'updated',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'created',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column('company_name', sa.String(), nullable=True),
|
||||||
|
sa.Column('logo', teal.db.URL(), nullable=True),
|
||||||
|
sa.Column('responsable_person', sa.String(), nullable=True),
|
||||||
|
sa.Column('supervisor_person', sa.String(), nullable=True),
|
||||||
|
sa.Column('location', sa.String(), nullable=True),
|
||||||
|
sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['user_id'],
|
||||||
|
['common.user.id'],
|
||||||
|
),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'transfer_customer_details',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column(
|
||||||
|
'updated',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'created',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column('company_name', citext.CIText(), nullable=True),
|
||||||
|
sa.Column('logo', teal.db.URL(), nullable=True),
|
||||||
|
sa.Column('location', citext.CIText(), nullable=True),
|
||||||
|
sa.Column('transfer_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['transfer_id'], [f'{get_inv()}.transfer.id']),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('sanitization_entity', schema=f'{get_inv()}')
|
||||||
|
op.drop_table('transfer_customer_details', schema=f'{get_inv()}')
|
|
@ -0,0 +1,45 @@
|
||||||
|
"""add new erase_data_wipe
|
||||||
|
|
||||||
|
Revision ID: 5169765e2653
|
||||||
|
Revises: 2f2ef041483a
|
||||||
|
Create Date: 2023-05-23 10:34:46.312074
|
||||||
|
|
||||||
|
"""
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '5169765e2653'
|
||||||
|
down_revision = 'a8a86dbd5f51'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'erase_data_wipe',
|
||||||
|
sa.Column('document_id', sa.BigInteger(), nullable=False),
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['document_id'],
|
||||||
|
[f'{get_inv()}.document.id'],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['id'],
|
||||||
|
[f'{get_inv()}.erase_basic.id'],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('erase_data_wipe', schema=f'{get_inv()}')
|
|
@ -0,0 +1,35 @@
|
||||||
|
"""add vendor family in device
|
||||||
|
|
||||||
|
Revision ID: 564952310b17
|
||||||
|
Revises: af038a8a388c
|
||||||
|
Create Date: 2022-11-14 13:12:22.916848
|
||||||
|
|
||||||
|
"""
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '564952310b17'
|
||||||
|
down_revision = 'af038a8a388c'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'device',
|
||||||
|
sa.Column('family', citext.CIText(), nullable=True),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('device', 'family', schema=f'{get_inv()}')
|
|
@ -9,7 +9,7 @@ from alembic import op
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
import citext
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
from alembic import op
|
from alembic import op
|
||||||
from alembic import context
|
from alembic import context
|
||||||
|
@ -32,13 +32,23 @@ def get_inv():
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
# Document table
|
# Document table
|
||||||
op.create_table('document',
|
op.create_table(
|
||||||
|
'document',
|
||||||
sa.Column('id', sa.BigInteger(), nullable=False),
|
sa.Column('id', sa.BigInteger(), nullable=False),
|
||||||
sa.Column('updated', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'),
|
sa.Column(
|
||||||
|
'updated',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
nullable=False,
|
nullable=False,
|
||||||
comment='The last time Document recorded a change for \n this thing.\n '),
|
comment='The last time Document recorded a change for \n this thing.\n ',
|
||||||
sa.Column('created', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'),
|
),
|
||||||
nullable=False, comment='When Document created this.'),
|
sa.Column(
|
||||||
|
'created',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
comment='When Document created this.',
|
||||||
|
),
|
||||||
sa.Column('document_type', sa.Unicode(), nullable=False),
|
sa.Column('document_type', sa.Unicode(), nullable=False),
|
||||||
sa.Column('date', sa.TIMESTAMP(timezone=True), nullable=True),
|
sa.Column('date', sa.TIMESTAMP(timezone=True), nullable=True),
|
||||||
sa.Column('id_document', sa.Unicode(), nullable=True),
|
sa.Column('id_document', sa.Unicode(), nullable=True),
|
||||||
|
@ -46,36 +56,73 @@ def upgrade():
|
||||||
sa.Column('file_name', sa.Unicode(), nullable=False),
|
sa.Column('file_name', sa.Unicode(), nullable=False),
|
||||||
sa.Column('file_hash', sa.Unicode(), nullable=False),
|
sa.Column('file_hash', sa.Unicode(), nullable=False),
|
||||||
sa.Column('url', sa.Unicode(), nullable=True),
|
sa.Column('url', sa.Unicode(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
sa.ForeignKeyConstraint(['owner_id'], ['common.user.id'], ),
|
['owner_id'],
|
||||||
|
['common.user.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
'generic_document_id',
|
||||||
|
'document',
|
||||||
|
['id'],
|
||||||
|
unique=False,
|
||||||
|
postgresql_using='hash',
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f('ix_document_created'),
|
||||||
|
'document',
|
||||||
|
['created'],
|
||||||
|
unique=False,
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
op.f('ix_document_updated'),
|
||||||
|
'document',
|
||||||
|
['updated'],
|
||||||
|
unique=False,
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
op.create_index(
|
||||||
|
'document_type_index',
|
||||||
|
'document',
|
||||||
|
['document_type'],
|
||||||
|
unique=False,
|
||||||
|
postgresql_using='hash',
|
||||||
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
op.create_index('generic_document_id', 'document', ['id'], unique=False, postgresql_using='hash', schema=f'{get_inv()}')
|
|
||||||
op.create_index(op.f('ix_document_created'), 'document', ['created'], unique=False, schema=f'{get_inv()}')
|
|
||||||
op.create_index(op.f('ix_document_updated'), 'document', ['updated'], unique=False, schema=f'{get_inv()}')
|
|
||||||
op.create_index('document_type_index', 'document', ['document_type'], unique=False, postgresql_using='hash', schema=f'{get_inv()}')
|
|
||||||
|
|
||||||
|
|
||||||
# DataWipeDocument table
|
# DataWipeDocument table
|
||||||
op.create_table('data_wipe_document',
|
op.create_table(
|
||||||
|
'data_wipe_document',
|
||||||
sa.Column('id', sa.BigInteger(), nullable=False),
|
sa.Column('id', sa.BigInteger(), nullable=False),
|
||||||
sa.Column('software', sa.Unicode(), nullable=True),
|
sa.Column('software', sa.Unicode(), nullable=True),
|
||||||
sa.Column('success', sa.Boolean(), nullable=False),
|
sa.Column('success', sa.Boolean(), nullable=False),
|
||||||
sa.ForeignKeyConstraint(['id'], [f'{get_inv()}.document.id'], ),
|
sa.ForeignKeyConstraint(
|
||||||
|
['id'],
|
||||||
|
[f'{get_inv()}.document.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
# DataWipe table
|
# DataWipe table
|
||||||
op.create_table('data_wipe',
|
op.create_table(
|
||||||
|
'data_wipe',
|
||||||
sa.Column('document_id', sa.BigInteger(), nullable=False),
|
sa.Column('document_id', sa.BigInteger(), nullable=False),
|
||||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.ForeignKeyConstraint(['document_id'], [f'{get_inv()}.document.id'], ),
|
sa.ForeignKeyConstraint(
|
||||||
sa.ForeignKeyConstraint(['id'], [f'{get_inv()}.action.id'], ),
|
['document_id'],
|
||||||
|
[f'{get_inv()}.document.id'],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['id'],
|
||||||
|
[f'{get_inv()}.action.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,65 @@
|
||||||
|
"""add hash hid to device
|
||||||
|
|
||||||
|
Revision ID: 93daff872771
|
||||||
|
Revises: 564952310b17
|
||||||
|
Create Date: 2022-12-13 10:14:45.500087
|
||||||
|
|
||||||
|
"""
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '93daff872771'
|
||||||
|
down_revision = '564952310b17'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade_data():
|
||||||
|
con = op.get_bind()
|
||||||
|
sql = f"update {get_inv()}.computer set user_trusts='t';"
|
||||||
|
con.execute(sql)
|
||||||
|
|
||||||
|
dev_sql = f"select id, hid from {get_inv()}.device;"
|
||||||
|
for d in con.execute(dev_sql):
|
||||||
|
if not d.hid:
|
||||||
|
continue
|
||||||
|
dev_id = d.id
|
||||||
|
chid = hashlib.sha3_256(d.hid.encode('utf-8')).hexdigest()
|
||||||
|
sql = f"update {get_inv()}.device set chid='{chid}' where id={dev_id};"
|
||||||
|
con.execute(sql)
|
||||||
|
|
||||||
|
con.execute(sql)
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'computer',
|
||||||
|
sa.Column('user_trusts', sa.Boolean(), default=True, nullable=True),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
op.add_column(
|
||||||
|
'device',
|
||||||
|
sa.Column('chid', citext.CIText(), nullable=True),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
upgrade_data()
|
||||||
|
|
||||||
|
op.alter_column('computer', 'user_trusts', nullable=False, schema=f'{get_inv()}')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('computer', 'user_trusts', schema=f'{get_inv()}')
|
||||||
|
op.drop_column('device', 'chid', schema=f'{get_inv()}')
|
|
@ -10,7 +10,7 @@ from alembic import context
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
import citext
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
|
@ -26,10 +26,10 @@ def get_inv():
|
||||||
raise ValueError("Inventory value is not specified")
|
raise ValueError("Inventory value is not specified")
|
||||||
return INV
|
return INV
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
con = op.get_bind()
|
con = op.get_bind()
|
||||||
|
|
||||||
|
|
||||||
confirmsRevokes_sql = f"select * from {get_inv()}.action as action join {get_inv()}.confirm as confirm on action.id=confirm.id where action.type='ConfirmRevoke'"
|
confirmsRevokes_sql = f"select * from {get_inv()}.action as action join {get_inv()}.confirm as confirm on action.id=confirm.id where action.type='ConfirmRevoke'"
|
||||||
revokes_sql = f"select confirm.id, confirm.action_id from {get_inv()}.action as action join {get_inv()}.confirm as confirm on action.id=confirm.id where action.type='Revoke'"
|
revokes_sql = f"select confirm.id, confirm.action_id from {get_inv()}.action as action join {get_inv()}.confirm as confirm on action.id=confirm.id where action.type='Revoke'"
|
||||||
confirmsRevokes = [a for a in con.execute(confirmsRevokes_sql)]
|
confirmsRevokes = [a for a in con.execute(confirmsRevokes_sql)]
|
||||||
|
@ -40,12 +40,12 @@ def upgrade():
|
||||||
revoke_id = ac.action_id
|
revoke_id = ac.action_id
|
||||||
trade_id = revokes[revoke_id]
|
trade_id = revokes[revoke_id]
|
||||||
sql_action = f"update {get_inv()}.action set type='Revoke' where id='{ac_id}'"
|
sql_action = f"update {get_inv()}.action set type='Revoke' where id='{ac_id}'"
|
||||||
sql_confirm = f"update {get_inv()}.confirm set action_id='{trade_id}' where id='{ac_id}'"
|
sql_confirm = (
|
||||||
|
f"update {get_inv()}.confirm set action_id='{trade_id}' where id='{ac_id}'"
|
||||||
|
)
|
||||||
con.execute(sql_action)
|
con.execute(sql_action)
|
||||||
con.execute(sql_confirm)
|
con.execute(sql_confirm)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
def downgrade():
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -0,0 +1,34 @@
|
||||||
|
"""add kangaroo in placeholder
|
||||||
|
|
||||||
|
Revision ID: a13ed6ad0e3e
|
||||||
|
Revises: 626c17026ca7
|
||||||
|
Create Date: 2022-10-13 11:56:15.303218
|
||||||
|
|
||||||
|
"""
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'a13ed6ad0e3e'
|
||||||
|
down_revision = '626c17026ca7'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'placeholder',
|
||||||
|
sa.Column('kangaroo', sa.Boolean(), nullable=True),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('placeholder', 'kangaroo', schema=f'{get_inv()}')
|
|
@ -0,0 +1,35 @@
|
||||||
|
"""add rols to user
|
||||||
|
|
||||||
|
Revision ID: a8a86dbd5f51
|
||||||
|
Revises: 5169765e2653
|
||||||
|
Create Date: 2023-06-14 15:04:03.478157
|
||||||
|
|
||||||
|
"""
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'a8a86dbd5f51'
|
||||||
|
down_revision = '2f2ef041483a'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'user',
|
||||||
|
sa.Column('rols_dlt', type_=citext.CIText(), nullable=True),
|
||||||
|
schema='common',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('user', 'rols_dlt', schema='common')
|
|
@ -0,0 +1,101 @@
|
||||||
|
"""add document device
|
||||||
|
|
||||||
|
Revision ID: ac476b60d952
|
||||||
|
Revises: 4f33137586dd
|
||||||
|
Create Date: 2023-03-31 10:46:02.463007
|
||||||
|
|
||||||
|
"""
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'ac476b60d952'
|
||||||
|
down_revision = '4f33137586dd'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.create_table(
|
||||||
|
'device_document',
|
||||||
|
sa.Column(
|
||||||
|
'updated',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'created',
|
||||||
|
sa.TIMESTAMP(timezone=True),
|
||||||
|
server_default=sa.text('CURRENT_TIMESTAMP'),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'id',
|
||||||
|
postgresql.UUID(as_uuid=True),
|
||||||
|
nullable=False,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'type',
|
||||||
|
citext.CIText(),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'date',
|
||||||
|
sa.DateTime(),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'id_document',
|
||||||
|
citext.CIText(),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'description',
|
||||||
|
citext.CIText(),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
sa.Column('owner_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('device_id', sa.BigInteger(), nullable=False),
|
||||||
|
sa.Column(
|
||||||
|
'file_name',
|
||||||
|
citext.CIText(),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'file_hash',
|
||||||
|
citext.CIText(),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'url',
|
||||||
|
citext.CIText(),
|
||||||
|
teal.db.URL(),
|
||||||
|
nullable=True,
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['device_id'],
|
||||||
|
[f'{get_inv()}.device.id'],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['owner_id'],
|
||||||
|
['common.user.id'],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_table('device_document', schema=f'{get_inv()}')
|
|
@ -0,0 +1,35 @@
|
||||||
|
"""add settings_version to snapshots
|
||||||
|
|
||||||
|
Revision ID: af038a8a388c
|
||||||
|
Revises: 410aadae7652
|
||||||
|
Create Date: 2022-11-30 16:21:05.768024
|
||||||
|
|
||||||
|
"""
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'af038a8a388c'
|
||||||
|
down_revision = '410aadae7652'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'snapshot',
|
||||||
|
sa.Column('settings_version', citext.CIText(), nullable=True),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('snapshot', 'settings_version', schema=f'{get_inv()}')
|
|
@ -6,7 +6,7 @@ Create Date: 2020-12-29 20:19:46.981207
|
||||||
|
|
||||||
"""
|
"""
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
from alembic import context, op
|
from alembic import context, op
|
||||||
from sqlalchemy.dialects import postgresql
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,7 @@ from alembic import op
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
import citext
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
|
@ -26,6 +26,7 @@ def get_inv():
|
||||||
raise ValueError("Inventory value is not specified")
|
raise ValueError("Inventory value is not specified")
|
||||||
return INV
|
return INV
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
con = op.get_bind()
|
con = op.get_bind()
|
||||||
sql = f"""
|
sql = f"""
|
||||||
|
@ -60,6 +61,5 @@ def upgrade():
|
||||||
con.execute(sql)
|
con.execute(sql)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
def downgrade():
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -0,0 +1,34 @@
|
||||||
|
"""add is_server_erase
|
||||||
|
|
||||||
|
Revision ID: d65745749e34
|
||||||
|
Revises: a13ed6ad0e3e
|
||||||
|
Create Date: 2022-10-17 13:20:29.875274
|
||||||
|
|
||||||
|
"""
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'd65745749e34'
|
||||||
|
down_revision = 'a13ed6ad0e3e'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
op.add_column(
|
||||||
|
'snapshot',
|
||||||
|
sa.Column('is_server_erase', sa.Boolean(), nullable=True),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
op.drop_column('snapshot', 'is_server_erase', schema=f'{get_inv()}')
|
|
@ -10,7 +10,7 @@ import sqlalchemy as sa
|
||||||
from alembic import context
|
from alembic import context
|
||||||
import sqlalchemy_utils
|
import sqlalchemy_utils
|
||||||
import citext
|
import citext
|
||||||
import teal
|
from ereuse_devicehub import teal
|
||||||
from sqlalchemy.dialects import postgresql
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
|
@ -26,48 +26,85 @@ def get_inv():
|
||||||
raise ValueError("Inventory value is not specified")
|
raise ValueError("Inventory value is not specified")
|
||||||
return INV
|
return INV
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
def upgrade():
|
||||||
# Allocate action
|
# Allocate action
|
||||||
op.drop_table('allocate', schema=f'{get_inv()}')
|
op.drop_table('allocate', schema=f'{get_inv()}')
|
||||||
op.create_table('allocate',
|
op.create_table(
|
||||||
sa.Column('final_user_code', citext.CIText(), default='', nullable=True,
|
'allocate',
|
||||||
comment = "This is a internal code for mainteing the secrets of the personal datas of the new holder"),
|
sa.Column(
|
||||||
sa.Column('transaction', citext.CIText(), nullable=True, comment='The code used from the owner for relation with external tool.'),
|
'final_user_code',
|
||||||
|
citext.CIText(),
|
||||||
|
default='',
|
||||||
|
nullable=True,
|
||||||
|
comment="This is a internal code for mainteing the secrets of the personal datas of the new holder",
|
||||||
|
),
|
||||||
|
sa.Column(
|
||||||
|
'transaction',
|
||||||
|
citext.CIText(),
|
||||||
|
nullable=True,
|
||||||
|
comment='The code used from the owner for relation with external tool.',
|
||||||
|
),
|
||||||
sa.Column('end_users', sa.Numeric(precision=4), nullable=True),
|
sa.Column('end_users', sa.Numeric(precision=4), nullable=True),
|
||||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.ForeignKeyConstraint(['id'], [f'{get_inv()}.action.id'], ),
|
sa.ForeignKeyConstraint(
|
||||||
|
['id'],
|
||||||
|
[f'{get_inv()}.action.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
|
|
||||||
# Deallocate action
|
# Deallocate action
|
||||||
op.drop_table('deallocate', schema=f'{get_inv()}')
|
op.drop_table('deallocate', schema=f'{get_inv()}')
|
||||||
op.create_table('deallocate',
|
op.create_table(
|
||||||
sa.Column('transaction', citext.CIText(), nullable=True, comment='The code used from the owner for relation with external tool.'),
|
'deallocate',
|
||||||
|
sa.Column(
|
||||||
|
'transaction',
|
||||||
|
citext.CIText(),
|
||||||
|
nullable=True,
|
||||||
|
comment='The code used from the owner for relation with external tool.',
|
||||||
|
),
|
||||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.ForeignKeyConstraint(['id'], [f'{get_inv()}.action.id'], ),
|
sa.ForeignKeyConstraint(
|
||||||
|
['id'],
|
||||||
|
[f'{get_inv()}.action.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
|
|
||||||
# Add allocate as a column in device
|
# Add allocate as a column in device
|
||||||
op.add_column('device', sa.Column('allocated', sa.Boolean(), nullable=True), schema=f'{get_inv()}')
|
op.add_column(
|
||||||
|
'device',
|
||||||
|
sa.Column('allocated', sa.Boolean(), nullable=True),
|
||||||
|
schema=f'{get_inv()}',
|
||||||
|
)
|
||||||
|
|
||||||
# Receive action
|
# Receive action
|
||||||
op.drop_table('receive', schema=f'{get_inv()}')
|
op.drop_table('receive', schema=f'{get_inv()}')
|
||||||
|
|
||||||
# Live action
|
# Live action
|
||||||
op.drop_table('live', schema=f'{get_inv()}')
|
op.drop_table('live', schema=f'{get_inv()}')
|
||||||
op.create_table('live',
|
op.create_table(
|
||||||
|
'live',
|
||||||
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.Column('serial_number', sa.Unicode(), nullable=True,
|
sa.Column(
|
||||||
comment='The serial number of the Hard Disk in lower case.'),
|
'serial_number',
|
||||||
|
sa.Unicode(),
|
||||||
|
nullable=True,
|
||||||
|
comment='The serial number of the Hard Disk in lower case.',
|
||||||
|
),
|
||||||
sa.Column('usage_time_hdd', sa.Interval(), nullable=True),
|
sa.Column('usage_time_hdd', sa.Interval(), nullable=True),
|
||||||
sa.Column('snapshot_uuid', postgresql.UUID(as_uuid=True), nullable=False),
|
sa.Column('snapshot_uuid', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
sa.ForeignKeyConstraint(['id'], [f'{get_inv()}.action.id'], ),
|
sa.ForeignKeyConstraint(
|
||||||
|
['id'],
|
||||||
|
[f'{get_inv()}.action.id'],
|
||||||
|
),
|
||||||
sa.PrimaryKeyConstraint('id'),
|
sa.PrimaryKeyConstraint('id'),
|
||||||
schema=f'{get_inv()}'
|
schema=f'{get_inv()}',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
def downgrade():
|
||||||
op.drop_table('allocate', schema=f'{get_inv()}')
|
op.drop_table('allocate', schema=f'{get_inv()}')
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,280 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta content="width=device-width, initial-scale=1.0" name="viewport">
|
||||||
|
|
||||||
|
<title>Device {{ device_real.dhid }} - Usody</title>
|
||||||
|
<meta content="" name="description">
|
||||||
|
<meta content="" name="keywords">
|
||||||
|
|
||||||
|
<!-- Favicons -->
|
||||||
|
<link href="{{ url_for('static', filename='img/favicon.png') }}" rel="icon">
|
||||||
|
<link href="{{ url_for('static', filename='img/apple-touch-icon.png') }}" rel="apple-touch-icon">
|
||||||
|
|
||||||
|
<!-- Google Fonts -->
|
||||||
|
<link href="https://fonts.gstatic.com" rel="preconnect">
|
||||||
|
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,600,600i,700,700i|Nunito:300,300i,400,400i,600,600i,700,700i|Poppins:300,300i,400,400i,500,500i,600,600i,700,700i" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- JS Files -->
|
||||||
|
<script src="{{ url_for('static', filename='js/jquery-3.6.0.min.js') }}"></script>
|
||||||
|
<script src="{{ url_for('static', filename='vendor/bootstrap/js/bootstrap.bundle.min.js') }}"></script>
|
||||||
|
|
||||||
|
<!-- Vendor CSS Files -->
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap/css/bootstrap.min.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap-icons/bootstrap-icons.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Template Main CSS File -->
|
||||||
|
<link href="{{ url_for('static', filename='css/style.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='css/devicehub.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- =======================================================
|
||||||
|
* Template Name: NiceAdmin - v2.2.0
|
||||||
|
* Template URL: https://bootstrapmade.com/nice-admin-bootstrap-admin-html-template/
|
||||||
|
* Author: BootstrapMade.com
|
||||||
|
* License: https://bootstrapmade.com/license/
|
||||||
|
======================================================== -->
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<main>
|
||||||
|
|
||||||
|
<section class="container mt-3">
|
||||||
|
<div class="row">
|
||||||
|
|
||||||
|
<div class="col">
|
||||||
|
<nav class="header-nav ms-auto">
|
||||||
|
<ul class="d-flex align-items-right">
|
||||||
|
<li class="nav-item">
|
||||||
|
{% if not rols and user.is_anonymous %}
|
||||||
|
<button class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#validateModal">Validate</button>
|
||||||
|
{% else %}
|
||||||
|
<button class="btn btn-primary" id="buttonRol" data-bs-toggle="modal" data-bs-target="#rolsModal">Select your role</button>
|
||||||
|
<a class="btn btn-primary" href="{{ url_for('core.logout') }}?next={{ path }}">Logout</a>
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
{% if rol %}
|
||||||
|
<br />Current Role: {{ rol }}
|
||||||
|
{% endif %}
|
||||||
|
</nav>
|
||||||
|
<div class="col-xl-12">
|
||||||
|
|
||||||
|
<div class="card">
|
||||||
|
<div class="card-body">
|
||||||
|
<h3 class="nav-link mt-5" style="color: #993365">{{ device_real.type }} - {{ device_real.verbose_name }}</h3>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Details</h5>
|
||||||
|
{% if manuals.details %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
{% if manuals.details.logo %}
|
||||||
|
<img style="max-width: 50px; margin-right: 15px;" src="{{ manuals.details.logo }}" />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{% if manuals.details.image %}
|
||||||
|
<img style="width: 100px;" src="{{ manuals.details.image }}" />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Type
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.type or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Manufacturer
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.manufacturer and device_real.manufacturer.upper() or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Model
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.model or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Device Identifier (CHID):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=device_abstract.chid) }}">{{ device_abstract.chid }}</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Manufacturer DPP:
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Usody Identifier (DHID)
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.dhid }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row mt-3">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Components</h5>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<ul>
|
||||||
|
{% for component in components %}
|
||||||
|
{% if component.type == "Processor" %}
|
||||||
|
<li>
|
||||||
|
<strong>Processor</strong>: {{ component.manufacturer or '- not detected -' }} {{ component.model or '- not detected -'}}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% for component in components %}
|
||||||
|
{% if component.type in ['HardDrive', 'SolidStateDrive'] %}
|
||||||
|
<li>
|
||||||
|
<strong>{{ component.type }}</strong>:
|
||||||
|
{% if component.size %}{{ component.size/1000 }}GB{% else %} - not detected - {% endif %}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% for component in components %}
|
||||||
|
{% if component.type == 'RamModule' %}
|
||||||
|
<li>
|
||||||
|
<strong>Ram</strong>:
|
||||||
|
{% if component.size %}{{ component.size }}MB{% else %} - not detected - {% endif %}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% for component in components %}
|
||||||
|
{% if component.type == 'SoundCard' %}
|
||||||
|
<li>
|
||||||
|
<strong>Sound</strong>: {{ component.manufacturer or '- not detected -' }} {{ component.model or '- not detected -'}}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% for component in components %}
|
||||||
|
{% if component.type == 'NetworkAdapter' %}
|
||||||
|
<li>
|
||||||
|
<strong>Network</strong>: {{ component.manufacturer or '- not detected -' }} {{ component.model or '- not detected -'}}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
</main>
|
||||||
|
<!-- ======= Footer ======= -->
|
||||||
|
<div class="container">
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<footer class="footer">
|
||||||
|
<div class="copyright">
|
||||||
|
© Copyright <strong><span>Usody</span></strong>. All Rights Reserved
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
<a href="https://help.usody.com/en/" target="_blank">Help</a> |
|
||||||
|
<a href="https://www.usody.com/legal/privacy-policy" target="_blank">Privacy</a> |
|
||||||
|
<a href="https://www.usody.com/legal/terms" target="_blank">Terms</a>
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
DeviceHub
|
||||||
|
</div>
|
||||||
|
</footer><!-- End Footer -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if user.is_anonymous and not rols %}
|
||||||
|
<div class="modal fade" id="validateModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Validate as <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<a class="btn btn-primary" type="button"
|
||||||
|
href="{{ url_for('core.login') }}?next={{ path }}">
|
||||||
|
User of system
|
||||||
|
</a>
|
||||||
|
{% if oidc %}
|
||||||
|
<br />
|
||||||
|
<a class="btn btn-primary mt-3" type="button" href="{{ url_for('oidc.login_other_inventory') }}?next={{ path }}">
|
||||||
|
Use a wallet
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer"></div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="modal fade" id="rolsModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<form action="{{ path }}" method="get">
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Select your Role <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<select name="rol">
|
||||||
|
{% for k, v in rols %}
|
||||||
|
<option value="{{ k }}" {% if v==rol %}selected=selected{% endif %}>{{ v }}</option>
|
||||||
|
{% endfor %}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer">
|
||||||
|
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||||
|
<input type="submit" class="btn btn-primary" value="Send" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</body>
|
||||||
|
|
||||||
|
<!-- Custom Code -->
|
||||||
|
{% if rols and not rol %}
|
||||||
|
<script>
|
||||||
|
$(document).ready(() => {
|
||||||
|
$("#buttonRol").click();
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</html>
|
|
@ -0,0 +1,300 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta content="width=device-width, initial-scale=1.0" name="viewport">
|
||||||
|
|
||||||
|
<title>Device {{ device_real.dhid }} - Usody</title>
|
||||||
|
<meta content="" name="description">
|
||||||
|
<meta content="" name="keywords">
|
||||||
|
|
||||||
|
<!-- Favicons -->
|
||||||
|
<link href="{{ url_for('static', filename='img/favicon.png') }}" rel="icon">
|
||||||
|
<link href="{{ url_for('static', filename='img/apple-touch-icon.png') }}" rel="apple-touch-icon">
|
||||||
|
|
||||||
|
<!-- Google Fonts -->
|
||||||
|
<link href="https://fonts.gstatic.com" rel="preconnect">
|
||||||
|
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,600,600i,700,700i|Nunito:300,300i,400,400i,600,600i,700,700i|Poppins:300,300i,400,400i,500,500i,600,600i,700,700i" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- JS Files -->
|
||||||
|
<script src="{{ url_for('static', filename='js/jquery-3.6.0.min.js') }}"></script>
|
||||||
|
<script src="{{ url_for('static', filename='vendor/bootstrap/js/bootstrap.bundle.min.js') }}"></script>
|
||||||
|
|
||||||
|
<!-- Vendor CSS Files -->
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap/css/bootstrap.min.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap-icons/bootstrap-icons.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Template Main CSS File -->
|
||||||
|
<link href="{{ url_for('static', filename='css/style.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='css/devicehub.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- =======================================================
|
||||||
|
* Template Name: NiceAdmin - v2.2.0
|
||||||
|
* Template URL: https://bootstrapmade.com/nice-admin-bootstrap-admin-html-template/
|
||||||
|
* Author: BootstrapMade.com
|
||||||
|
* License: https://bootstrapmade.com/license/
|
||||||
|
======================================================== -->
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<main>
|
||||||
|
|
||||||
|
<section class="container mt-3">
|
||||||
|
<div class="row">
|
||||||
|
|
||||||
|
<div class="col">
|
||||||
|
<nav class="header-nav ms-auto">
|
||||||
|
<ul class="d-flex align-items-right">
|
||||||
|
<li class="nav-item">
|
||||||
|
{% if not rols and user.is_anonymous %}
|
||||||
|
<button class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#validateModal">Validate</button>
|
||||||
|
{% else %}
|
||||||
|
<button class="btn btn-primary" id="buttonRol" data-bs-toggle="modal" data-bs-target="#rolsModal">Select your role</button>
|
||||||
|
<a class="btn btn-primary" href="{{ url_for('core.logout') }}?next={{ path }}">Logout</a>
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
{% if rol %}
|
||||||
|
<br />Current Role: {{ rol }}
|
||||||
|
{% endif %}
|
||||||
|
</nav>
|
||||||
|
<div class="col-xl-12">
|
||||||
|
|
||||||
|
<div class="card">
|
||||||
|
<div class="card-body">
|
||||||
|
<h3 class="nav-link mt-5" style="color: #993365">{{ device_real.type }} - {{ device_real.verbose_name }}</h3>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Basic</h5>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Device Identifier (CHID):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=device_abstract.chid) }}">{{ device_abstract.chid }}</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Last Digital Passport (Last Dpp):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{% if last_dpp %}
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=last_dpp.key) }}">{{ last_dpp.key }}</a>
|
||||||
|
{% else %}
|
||||||
|
- not detected -
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Before Digital Passport (Before Dpp):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{% if before_dpp %}
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=before_dpp.key) }}">{{ before_dpp.key }}</a>
|
||||||
|
{% else %}
|
||||||
|
- not detected -
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Usody Identifier (DHID)
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.dhid }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Inventory Identifier (PHID)
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.phid() }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Type
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.type or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Manufacturer
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.manufacturer or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Model
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.model or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Part Number
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.part_number or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Serial Number
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{% if rol %}
|
||||||
|
{{ device_abstract.serial_number and device_abstract.serial_number.upper() or '- not detected -' }}
|
||||||
|
{% else %}
|
||||||
|
- anonymized -
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row mt-3">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Components</h5>
|
||||||
|
<div class="row">
|
||||||
|
{% if components %}
|
||||||
|
<div class="list-group col">
|
||||||
|
{% for component in components|sort(attribute='type') %}
|
||||||
|
<div class="list-group-item">
|
||||||
|
<div class="d-flex w-100 justify-content-between">
|
||||||
|
<h5 class="mb-1">{{ component.type }}</h5>
|
||||||
|
<small class="text-muted">{{ component.created.strftime('%H:%M %d-%m-%Y') }}</small>
|
||||||
|
</div>
|
||||||
|
<p class="mb-1">
|
||||||
|
Manufacturer: {{ component.manufacturer or '- not detected -' }}<br />
|
||||||
|
Model: {{ component.model or '- not detected -' }}<br />
|
||||||
|
{% if rol %}
|
||||||
|
Serial: {{ component.serial_number and component.serial_number.upper() or '- not detected -' }}
|
||||||
|
{% endif %}
|
||||||
|
</p>
|
||||||
|
<small class="text-muted">
|
||||||
|
{% if component.type in ['RamModule', 'HardDrive', 'SolidStateDrive'] %}
|
||||||
|
{{ component.size }}MB
|
||||||
|
{% endif %}
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="list-group col">
|
||||||
|
<div class="list-group-item">
|
||||||
|
- not detected -
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
</main>
|
||||||
|
<!-- ======= Footer ======= -->
|
||||||
|
<div class="container">
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<footer class="footer">
|
||||||
|
<div class="copyright">
|
||||||
|
© Copyright <strong><span>Usody</span></strong>. All Rights Reserved
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
<a href="https://help.usody.com/en/" target="_blank">Help</a> |
|
||||||
|
<a href="https://www.usody.com/legal/privacy-policy" target="_blank">Privacy</a> |
|
||||||
|
<a href="https://www.usody.com/legal/terms" target="_blank">Terms</a>
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
DeviceHub
|
||||||
|
</div>
|
||||||
|
</footer><!-- End Footer -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if user.is_anonymous and not rols %}
|
||||||
|
<div class="modal fade" id="validateModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Validate as <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<a class="btn btn-primary" type="button"
|
||||||
|
href="{{ url_for('core.login') }}?next={{ path }}">
|
||||||
|
User of system
|
||||||
|
</a>
|
||||||
|
{% if oidc %}
|
||||||
|
<br />
|
||||||
|
<a class="btn btn-primary mt-3" type="button" href="{{ url_for('oidc.login_other_inventory') }}?next={{ path }}">
|
||||||
|
User of other inventory
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer"></div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="modal fade" id="rolsModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<form action="{{ path }}" method="get">
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Select your Role <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<select name="rol">
|
||||||
|
{% for k, v in rols %}
|
||||||
|
<option value="{{ k }}" {% if v==rol %}selected=selected{% endif %}>{{ v }}</option>
|
||||||
|
{% endfor %}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer">
|
||||||
|
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||||
|
<input type="submit" class="btn btn-primary" value="Send" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</body>
|
||||||
|
|
||||||
|
<!-- Custom Code -->
|
||||||
|
{% if not user.is_anonymous and not rol %}
|
||||||
|
<script>
|
||||||
|
$(document).ready(() => {
|
||||||
|
$("#buttonRol").click();
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</html>
|
|
@ -0,0 +1,585 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta content="width=device-width, initial-scale=1.0" name="viewport">
|
||||||
|
|
||||||
|
<title>Device {{ device_real.dhid }} - Usody</title>
|
||||||
|
<meta content="" name="description">
|
||||||
|
<meta content="" name="keywords">
|
||||||
|
|
||||||
|
<!-- Favicons -->
|
||||||
|
<link href="{{ url_for('static', filename='img/favicon.png') }}" rel="icon">
|
||||||
|
<link href="{{ url_for('static', filename='img/apple-touch-icon.png') }}" rel="apple-touch-icon">
|
||||||
|
|
||||||
|
<!-- Google Fonts -->
|
||||||
|
<link href="https://fonts.gstatic.com" rel="preconnect">
|
||||||
|
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,600,600i,700,700i|Nunito:300,300i,400,400i,600,600i,700,700i|Poppins:300,300i,400,400i,500,500i,600,600i,700,700i" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- JS Files -->
|
||||||
|
<script src="{{ url_for('static', filename='js/jquery-3.6.0.min.js') }}"></script>
|
||||||
|
<script src="{{ url_for('static', filename='vendor/bootstrap/js/bootstrap.bundle.min.js') }}"></script>
|
||||||
|
|
||||||
|
<!-- Vendor CSS Files -->
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap/css/bootstrap.min.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap-icons/bootstrap-icons.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Template Main CSS File -->
|
||||||
|
<link href="{{ url_for('static', filename='css/style.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='css/devicehub.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- =======================================================
|
||||||
|
* Template Name: NiceAdmin - v2.2.0
|
||||||
|
* Template URL: https://bootstrapmade.com/nice-admin-bootstrap-admin-html-template/
|
||||||
|
* Author: BootstrapMade.com
|
||||||
|
* License: https://bootstrapmade.com/license/
|
||||||
|
======================================================== -->
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<main>
|
||||||
|
|
||||||
|
<section class="container mt-3">
|
||||||
|
<div class="row">
|
||||||
|
|
||||||
|
<div class="col">
|
||||||
|
<nav class="header-nav ms-auto">
|
||||||
|
<ul class="d-flex align-items-right">
|
||||||
|
<li class="nav-item">
|
||||||
|
{% if not rols and user.is_anonymous %}
|
||||||
|
<button class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#validateModal">Validate</button>
|
||||||
|
{% else %}
|
||||||
|
<button class="btn btn-primary" id="buttonRol" data-bs-toggle="modal" data-bs-target="#rolsModal">Select your role</button>
|
||||||
|
<a class="btn btn-primary" href="{{ url_for('core.logout') }}?next={{ path }}">Logout</a>
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
{% if rol %}
|
||||||
|
<br />Current Role: {{ rol }}
|
||||||
|
{% endif %}
|
||||||
|
</nav>
|
||||||
|
<div class="col-xl-12">
|
||||||
|
|
||||||
|
<div class="card">
|
||||||
|
<div class="card-body">
|
||||||
|
<h3 class="nav-link mt-5" style="color: #993365">{{ device_real.type }} - {{ device_real.verbose_name }}</h3>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Details</h5>
|
||||||
|
{% if manuals.details %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
{% if manuals.details.logo %}
|
||||||
|
<img style="max-width: 50px; margin-right: 15px;" src="{{ manuals.details.logo }}" />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{% if manuals.details.image %}
|
||||||
|
<img style="width: 100px;" src="{{ manuals.details.image }}" />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Type
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.type or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Manufacturer
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.manufacturer or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Model
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.model or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Part Number
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.part_number or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Serial Number
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_abstract.serial_number and device_abstract.serial_number.upper() or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Usody Identifier (DHID)
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.dhid }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Inventory Identifier (PHID)
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.phid() }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Device Identifier (CHID):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=device_abstract.chid) }}">{{ device_abstract.chid|truncate(20, True, '...') }}</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Manufacturer DPP:
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Last Digital Passport (Last Dpp):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{% if last_dpp %}
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=last_dpp.key) }}">{{ last_dpp.key|truncate(20, True, '...') }}</a>
|
||||||
|
{% else %}
|
||||||
|
- not detected -
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row mt-3">
|
||||||
|
<div class="col-6">
|
||||||
|
<h5 class="card-title">Status</h5>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<div class="label"><b>Physical</b></div>
|
||||||
|
<div>{{ device_real.physical_status and device.physical_status.type or '- not status -' }}</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<div class="label"><b>Lifecycle</b></div>
|
||||||
|
<div>{{ device_real.status and device_real.status.type or '- not status -' }}</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<div class="label"><b>Allocation</b></div>
|
||||||
|
<div>
|
||||||
|
{% if device_real.allocated %}
|
||||||
|
Allocated
|
||||||
|
{% else %}
|
||||||
|
Not allocated
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="col-6">
|
||||||
|
{% if manuals.icecat %}
|
||||||
|
<h5 class="card-title">Icecat data sheet</h5>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-12 list-group-item d-flex align-items-center">
|
||||||
|
{% if manuals.details.logo %}
|
||||||
|
<img style="max-width: 50px; margin-right: 15px;" src="{{ manuals.details.logo }}" />
|
||||||
|
{% endif %}
|
||||||
|
{% if manuals.details.image %}
|
||||||
|
<img style="max-width: 100px; margin-right: 15px;" src="{{ manuals.details.image }}" />
|
||||||
|
{% endif %}
|
||||||
|
{% if manuals.details.pdf %}
|
||||||
|
<a href="{{ manuals.details.pdf }}" target="_blank">{{ manuals.details.title }}</a><br />
|
||||||
|
{% else %}
|
||||||
|
{{ manuals.details.title }}<br />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="col-12 accordion-item">
|
||||||
|
<h5 class="card-title accordion-header">
|
||||||
|
<button class="accordion-button collapsed" data-bs-target="#manuals-icecat" type="button"
|
||||||
|
data-bs-toggle="collapse" aria-expanded="false">
|
||||||
|
More examples
|
||||||
|
</button>
|
||||||
|
</h5>
|
||||||
|
<div id="manuals-icecat" class="row accordion-collapse collapse">
|
||||||
|
<div class="accordion-body">
|
||||||
|
{% for m in manuals.icecat %}
|
||||||
|
<div class="list-group-item d-flex align-items-center">
|
||||||
|
{% if m.logo %}
|
||||||
|
<img style="max-width: 50px; margin-right: 15px;" src="{{ m.logo }}" />
|
||||||
|
{% endif %}
|
||||||
|
{% if m.pdf %}
|
||||||
|
<a href="{{ m.pdf }}" target="_blank">{{ m.title }}</a><br />
|
||||||
|
{% else %}
|
||||||
|
{{ m.title }}<br />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row mt-3">
|
||||||
|
<div class="col-6">
|
||||||
|
<h5 class="card-title">Components</h5>
|
||||||
|
<div class="row">
|
||||||
|
{% if components %}
|
||||||
|
<div class="list-group col">
|
||||||
|
{% for component in components|sort(attribute='type') %}
|
||||||
|
<div class="list-group-item">
|
||||||
|
<div class="d-flex w-100 justify-content-between">
|
||||||
|
<h5 class="mb-1">{{ component.type }}</h5>
|
||||||
|
<small class="text-muted">{{ component.created.strftime('%H:%M %d-%m-%Y') }}</small>
|
||||||
|
</div>
|
||||||
|
<p class="mb-1">
|
||||||
|
Manufacturer: {{ component.manufacturer or '- not detected -' }}<br />
|
||||||
|
Model: {{ component.model or '- not detected -' }}<br />
|
||||||
|
Serial: {{ component.serial_number and component.serial_number.upper() or '- not detected -' }}
|
||||||
|
</p>
|
||||||
|
<small class="text-muted">
|
||||||
|
{% if component.type in ['RamModule', 'HardDrive', 'SolidStateDrive'] %}
|
||||||
|
{{ component.size }}MB
|
||||||
|
{% endif %}
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="list-group col">
|
||||||
|
<div class="list-group-item">
|
||||||
|
- not detected -
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="col-6">
|
||||||
|
<h5 class="card-title">Repair history</h5>
|
||||||
|
<div class="row">
|
||||||
|
<div class="list-group col">
|
||||||
|
{% for action in placeholder.actions %}
|
||||||
|
<div class="list-group-item d-flex justify-content-between align-items-center">
|
||||||
|
{{ action.type }} {{ action.severity }}
|
||||||
|
<small class="text-muted">{{ action.created.strftime('%H:%M %d-%m-%Y') }}</small>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% if manuals.laer %}
|
||||||
|
<div class="row mt-3">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Recycled Content</h5>
|
||||||
|
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-2">
|
||||||
|
Metal
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-10">
|
||||||
|
<div class="progress">
|
||||||
|
|
||||||
|
<div class="progress-bar"
|
||||||
|
role="progressbar"
|
||||||
|
style="width: {{ manuals.laer.0.metal }}%"
|
||||||
|
aria-valuenow="{{ manuals.laer.0.metal }}"
|
||||||
|
aria-valuemin="0"
|
||||||
|
aria-valuemax="100">{{ manuals.laer.0.metal }}%
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-2">
|
||||||
|
Plastic post Consumer
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-10">
|
||||||
|
<div class="progress">
|
||||||
|
<div class="progress-bar"
|
||||||
|
role="progressbar"
|
||||||
|
style="width: {{ manuals.laer.0.plastic_post_consumer }}%"
|
||||||
|
aria-valuenow="{{ manuals.laer.0.plastic_post_consumer }}"
|
||||||
|
aria-valuemin="0"
|
||||||
|
aria-valuemax="100">{{ manuals.laer.0.plastic_post_consumer }}%
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-2">
|
||||||
|
Plastic post Industry
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-10">
|
||||||
|
<div class="progress">
|
||||||
|
<div class="progress-bar"
|
||||||
|
role="progressbar"
|
||||||
|
style="width: {{ manuals.laer.0.plastic_post_industry }}%"
|
||||||
|
aria-valuenow="{{ manuals.laer.0.plastic_post_industry }}"
|
||||||
|
aria-valuemin="0"
|
||||||
|
aria-valuemax="100">{{ manuals.laer.0.plastic_post_industry }}%
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar %}
|
||||||
|
<div class="row mt-3">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Energy spent</h5>
|
||||||
|
|
||||||
|
{% if manuals.energystar.long_idle_watts %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
Consumption when inactivity power function is activated (watts)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.long_idle_watts }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.short_idle_watts %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
Consumption when inactivity power function is not activated (watts)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.short_idle_watts }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.sleep_mode_watts %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
sleep_mode_watts
|
||||||
|
Consumption when computer goes into sleep mode (watts)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.sleep_mode_watts }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.off_mode_watts %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
Consumption when the computer is off (watts)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.off_mode_watts }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.tec_allowance_kwh %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
Power allocation for normal operation (kwh)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.tec_allowance_kwh }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.tec_of_model_kwh %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
Consumption of the model configuration (kwh)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.tec_of_model_kwh }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.tec_requirement_kwh %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
Energy allowance provided (kwh)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.tec_requirement_kwh }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.work_off_mode_watts %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
The lowest power mode which cannot be switched off (watts)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.work_off_mode_watts }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if manuals.energystar.work_weighted_power_of_model_watts %}
|
||||||
|
<div class="row mb-3">
|
||||||
|
<div class="col-sm-10">
|
||||||
|
Weighted energy consumption from all its states (watts)
|
||||||
|
</div>
|
||||||
|
<div class="col-sm-2">
|
||||||
|
{{ manuals.energystar.work_weighted_power_of_model_watts }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
{% if manuals.ifixit %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-12 accordion-item">
|
||||||
|
<h5 class="card-title accordion-header">
|
||||||
|
<button class="accordion-button collapsed" data-bs-target="#manuals-repair" type="button"
|
||||||
|
data-bs-toggle="collapse" aria-expanded="false">
|
||||||
|
Repair manuals
|
||||||
|
</button>
|
||||||
|
</h5>
|
||||||
|
<div id="manuals-repair" class="row accordion-collapse collapse">
|
||||||
|
<div class="list-group col">
|
||||||
|
{% for m in manuals.ifixit %}
|
||||||
|
<div class="list-group-item d-flex align-items-center">
|
||||||
|
{% if m.image %}
|
||||||
|
<img style="max-width: 100px; margin-right: 15px;" src="{{ m.image }}" />
|
||||||
|
{% endif %}
|
||||||
|
{% if m.url %}
|
||||||
|
<a href="{{ m.url }}" target="_blank">{{ m.title }}</a><br />
|
||||||
|
{% else %}
|
||||||
|
{{ m.title }}<br />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
</main>
|
||||||
|
<!-- ======= Footer ======= -->
|
||||||
|
<div class="container">
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<footer class="footer">
|
||||||
|
<div class="copyright">
|
||||||
|
© Copyright <strong><span>Usody</span></strong>. All Rights Reserved
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
<a href="https://help.usody.com/en/" target="_blank">Help</a> |
|
||||||
|
<a href="https://www.usody.com/legal/privacy-policy" target="_blank">Privacy</a> |
|
||||||
|
<a href="https://www.usody.com/legal/terms" target="_blank">Terms</a>
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
DeviceHub
|
||||||
|
</div>
|
||||||
|
</footer><!-- End Footer -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if user.is_anonymous and not rols %}
|
||||||
|
<div class="modal fade" id="validateModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Validate as <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<a class="btn btn-primary" type="button"
|
||||||
|
href="{{ url_for('core.login') }}?next={{ path }}">
|
||||||
|
User of system
|
||||||
|
</a>
|
||||||
|
{% if oidc %}
|
||||||
|
<br />
|
||||||
|
<a class="btn btn-primary mt-3" type="button" href="{{ url_for('oidc.login_other_inventory') }}?next={{ path }}">
|
||||||
|
User of other inventory
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer"></div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="modal fade" id="rolsModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<form action="{{ path }}" method="get">
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Select your Role <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<select name="rol">
|
||||||
|
{% for k, v in rols %}
|
||||||
|
<option value="{{ k }}" {% if v==rol %}selected=selected{% endif %}>{{ v }}</option>
|
||||||
|
{% endfor %}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer">
|
||||||
|
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||||
|
<input type="submit" class="btn btn-primary" value="Send" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</body>
|
||||||
|
|
||||||
|
<!-- Custom Code -->
|
||||||
|
{% if not user.is_anonymous and not rol %}
|
||||||
|
<script>
|
||||||
|
$(document).ready(() => {
|
||||||
|
$("#buttonRol").click();
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</html>
|
|
@ -0,0 +1,341 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta content="width=device-width, initial-scale=1.0" name="viewport">
|
||||||
|
|
||||||
|
<title>Device {{ device_real.dhid }} - Usody</title>
|
||||||
|
<meta content="" name="description">
|
||||||
|
<meta content="" name="keywords">
|
||||||
|
|
||||||
|
<!-- Favicons -->
|
||||||
|
<link href="{{ url_for('static', filename='img/favicon.png') }}" rel="icon">
|
||||||
|
<link href="{{ url_for('static', filename='img/apple-touch-icon.png') }}" rel="apple-touch-icon">
|
||||||
|
|
||||||
|
<!-- Google Fonts -->
|
||||||
|
<link href="https://fonts.gstatic.com" rel="preconnect">
|
||||||
|
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,600,600i,700,700i|Nunito:300,300i,400,400i,600,600i,700,700i|Poppins:300,300i,400,400i,500,500i,600,600i,700,700i" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- JS Files -->
|
||||||
|
<script src="{{ url_for('static', filename='js/jquery-3.6.0.min.js') }}"></script>
|
||||||
|
<script src="{{ url_for('static', filename='vendor/bootstrap/js/bootstrap.bundle.min.js') }}"></script>
|
||||||
|
|
||||||
|
<!-- Vendor CSS Files -->
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap/css/bootstrap.min.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='vendor/bootstrap-icons/bootstrap-icons.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
|
||||||
|
<!-- Template Main CSS File -->
|
||||||
|
<link href="{{ url_for('static', filename='css/style.css') }}" rel="stylesheet">
|
||||||
|
<link href="{{ url_for('static', filename='css/devicehub.css') }}" rel="stylesheet">
|
||||||
|
|
||||||
|
<!-- =======================================================
|
||||||
|
* Template Name: NiceAdmin - v2.2.0
|
||||||
|
* Template URL: https://bootstrapmade.com/nice-admin-bootstrap-admin-html-template/
|
||||||
|
* Author: BootstrapMade.com
|
||||||
|
* License: https://bootstrapmade.com/license/
|
||||||
|
======================================================== -->
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
|
||||||
|
<main>
|
||||||
|
|
||||||
|
<section class="container mt-3">
|
||||||
|
<div class="row">
|
||||||
|
|
||||||
|
<div class="col">
|
||||||
|
<nav class="header-nav ms-auto">
|
||||||
|
<ul class="d-flex align-items-right">
|
||||||
|
<li class="nav-item">
|
||||||
|
{% if not rols and user.is_anonymous %}
|
||||||
|
<button class="btn btn-primary" data-bs-toggle="modal" data-bs-target="#validateModal">Validate</button>
|
||||||
|
{% else %}
|
||||||
|
<button class="btn btn-primary" id="buttonRol" data-bs-toggle="modal" data-bs-target="#rolsModal">Select your role</button>
|
||||||
|
<a class="btn btn-primary" href="{{ url_for('core.logout') }}?next={{ path }}">Logout</a>
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
{% if rol %}
|
||||||
|
<br />Current Role: {{ rol }}
|
||||||
|
{% endif %}
|
||||||
|
</nav>
|
||||||
|
<div class="col-xl-12">
|
||||||
|
|
||||||
|
<div class="card">
|
||||||
|
<div class="card-body">
|
||||||
|
<h3 class="nav-link mt-5" style="color: #993365">{{ device_real.type }} - {{ device_real.verbose_name }}</h3>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Details</h5>
|
||||||
|
{% if manuals.details %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
{% if manuals.details.logo %}
|
||||||
|
<img style="max-width: 50px; margin-right: 15px;" src="{{ manuals.details.logo }}" />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{% if manuals.details.image %}
|
||||||
|
<img style="width: 100px;" src="{{ manuals.details.image }}" />
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Type
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.type or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Manufacturer
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.manufacturer or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Model
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.model or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Part Number
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.part_number or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Serial Number
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_abstract.serial_number and device_abstract.serial_number.upper() or '- not detected -' }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Usody Identifier (DHID)
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.dhid }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Inventory Identifier (PHID)
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
{{ device_real.phid() }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Device Identifier (CHID):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=device_abstract.chid) }}"><small class="text-muted">{{ device_abstract.chid }}</small></a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Manufacturer DPP:
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% if last_dpp %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Last Digital Passport (Last Dpp):
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=last_dpp.key) }}"><small class="text-muted">{{ last_dpp.key }}</small></a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Last Digital Passport (Last Dpp):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
- not detected -
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% if before_dpp %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Before Digital Passport (Before Dpp):
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<a href="{{ url_for('did.did', id_dpp=before_dpp.key) }}"><small class="text-muted">{{ before_dpp.key }}</small></a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
Before Digital Passport (Before Dpp):
|
||||||
|
</div>
|
||||||
|
<div class="col">
|
||||||
|
- not detected -
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row mt-3">
|
||||||
|
<div class="col-12">
|
||||||
|
<h5 class="card-title">Components</h5>
|
||||||
|
<div class="row">
|
||||||
|
{% if components %}
|
||||||
|
<div class="list-group col">
|
||||||
|
{% for component in components|sort(attribute='type') %}
|
||||||
|
<div class="list-group-item">
|
||||||
|
<div class="d-flex w-100 justify-content-between">
|
||||||
|
<h5 class="mb-1">{{ component.type }}</h5>
|
||||||
|
<small class="text-muted">{{ component.created.strftime('%H:%M %d-%m-%Y') }}</small>
|
||||||
|
</div>
|
||||||
|
<p class="mb-1">
|
||||||
|
Manufacturer: {{ component.manufacturer or '- not detected -' }}<br />
|
||||||
|
Model: {{ component.model or '- not detected -' }}<br />
|
||||||
|
{% if rol %}
|
||||||
|
Serial: {{ component.serial_number and component.serial_number.upper() or '- not detected -' }}<br />
|
||||||
|
{% endif %}
|
||||||
|
{% if component.type in ['HardDrive', 'SolidStateDrive'] %}
|
||||||
|
Chid:
|
||||||
|
<small class="text-muted">
|
||||||
|
{{ component.chid }}
|
||||||
|
</small>
|
||||||
|
{% endif %}
|
||||||
|
</p>
|
||||||
|
<small class="text-muted">
|
||||||
|
{% if component.type in ['RamModule', 'HardDrive', 'SolidStateDrive'] %}
|
||||||
|
{{ component.size }}MB
|
||||||
|
{% endif %}
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="list-group col">
|
||||||
|
<div class="list-group-item">
|
||||||
|
- not detected -
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
</main>
|
||||||
|
<!-- ======= Footer ======= -->
|
||||||
|
<div class="container">
|
||||||
|
<div class="row">
|
||||||
|
<div class="col">
|
||||||
|
<footer class="footer">
|
||||||
|
<div class="copyright">
|
||||||
|
© Copyright <strong><span>Usody</span></strong>. All Rights Reserved
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
<a href="https://help.usody.com/en/" target="_blank">Help</a> |
|
||||||
|
<a href="https://www.usody.com/legal/privacy-policy" target="_blank">Privacy</a> |
|
||||||
|
<a href="https://www.usody.com/legal/terms" target="_blank">Terms</a>
|
||||||
|
</div>
|
||||||
|
<div class="credits">
|
||||||
|
DeviceHub
|
||||||
|
</div>
|
||||||
|
</footer><!-- End Footer -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if user.is_anonymous and not rols %}
|
||||||
|
<div class="modal fade" id="validateModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Validate as <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<a class="btn btn-primary" type="button"
|
||||||
|
href="{{ url_for('core.login') }}?next={{ path }}">
|
||||||
|
User of system
|
||||||
|
</a>
|
||||||
|
{% if oidc %}
|
||||||
|
<br />
|
||||||
|
<a class="btn btn-primary mt-3" type="button" href="{{ url_for('oidc.login_other_inventory') }}?next={{ path }}">
|
||||||
|
User of other inventory
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer"></div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
<div class="modal fade" id="rolsModal" tabindex="-1" style="display: none;" aria-hidden="true">
|
||||||
|
<div class="modal-dialog modal-dialog-centered">
|
||||||
|
<div class="modal-content">
|
||||||
|
|
||||||
|
<form action="{{ path }}" method="get">
|
||||||
|
<div class="modal-header">
|
||||||
|
<h5 class="modal-title">Select your Role <span id="title-action"></span></h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-body">
|
||||||
|
<select name="rol">
|
||||||
|
{% for k, v in rols %}
|
||||||
|
<option value="{{ k }}" {% if v==rol %}selected=selected{% endif %}>{{ v }}</option>
|
||||||
|
{% endfor %}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="modal-footer">
|
||||||
|
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||||
|
<input type="submit" class="btn btn-primary" value="Send" />
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</body>
|
||||||
|
|
||||||
|
<!-- Custom Code -->
|
||||||
|
{% if not user.is_anonymous and not rol %}
|
||||||
|
<script>
|
||||||
|
$(document).ready(() => {
|
||||||
|
$("#buttonRol").click();
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</html>
|
|
@ -0,0 +1,289 @@
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import flask
|
||||||
|
import requests
|
||||||
|
from ereuseapi.methods import API
|
||||||
|
from flask import Blueprint
|
||||||
|
from flask import current_app as app
|
||||||
|
from flask import g, render_template, request, session
|
||||||
|
from flask.json import jsonify
|
||||||
|
from flask.views import View
|
||||||
|
|
||||||
|
from ereuse_devicehub import __version__
|
||||||
|
from ereuse_devicehub.modules.dpp.models import Dpp, ALGORITHM
|
||||||
|
from ereuse_devicehub.resources.device.models import Device
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
did = Blueprint('did', __name__, url_prefix='/did', template_folder='templates')
|
||||||
|
|
||||||
|
|
||||||
|
class DidView(View):
    """Public DID/DPP resolver view.

    Resolves ``/did/<id_dpp>`` where ``id_dpp`` is either a device CHID or
    a full DPP key of the form ``<chid>:<hash>``.  Renders a role-specific
    HTML template for browsers and a JSON payload for API clients.
    """

    methods = ['GET', 'POST']
    # Default template for unauthenticated visitors; get_template() swaps
    # it for a role-specific one once a rol has been resolved.
    template_name = 'anonymous.html'

    def dispatch_request(self, id_dpp):
        """Entry point: build the rendering context and reply.

        :param id_dpp: CHID or ``chid:hash`` DPP identifier from the URL.
        :return: JSON payload when the client negotiated JSON, otherwise
            the rendered role-specific template.
        """
        self.dpp = None
        self.device = None
        self.get_ids(id_dpp)

        self.context = {
            'version': __version__,
            'oidc': 'oidc' in app.blueprints.keys(),
            'user': g.user,
            'path': request.path,
            'last_dpp': None,
            'before_dpp': None,
            'rols': [],
            'rol': None,
        }
        self.get_rols()
        self.get_rol()
        self.get_device()
        self.get_last_dpp()
        self.get_before_dpp()

        if self.accept_json():
            return jsonify(self.get_result())

        # Manuals are only needed for the HTML representation.
        self.get_manuals()
        self.get_template()

        return render_template(self.template_name, **self.context)

    def get_template(self):
        """Swap ``template_name`` for the template matching the current rol."""
        rol = self.context.get('rol')
        if not rol:
            return

        tlmp = {
            "isOperator": "operator.html",
            "isVerifier": "verifier.html",
            "operator": "operator.html",
            "Operator": "operator.html",
            "verifier": "verifier.html",
            "Verifier": "verifier.html",
        }
        self.template_name = tlmp.get(rol, self.template_name)

    def accept_json(self):
        """Return True when the client asked for a JSON response.

        Substring checks so e.g. ``application/json`` and
        ``application/ld+json`` both match.
        """
        # '' instead of [] as the default: headers are strings, and the
        # membership test below is a substring check.
        if 'json' in request.headers.get('Accept', ''):
            return True
        if "application/json" in request.headers.get("Content-Type", ''):
            return True

        return False

    def get_ids(self, id_dpp):
        """Split the URL identifier into ``self.chid`` and ``self.id_dpp``.

        A plain CHID leaves ``self.id_dpp`` as None; a ``chid:hash`` pair
        keeps the full key in ``self.id_dpp`` and the CHID part in
        ``self.chid``.
        """
        self.id_dpp = None
        self.chid = id_dpp

        if len(id_dpp.split(":")) == 2:
            self.id_dpp = id_dpp
            self.chid = id_dpp.split(':')[0]

    def get_rols(self):
        """Populate ``context['rols']`` with the DLT roles of the user.

        :return: list of ``(key, value)`` rol pairs, or ``[]`` when the
            user is anonymous or the DLT is unavailable.
        """
        rols = session.get('rols')
        if not g.user.is_authenticated and not rols:
            return []

        # [('', '')] is the "no rols" placeholder stored in the session.
        if rols and rols != [('', '')]:
            self.context['rols'] = rols

        if 'dpp' not in app.blueprints.keys():
            return []

        if not session.get('token_dlt'):
            return []

        _role = g.user.get_rols_dlt()

        if not _role:
            return []
        self.context['rols'] = _role
        return _role

    def get_rol(self):
        """Resolve the active rol from the rols list or the query string."""
        rols = self.context.get('rols', [])
        # A single available rol is selected automatically (its key).
        rol = len(rols) == 1 and rols[0][0] or None
        if 'rol' in request.args and not rol:
            # NOTE(review): this yields the *value* of the pair while the
            # single-rol branch yields the *key* — confirm the templates
            # expect this asymmetry.
            rol = dict(rols).get(request.args.get('rol'))
        self.context['rol'] = rol

    def get_device(self):
        """Load the device (and optionally its DPP) and fill the context.

        Aborts with 404 when neither the DPP key nor the CHID resolves to
        an active device.
        """
        if self.id_dpp:
            # first() + explicit 404 instead of one(): an unknown DPP key
            # is a client error, not a server error.
            self.dpp = Dpp.query.filter_by(key=self.id_dpp).first()
            if not self.dpp:
                return flask.abort(404)
            device = self.dpp.device
        else:
            device = Device.query.filter_by(chid=self.chid, active=True).first()

        if not device:
            return flask.abort(404)

        # A placeholder pairs an abstract (snapshot) device with the real
        # (physical) one; fall back to the looked-up device when unpaired.
        placeholder = device.binding or device.placeholder
        device_abstract = placeholder and placeholder.binding or device
        device_real = placeholder and placeholder.device or device
        self.device = device_abstract
        components = self.device.components
        if self.dpp:
            # A concrete DPP pins the component list to its snapshot.
            components = self.dpp.snapshot.components

        self.context.update(
            {
                'placeholder': placeholder,
                'device': self.device,
                'device_abstract': device_abstract,
                'device_real': device_real,
                'components': components,
            }
        )

    def get_last_dpp(self):
        """Store and return the most recent DPP of the device ('' if none)."""
        dpps = sorted(self.device.dpps, key=lambda x: x.created)
        self.context['last_dpp'] = dpps and dpps[-1] or ''
        return self.context['last_dpp']

    def get_before_dpp(self):
        """Store and return the DPP immediately preceding ``self.dpp``."""
        if not self.dpp:
            self.context['before_dpp'] = ''
            return ''

        dpps = sorted(self.device.dpps, key=lambda x: x.created)
        before_dpp = ''
        for dpp in dpps:
            if dpp == self.dpp:
                break
            before_dpp = dpp

        self.context['before_dpp'] = before_dpp
        return before_dpp

    def get_result(self):
        """Build the JSON payload for API clients.

        With a concrete DPP, return that single passport; otherwise return
        every passport of the device.
        """
        components = []
        data = {
            'document': {},
            'dpp': self.id_dpp,
            'algorithm': ALGORITHM,
            'components': components,
            'manufacturer DPP': '',
        }
        result = {
            '@context': ['https://ereuse.org/dpp0.json'],
            'data': data,
        }

        if self.dpp:
            data['document'] = self.dpp.snapshot.json_hw
            last_dpp = self.get_last_dpp()
            url_last = ''
            if last_dpp:
                url_last = 'https://{host}/{did}'.format(
                    did=last_dpp.key, host=app.config.get('HOST')
                )
            data['url_last'] = url_last

            for c in self.dpp.snapshot.components:
                components.append({c.type: c.chid})
            return result

        dpps = []
        for d in self.device.dpps:
            rr = {
                'dpp': d.key,
                'document': d.snapshot.json_hw,
                'algorithm': ALGORITHM,
                'manufacturer DPP': '',
            }
            dpps.append(rr)
        return {
            '@context': ['https://ereuse.org/dpp0.json'],
            'data': dpps,
        }

    def get_manuals(self):
        """Fetch repair/spec manuals for the device from the manuals service."""
        manuals = {
            'ifixit': [],
            'icecat': [],
            'details': {},
            'laer': [],
            'energystar': {},
        }
        try:
            params = {
                "manufacturer": self.device.manufacturer,
                "model": self.device.model,
            }
            self.params = json.dumps(params)
            manuals['ifixit'] = self.request_manuals('ifixit')
            manuals['icecat'] = self.request_manuals('icecat')
            manuals['laer'] = self.request_manuals('laer')
            manuals['energystar'] = self.request_manuals('energystar') or {}
            if manuals['icecat']:
                manuals['details'] = manuals['icecat'][0]
        except Exception as err:
            # Manuals are best-effort; never break the page over them.
            logger.error("Error: %s", err)

        self.context['manuals'] = manuals
        self.parse_energystar()

    def parse_energystar(self):
        """Average the Energy Star metrics into one value per field.

        Field names are defined in:
        https://dev.socrata.com/foundry/data.energystar.gov/j7nq-iepp
        """
        if not self.context.get('manuals', {}).get('energystar'):
            return

        energy_types = [
            'functional_adder_allowances_kwh',
            'tec_allowance_kwh',
            'long_idle_watts',
            'short_idle_watts',
            'off_mode_watts',
            'sleep_mode_watts',
            'tec_of_model_kwh',
            'tec_requirement_kwh',
            'work_off_mode_watts',
            'work_weighted_power_of_model_watts',
        ]
        energy = {field: [] for field in energy_types}

        for e in self.context['manuals']['energystar']:
            for field in energy_types:
                for k, v in e.items():
                    if not v:
                        continue
                    # NOTE(review): substring match, so 'off_mode_watts'
                    # also collects 'work_off_mode_watts' values — confirm
                    # this aggregation is intended.
                    if field in k:
                        energy[field].append(v)

        # Replace each list of readings with its rounded average (0 when
        # no readings were found).
        for k, v in energy.items():
            if not v:
                energy[k] = 0
                continue
            tt = sum([float(i) for i in v])
            energy[k] = round(tt / len(v), 2)

        self.context['manuals']['energystar'] = energy

    def request_manuals(self, prefix):
        """POST the device params to the manuals service endpoint *prefix*.

        :return: decoded JSON response, or ``{}`` on any failure.
        """
        url = app.config['URL_MANUALS']
        if not url:
            return {}

        # Timeout so a stalled manuals service cannot hang the request;
        # a timeout raises and is absorbed by get_manuals()'s best-effort
        # handler.
        res = requests.post(url + "/" + prefix, self.params, timeout=30)
        if res.status_code > 299:
            return {}

        try:
            response = res.json()
        except ValueError:
            # Body was not valid JSON.
            response = {}

        return response
|
||||||
|
|
||||||
|
|
||||||
|
# Resolve a DID either by CHID or by full DPP key (<chid>:<hash>).
did.add_url_rule('/<string:id_dpp>', view_func=DidView.as_view('did'))
|
|
@ -0,0 +1,74 @@
|
||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# path to migration scripts
|
||||||
|
script_location = migrations
|
||||||
|
|
||||||
|
# template used to generate migration files
|
||||||
|
# file_template = %%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# timezone to use when rendering the date
|
||||||
|
# within the migration file as well as the filename.
|
||||||
|
# string value is passed to dateutil.tz.gettz()
|
||||||
|
# leave blank for localtime
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the
|
||||||
|
# "slug" field
|
||||||
|
#truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version location specification; this defaults
|
||||||
|
# to alembic/versions. When using multiple version
|
||||||
|
# directories, initial revisions must be specified with --version-path
|
||||||
|
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||||
|
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
|
@ -0,0 +1,62 @@
|
||||||
|
import json
|
||||||
|
import requests
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from ereuseapi.methods import API
|
||||||
|
from flask import g, current_app as app
|
||||||
|
from ereuseapi.methods import register_user
|
||||||
|
from ereuse_devicehub.db import db
|
||||||
|
from ereuse_devicehub.resources.user.models import User
|
||||||
|
from ereuse_devicehub.resources.agent.models import Person
|
||||||
|
from ereuse_devicehub.modules.dpp.utils import encrypt
|
||||||
|
|
||||||
|
|
||||||
|
class RegisterUserDlt:
    """CLI command that imports DLT-registered users into Devicehub.

    Registers the ``dlt_register_user`` command on the app's CLI; the
    command reads a JSON dataset file and creates/updates one user per
    entry.  Each user may hold DLT roles such as "operator", "verifier"
    or "witness".
    """

    def __init__(self, app) -> None:
        super().__init__()
        self.app = app
        # Renamed from `help` to avoid shadowing the builtin.
        help_text = "Insert users that are in Dlt with params: path of data set file"
        self.app.cli.command('dlt_register_user', short_help=help_text)(self.run)

    @click.argument('dataset_file')
    def run(self, dataset_file):
        """Load *dataset_file* (JSON list) and register every user in it."""
        with open(dataset_file) as f:
            dataset = json.loads(f.read())

        for d in dataset:
            self.add_user(d)

        # Single commit so the import is all-or-nothing.
        db.session.commit()

    def add_user(self, data):
        """Create or update one user from a dataset entry.

        :param data: dict with ``email``, ``password`` and DLT ``data``
            (which must contain an ``api_token``).
        """
        email = data.get("email")
        name = email.split('@')[0]
        password = data.get("password")
        ethereum = {"data": data.get("data")}

        user = User.query.filter_by(email=email).first()

        if not user:
            user = User(email=email, password=password)
            user.individuals.add(Person(name=name))

        # The DLT keys are stored encrypted with the user's password.
        data_eth = json.dumps(ethereum)
        user.api_keys_dlt = encrypt(password, data_eth)

        roles = []
        token_dlt = ethereum["data"]["api_token"]
        api_dlt = app.config.get('API_DLT')
        api = API(api_dlt, token_dlt, "ethereum")
        result = api.check_user_roles()

        if result.get('Status') == 200:
            # Default '' so a missing 'status' cannot raise
            # "TypeError: argument of type 'NoneType' is not iterable".
            if 'Success' in result.get('Data', {}).get('status', ''):
                rols = result.get('Data', {}).get('data', {})
                roles = [(k, k) for k, v in rols.items() if v]

        user.rols_dlt = json.dumps(roles)

        db.session.add(user)
|
|
@ -0,0 +1 @@
|
||||||
|
Generic single-database configuration.
|
|
@ -0,0 +1,89 @@
|
||||||
|
from __future__ import with_statement

from logging.config import fileConfig

from alembic import context
from sqlalchemy import create_engine

from ereuse_devicehub.config import DevicehubConfig

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# target_metadata = None
# All Devicehub models derive from Thing, so its MetaData covers the
# full schema for autogenerate.
from ereuse_devicehub.resources.models import Thing

target_metadata = Thing.metadata


# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
|
|
||||||
|
|
||||||
|
def get_url():
    """Return the database URL taken from the Devicehub config
    (instead of the alembic.ini / environment)."""
    # url = os.environ["DATABASE_URL"]
    return DevicehubConfig.SQLALCHEMY_DATABASE_URI
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL — no
    Engine — so no DBAPI needs to be installed.  Calls to
    context.execute() emit the given string to the script output.
    """
    context.configure(
        url=get_url(),
        target_metadata=target_metadata,
        literal_binds=True,
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine from the configured URL and binds a live
    connection to the Alembic context before running migrations.
    """
    # connectable = engine_from_config(
    #     config.get_section(config.config_ini_section),
    #     prefix="sqlalchemy.",
    #     poolclass=pool.NullPool,
    # )

    engine = create_engine(get_url())

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # Dedicated version table so this module's migration history
            # does not collide with the main application's history.
            version_table='alembic_module_dpp_version',
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
# Entry point: Alembic executes this module directly and selects
# offline (SQL script emission) vs online (live connection) mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
|
@ -0,0 +1,33 @@
|
||||||
|
"""${message}
|
||||||
|
|
||||||
|
Revision ID: ${up_revision}
|
||||||
|
Revises: ${down_revision | comma,n}
|
||||||
|
Create Date: ${create_date}
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
import sqlalchemy_utils
|
||||||
|
import citext
|
||||||
|
import teal
|
||||||
|
${imports if imports else ""}
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = ${repr(up_revision)}
|
||||||
|
down_revision = ${repr(down_revision)}
|
||||||
|
branch_labels = ${repr(branch_labels)}
|
||||||
|
depends_on = ${repr(depends_on)}
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
|
||||||
|
INV = context.get_x_argument(as_dictionary=True).get('inventory')
|
||||||
|
if not INV:
|
||||||
|
raise ValueError("Inventory value is not specified")
|
||||||
|
return INV
|
||||||
|
|
||||||
|
def upgrade():
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
|
||||||
|
${downgrades if downgrades else "pass"}
|
|
@ -0,0 +1,35 @@
|
||||||
|
"""add api_keys_dlt to user
|
||||||
|
|
||||||
|
Revision ID: 4b7f77f121bf
|
||||||
|
Revises:
|
||||||
|
Create Date: 2022-12-01 10:35:36.795035
|
||||||
|
|
||||||
|
"""
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '4b7f77f121bf'
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
    """Return the inventory schema name given on the command line
    (``alembic -x inventory=<name>``); raise ValueError if absent."""
    inventory = context.get_x_argument(as_dictionary=True).get('inventory')
    if inventory:
        return inventory
    raise ValueError("Inventory value is not specified")
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    """Add the nullable ``api_keys_dlt`` column (encrypted DLT
    credentials) to the shared ``common.user`` table."""
    api_keys_column = sa.Column(
        'api_keys_dlt', type_=citext.CIText(), nullable=True
    )
    op.add_column('user', api_keys_column, schema='common')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
    # Reverse of upgrade(): drop the encrypted-DLT-credentials column.
    op.drop_column('user', 'api_keys_dlt', schema='common')
|
|
@ -0,0 +1,138 @@
|
||||||
|
"""add digital passport dpp
|
||||||
|
|
||||||
|
Revision ID: 8334535d56fa
|
||||||
|
Revises: 4b7f77f121bf
|
||||||
|
Create Date: 2023-01-19 12:01:54.102326
|
||||||
|
|
||||||
|
"""
|
||||||
|
import citext
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import context, op
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '8334535d56fa'
|
||||||
|
down_revision = '4b7f77f121bf'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_inv():
    """Return the inventory schema name given on the command line
    (``alembic -x inventory=<name>``); raise ValueError if absent."""
    inventory = context.get_x_argument(as_dictionary=True).get('inventory')
    if inventory:
        return inventory
    raise ValueError("Inventory value is not specified")
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade():
    """Create the ``proof`` and ``dpp`` tables (with their id
    sequences) and add the dpp-related columns to ``snapshot``.

    Per-inventory objects live in the schema named by ``get_inv()``;
    users are shared in the ``common`` schema.
    """
    op.create_table(
        'proof',
        sa.Column(
            'updated',
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text('CURRENT_TIMESTAMP'),
            nullable=False,
            comment='The last time Devicehub recorded a change for \n this thing.\n ',
        ),
        sa.Column(
            'created',
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text('CURRENT_TIMESTAMP'),
            nullable=False,
            comment='When Devicehub created this.',
        ),
        # ids come from the proof_seq sequence created below (the
        # model presumably wires it up; no server_default here).
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('type', sa.Unicode(), nullable=False),
        sa.Column('documentId', citext.CIText(), nullable=True),
        sa.Column('documentSignature', citext.CIText(), nullable=True),
        sa.Column('normalizeDoc', citext.CIText(), nullable=True),
        sa.Column('timestamp', sa.BigInteger(), nullable=False),
        sa.Column('device_id', sa.BigInteger(), nullable=False),
        sa.Column('action_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('issuer_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ['action_id'],
            [f'{get_inv()}.action.id'],
        ),
        sa.ForeignKeyConstraint(
            ['device_id'],
            [f'{get_inv()}.device.id'],
        ),
        # Issuers are users, which live in the shared 'common' schema.
        sa.ForeignKeyConstraint(
            ['issuer_id'],
            ['common.user.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        schema=f'{get_inv()}',
    )
    # op.create_index(op.f('ix_proof_created'), 'proof', ['created'], unique=False, schema=f'{get_inv()}')
    # op.create_index(op.f('ix_proof_timestamp'), 'proof', ['timestamp'], unique=False, schema=f'{get_inv()}')
    # Nullable dpp columns on snapshot: existing rows stay valid.
    op.add_column(
        'snapshot',
        sa.Column('phid_dpp', citext.CIText(), nullable=True),
        schema=f'{get_inv()}',
    )
    op.add_column(
        'snapshot',
        sa.Column('json_wb', citext.CIText(), nullable=True),
        schema=f'{get_inv()}',
    )
    op.add_column(
        'snapshot',
        sa.Column('json_hw', citext.CIText(), nullable=True),
        schema=f'{get_inv()}',
    )

    op.create_table(
        'dpp',
        sa.Column(
            'updated',
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text('CURRENT_TIMESTAMP'),
            nullable=False,
            comment='The last time Devicehub recorded a change for \n this thing.\n ',
        ),
        sa.Column(
            'created',
            sa.TIMESTAMP(timezone=True),
            server_default=sa.text('CURRENT_TIMESTAMP'),
            nullable=False,
            comment='When Devicehub created this.',
        ),
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('documentId', citext.CIText(), nullable=True),
        sa.Column('documentSignature', citext.CIText(), nullable=True),
        sa.Column('timestamp', sa.BigInteger(), nullable=False),
        sa.Column('device_id', sa.BigInteger(), nullable=False),
        sa.Column('snapshot_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('issuer_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(
            ['snapshot_id'],
            [f'{get_inv()}.snapshot.id'],
        ),
        sa.ForeignKeyConstraint(
            ['device_id'],
            [f'{get_inv()}.device.id'],
        ),
        sa.ForeignKeyConstraint(
            ['issuer_id'],
            ['common.user.id'],
        ),
        sa.Column('key', citext.CIText(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        schema=f'{get_inv()}',
    )
    # Sequences used to allocate proof.id / dpp.id values.
    op.execute(f"CREATE SEQUENCE {get_inv()}.proof_seq START 1;")
    op.execute(f"CREATE SEQUENCE {get_inv()}.dpp_seq START 1;")
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade():
    """Reverse of upgrade(): drop the dpp/proof tables, their id
    sequences and the snapshot columns."""
    # Tables first (they reference the sequences' values), then the
    # sequences themselves.
    op.drop_table('dpp', schema=f'{get_inv()}')
    op.drop_table('proof', schema=f'{get_inv()}')
    op.execute(f"DROP SEQUENCE {get_inv()}.proof_seq;")
    op.execute(f"DROP SEQUENCE {get_inv()}.dpp_seq;")
    # op.drop_index(op.f('ix_proof_created'), table_name='proof', schema=f'{get_inv()}')
    # op.drop_index(op.f('ix_proof_timestamp'), table_name='proof', schema=f'{get_inv()}')
    op.drop_column('snapshot', 'phid_dpp', schema=f'{get_inv()}')
    op.drop_column('snapshot', 'json_wb', schema=f'{get_inv()}')
    op.drop_column('snapshot', 'json_hw', schema=f'{get_inv()}')
|
Some files were not shown because too many files have changed in this diff. Show More
Reference in New Issue