diff --git a/.env.example b/.env.example index 7ecc774ed0..6cf1689ef7 100644 --- a/.env.example +++ b/.env.example @@ -1,6 +1,10 @@ -DATABASE_URL=postgresql://john:start@127.0.0.1:5432/oevent +DATABASE_URL=postgresql://open_event_user:opev_pass@127.0.0.1:5432/oevent INTEGRATE_SOCKETIO=false -TEST_DATABASE_URL=postgresql://open_event_user:test@127.0.0.1:5432/opev_test +TEST_DATABASE_URL=postgresql://open_event_user:opev_pass@127.0.0.1:5432/opev_test APP_CONFIG=config.DevelopmentConfig ENABLE_ELASTICSEARCH=true ELASTICSEARCH_HOST=localhost:9200 + +POSTGRES_USER=open_event_user +POSTGRES_PASSWORD=opev_pass +POSTGRES_DB=open_event diff --git a/.github/auto_label.yml b/.github/auto_label.yml new file mode 100644 index 0000000000..a6c30a27be --- /dev/null +++ b/.github/auto_label.yml @@ -0,0 +1,13 @@ +labels: [fix, chore] +labelMapping: + feat: [feature] + refactor: [chore, refactor] + chore(refactor): [chore, refactor] + chore(deps): [dependencies] + docs: [docs] + chore(docs): [docs] + docs(api): [docs, api] + test: [testing] + chore(ci): [chore, tools, ci] + chore(tools): [chore, tools] + chore(release): [chore, release] diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 0000000000..f85f6dd8a1 --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,23 @@ +name-template: v$NEXT_PATCH_VERSION 🌈 +tag-template: v$NEXT_PATCH_VERSION +categories: + - title: 🚀 Features + label: feature + - title: 🐛 Bug Fixes + label: fix + - title: 🧰 Maintenance + label: chore + - title: 🕮 Documentation + label: docs + - title: ⚙ Dependencies and Libraries + label: dependencies +change-template: '- $TITLE (#$NUMBER) - @$AUTHOR' +template: | + ## Changes + + $CHANGES + + ## Contributors + + Thanks a lot to our contributors for making this release possible: + $CONTRIBUTORS diff --git a/.gitignore b/.gitignore index f038f8b8df..03ca15100f 100644 --- a/.gitignore +++ b/.gitignore @@ -52,3 +52,6 @@ jobs.sqlite # for -e pip packages src + +#tickets 
+generated/ diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000000..7c672e2077 --- /dev/null +++ b/Pipfile @@ -0,0 +1,73 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +pycparser = "==2.14" +flask-script = "<2.1,>=2.0.5" +requests-oauthlib = ">=0.7.0,<1" +icalendar = "<4,>=3.11" +requests = {version = ">=2.20.0,<3", extras = ["security"]} +"psycopg2-binary" = "*" +itsdangerous = "<0.30,>=0.24" +humanize = ">=0.5.1,<0.6" +celery = "<4,>=3.1.23" +redis = ">=2.10.5,<3" +amqp = ">=1.4.9,<2.0" +gunicorn = "<20,>=19.6.0" +boto = ">=2.45.0,<3" +"geoip2" = ">=2.4.2,<3" +arrow = ">=0.10.0,<1" +unicode-slugify = ">=0.1.3,<1" +bleach = ">=2.1.3" +stripe = "<2,>=1.44.0" +"xhtml2pdf" = "*" +forex-python = "<1,>=0.3.1" +"oauth2" = "<2,>=1.9.0.post1" +qrcode = "<6,>=5.3" +python-magic = "<1,>=0.4.12" +python-geoip = "<2,>=1.2" +"marrow.mailer" = "<5,>=4.0.2" +python-pentabarf-xml = "==0.19" +"python-geoip-geolite2" = "*" +pycountry = "*" +pytz = "*" +diff-match-patch = "*" +blinker = "*" +envparse = "*" +flask-admin = "*" +google-compute-engine = "*" +raven = {version = "*", extras = ["flask"]} +healthcheck = "*" +elasticsearch-dsl = "*" +paypalrestsdk = "*" +eventlet = "*" +Flask = ">=1.0,<1.1" +Flask-SQLAlchemy = ">=2.1,<2.2" +Flask-Migrate = ">=2.0.0,<3" +Flask-Login = "<1,>=0.4" +Flask-Scrypt = ">=0.1.3.6,<0.2" +Flask-JWT = ">=0.3.2,<0.4" +SQLAlchemy-Utils = ">=0.32.12,<0.33" +APScheduler = ">=3.3.0,<4" +Pillow = "*" +SQLAlchemy-Continuum = "<2,>=1.2.4" +Flask-Caching = ">=1.4.0,<1.5" +Flask-Cors = ">=3.0.2,<3.1" +Flask-REST-JSONAPI = {editable = true, ref = "shubhamp-master", git = "https://github.com/fossasia/flask-rest-jsonapi.git"} +WTForms = "*" +factory_boy = "*" +Flask-Redis = "*" +SQLAlchemy = "==1.1.15" +Flask-Elasticsearch = "*" + +[dev-packages] +nose = "*" +pylint = "*" +"pep8" = "*" +coverage = "*" +dredd-hooks = "*" + +[requires] +python_version = "3.6" diff --git a/Pipfile.lock b/Pipfile.lock new 
file mode 100644 index 0000000000..95e42a0fd1 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,1090 @@ +{ + "_meta": { + "hash": { + "sha256": "6f38a0b015a06da89ed6ec519a33cff5a67c801a3e3500825d23bee160d0a23b" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.6" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "alembic": { + "hashes": [ + "sha256:828dcaa922155a2b7166c4f36ec45268944e4055c86499bd14319b4c8c0094b7" + ], + "version": "==1.0.10" + }, + "amqp": { + "hashes": [ + "sha256:2dea4d16d073c902c3b89d9b96620fb6729ac0f7a923bbc777cb4ad827c0c61a", + "sha256:e0ed0ce6b8ffe5690a2e856c7908dc557e0e605283d6885dd1361d79f2928908" + ], + "index": "pypi", + "version": "==1.4.9" + }, + "anyjson": { + "hashes": [ + "sha256:37812d863c9ad3e35c0734c42e0bf0320ce8c3bed82cd20ad54cb34d158157ba" + ], + "version": "==0.3.3" + }, + "apscheduler": { + "hashes": [ + "sha256:8f56b888fdc9dc57dd18d79c124b5093a01e29144be84e3e99130600eea34260", + "sha256:e885b0f2ad5887a69ceffc5de39e2b2f34b4aa80521b79b1f2db911340e68a66" + ], + "index": "pypi", + "version": "==3.6.0" + }, + "arrow": { + "hashes": [ + "sha256:3397e5448952e18e1295bf047014659effa5ae8da6a5371d37ff0ddc46fa6872", + "sha256:6f54d9f016c0b7811fac9fb8c2c7fa7421d80c54dbdd75ffb12913c55db60b8a" + ], + "index": "pypi", + "version": "==0.13.1" + }, + "asn1crypto": { + "hashes": [ + "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87", + "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49" + ], + "version": "==0.24.0" + }, + "billiard": { + "hashes": [ + "sha256:204e75d390ef8f839c30a93b696bd842c3941916e15921745d05edc2a83868ab", + "sha256:23cb71472712e96bff3e0d45763b7b8a99e5040385fffb96816028352c255682", + "sha256:692a2a5a55ee39a42bcb7557930e2541da85df9ea81c6e24827f63b80cd39d0b", + "sha256:82041dbaa62f7fde1464d7ab449978618a38b241b40c0d31dafabb36446635dc", + 
"sha256:958fc9f8fd5cc9b936b2cb9d96f02aa5ec3613ba13ee7f089c77ff0bcc368fac", + "sha256:c0cbe8d45ba8d8213ad68ef9a1881002a151569c9424d551634195a18c3a4160", + "sha256:ccfe0419eb5e49f27ad35cf06e75360af903df6d576c66cb8073246d4e023e5c", + "sha256:d4d2fed1a251ea58eed47b48db3778ebb92f5ff4407dc91869c6f41c3a9249d0" + ], + "version": "==3.3.0.23" + }, + "bleach": { + "hashes": [ + "sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16", + "sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa" + ], + "index": "pypi", + "version": "==3.1.0" + }, + "blinker": { + "hashes": [ + "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6" + ], + "index": "pypi", + "version": "==1.4" + }, + "boto": { + "hashes": [ + "sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8", + "sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a" + ], + "index": "pypi", + "version": "==2.49.0" + }, + "celery": { + "hashes": [ + "sha256:5493e172ae817b81ba7d09443ada114886765a8ce02f16a56e6fac68d953a9b2", + "sha256:60211897aee321266ff043fe2b33eaac825dfe9f46843cf964fc97507a186334" + ], + "index": "pypi", + "version": "==3.1.26.post2" + }, + "certifi": { + "hashes": [ + "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5", + "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae" + ], + "version": "==2019.3.9" + }, + "cffi": { + "hashes": [ + "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774", + "sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d", + "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90", + "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b", + "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63", + "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45", + "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25", + 
"sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3", + "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b", + "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647", + "sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016", + "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4", + "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb", + "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753", + "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7", + "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9", + "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f", + "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8", + "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f", + "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc", + "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42", + "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3", + "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909", + "sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45", + "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d", + "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512", + "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff", + "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201" + ], + "version": "==1.12.3" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "click": { + "hashes": [ + "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", + 
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" + ], + "version": "==7.0" + }, + "cryptography": { + "hashes": [ + "sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1", + "sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705", + "sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6", + "sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1", + "sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8", + "sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151", + "sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d", + "sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659", + "sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537", + "sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e", + "sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb", + "sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c", + "sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9", + "sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5", + "sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad", + "sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a", + "sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460", + "sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd", + "sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6" + ], + "version": "==2.6.1" + }, + "diff-match-patch": { + "hashes": [ + "sha256:a809a996d0f09b9bbd59e9bbd0b71eed8c807922512910e05cbd3f9480712ddb" + ], + "index": "pypi", + "version": "==20181111" + }, + "distro": { + "hashes": [ + "sha256:362dde65d846d23baee4b5c058c8586f219b5a54be1cf5fc6ff55c4578392f57", + "sha256:eedf82a470ebe7d010f1872c17237c79ab04097948800029994fa458e52fb4b4" + ], + 
"version": "==1.4.0" + }, + "dnspython": { + "hashes": [ + "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01", + "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d" + ], + "version": "==1.16.0" + }, + "elasticsearch": { + "hashes": [ + "sha256:7546cc08e3899716e12fe67d12d7cfe9a64647014d1134b014c3c392b63cad42", + "sha256:aada5cfdc4a543c47098eb3aca6663848ef5d04b4324935ced441debc11ec98b" + ], + "version": "==6.3.1" + }, + "elasticsearch-dsl": { + "hashes": [ + "sha256:5f43196a3fd91b2eac90f7345e99f92c66004d85a1fd803cdecf756430827231", + "sha256:5f80b3b4a6e61db5d273bc57c32a80b2ddbc555afcc122c62c20440c355008be" + ], + "index": "pypi", + "version": "==6.3.1" + }, + "envparse": { + "hashes": [ + "sha256:4f3b9a27bb55d27f124eb4adf006fec05e4588891c9a054a183a112645056eb7" + ], + "index": "pypi", + "version": "==0.2.0" + }, + "eventlet": { + "hashes": [ + "sha256:c584163e006e613707e224552fafc63e4e0aa31d7de0ab18b481ac0b385254c8", + "sha256:d9d31a3c8dbcedbcce5859a919956d934685b17323fc80e1077cb344a2ffa68d" + ], + "index": "pypi", + "version": "==0.24.1" + }, + "factory-boy": { + "hashes": [ + "sha256:6f25cc4761ac109efd503f096e2ad99421b1159f01a29dbb917359dcd68e08ca", + "sha256:d552cb872b310ae78bd7429bf318e42e1e903b1a109e899a523293dfa762ea4f" + ], + "index": "pypi", + "version": "==2.11.1" + }, + "faker": { + "hashes": [ + "sha256:1c0a5e7bb54d2c54569986a27124715c83899e592d8d61d4e372dbff6c699573", + "sha256:60477f757a80f665bbe1fb3d1cfe5d205ec7b99d5240114de7b27b4c25d236ca" + ], + "version": "==1.0.7" + }, + "flask": { + "hashes": [ + "sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48", + "sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05" + ], + "index": "pypi", + "version": "==1.0.2" + }, + "flask-admin": { + "hashes": [ + "sha256:ca0be6ec11a6913b73f656c65c444ae5be416c57c75638dd3199376ce6bc7422" + ], + "index": "pypi", + "version": "==1.5.3" + }, + "flask-caching": { + "hashes": [ + 
"sha256:44fe827c6cc519d48fb0945fa05ae3d128af9a98f2a6e71d4702fd512534f227", + "sha256:e34f24631ba240e09fe6241e1bf652863e0cff06a1a94598e23be526bc2e4985" + ], + "index": "pypi", + "version": "==1.4.0" + }, + "flask-cors": { + "hashes": [ + "sha256:7ad56ee3b90d4955148fc25a2ecaa1124fc84298471e266a7fea59aeac4405a5", + "sha256:7e90bf225fdf163d11b84b59fb17594d0580a16b97ab4e1146b1fb2737c1cfec" + ], + "index": "pypi", + "version": "==3.0.7" + }, + "flask-elasticsearch": { + "hashes": [ + "sha256:5f288c275c3865532c6c8af71c1153c05c6d56a45ee1a7faf43dc49acc5f4a2f" + ], + "index": "pypi", + "version": "==0.2.5" + }, + "flask-jwt": { + "hashes": [ + "sha256:49c0672fbde0f1cd3374bd834918d28956e3c521c7e00089cdc5380d323bd0ad" + ], + "index": "pypi", + "version": "==0.3.2" + }, + "flask-login": { + "hashes": [ + "sha256:c815c1ac7b3e35e2081685e389a665f2c74d7e077cb93cecabaea352da4752ec" + ], + "index": "pypi", + "version": "==0.4.1" + }, + "flask-migrate": { + "hashes": [ + "sha256:a361578cb829681f860e4de5ed2c48886264512f0c16144e404c36ddc95ab49c", + "sha256:c24d105c5d6cc670de20f8cbfb909e04f4e04b8784d0df070005944de1f21549" + ], + "index": "pypi", + "version": "==2.4.0" + }, + "flask-redis": { + "hashes": [ + "sha256:56ebd5e69a42988e9a7a904f07b37cd625a7dbcdfbfe31797885c3c4a92516f5", + "sha256:ffb8e636c7af0226a3074860200602850a5a871169bc53fcde05b7d973aad714" + ], + "index": "pypi", + "version": "==0.3.0" + }, + "flask-rest-jsonapi": { + "editable": true, + "git": "https://github.com/fossasia/flask-rest-jsonapi.git", + "ref": "4b10b48e443a839bbff369c769198ede34fdbff3" + }, + "flask-script": { + "hashes": [ + "sha256:6425963d91054cfcc185807141c7314a9c5ad46325911bd24dcb489bd0161c65" + ], + "index": "pypi", + "version": "==2.0.6" + }, + "flask-scrypt": { + "hashes": [ + "sha256:166ec72cb137416fbb4b4f0f3985024c2459477e290d4a1eaf6b9c9eea3ffa55" + ], + "index": "pypi", + "version": "==0.1.3.6" + }, + "flask-sqlalchemy": { + "hashes": [ + 
"sha256:c5244de44cc85d2267115624d83faef3f9e8f088756788694f305a5d5ad137c5" + ], + "index": "pypi", + "version": "==2.1" + }, + "forex-python": { + "hashes": [ + "sha256:80cd5b8748177a0dd677fbbca1e246656de0bbb28382412b7aaa5aaa6f4aa2d3", + "sha256:df825e7033e8d24da14b76ca9fb090edd472c0b84699f9ef6a0969723b62514b" + ], + "index": "pypi", + "version": "==0.3.3" + }, + "geoip2": { + "hashes": [ + "sha256:a37ddac2d200ffb97c736da8b8ba9d5d8dc47da6ec0f162a461b681ecac53a14", + "sha256:f7ffe9d258e71a42cf622ce6350d976de1d0312b9f2fbce3975c7d838b57ecf0" + ], + "index": "pypi", + "version": "==2.9.0" + }, + "google-compute-engine": { + "hashes": [ + "sha256:358363a10169f890bac78cf9d7105132cdd2c1546fa8d9caa35c04a7cf7cfba7" + ], + "index": "pypi", + "version": "==2.8.13" + }, + "greenlet": { + "hashes": [ + "sha256:000546ad01e6389e98626c1367be58efa613fa82a1be98b0c6fc24b563acc6d0", + "sha256:0d48200bc50cbf498716712129eef819b1729339e34c3ae71656964dac907c28", + "sha256:23d12eacffa9d0f290c0fe0c4e81ba6d5f3a5b7ac3c30a5eaf0126bf4deda5c8", + "sha256:37c9ba82bd82eb6a23c2e5acc03055c0e45697253b2393c9a50cef76a3985304", + "sha256:51503524dd6f152ab4ad1fbd168fc6c30b5795e8c70be4410a64940b3abb55c0", + "sha256:8041e2de00e745c0e05a502d6e6db310db7faa7c979b3a5877123548a4c0b214", + "sha256:81fcd96a275209ef117e9ec91f75c731fa18dcfd9ffaa1c0adbdaa3616a86043", + "sha256:853da4f9563d982e4121fed8c92eea1a4594a2299037b3034c3c898cb8e933d6", + "sha256:8b4572c334593d449113f9dc8d19b93b7b271bdbe90ba7509eb178923327b625", + "sha256:9416443e219356e3c31f1f918a91badf2e37acf297e2fa13d24d1cc2380f8fbc", + "sha256:9854f612e1b59ec66804931df5add3b2d5ef0067748ea29dc60f0efdcda9a638", + "sha256:99a26afdb82ea83a265137a398f570402aa1f2b5dfb4ac3300c026931817b163", + "sha256:a19bf883b3384957e4a4a13e6bd1ae3d85ae87f4beb5957e35b0be287f12f4e4", + "sha256:a9f145660588187ff835c55a7d2ddf6abfc570c2651c276d3d4be8a2766db490", + "sha256:ac57fcdcfb0b73bb3203b58a14501abb7e5ff9ea5e2edfa06bb03035f0cff248", + 
"sha256:bcb530089ff24f6458a81ac3fa699e8c00194208a724b644ecc68422e1111939", + "sha256:beeabe25c3b704f7d56b573f7d2ff88fc99f0138e43480cecdfcaa3b87fe4f87", + "sha256:d634a7ea1fc3380ff96f9e44d8d22f38418c1c381d5fac680b272d7d90883720", + "sha256:d97b0661e1aead761f0ded3b769044bb00ed5d33e1ec865e891a8b128bf7c656" + ], + "version": "==0.4.15" + }, + "gunicorn": { + "hashes": [ + "sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", + "sha256:fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3" + ], + "index": "pypi", + "version": "==19.9.0" + }, + "healthcheck": { + "hashes": [ + "sha256:3b6e56dcaf9c5a52296e32d713e8f3bbb1b86ff88d4d06906b7a5105923a711c" + ], + "index": "pypi", + "version": "==1.3.3" + }, + "html5lib": { + "hashes": [ + "sha256:20b159aa3badc9d5ee8f5c647e5efd02ed2a66ab8d354930bd9ff139fc1dc0a3", + "sha256:66cb0dcfdbbc4f9c3ba1a63fdb511ffdbd4f513b2b6d81b80cd26ce6b3fb3736" + ], + "version": "==1.0.1" + }, + "httplib2": { + "hashes": [ + "sha256:23914b5487dfe8ef09db6656d6d63afb0cf3054ad9ebc50868ddc8e166b5f8e8", + "sha256:a18121c7c72a56689efbf1aef990139ad940fee1e64c6f2458831736cd593600" + ], + "version": "==0.12.3" + }, + "humanize": { + "hashes": [ + "sha256:a43f57115831ac7c70de098e6ac46ac13be00d69abbf60bdcac251344785bb19" + ], + "index": "pypi", + "version": "==0.5.1" + }, + "icalendar": { + "hashes": [ + "sha256:511156bafb4c67dd8bfb056744823fcff3105bb3ec641d9d5bb803f590e671c3", + "sha256:ad401c93b64d8e98f1e98b4e6c96c0023a05f5984d3bfc23ce9448986edbca54" + ], + "index": "pypi", + "version": "==3.12" + }, + "idna": { + "hashes": [ + "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", + "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" + ], + "version": "==2.8" + }, + "itsdangerous": { + "hashes": [ + "sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519" + ], + "index": "pypi", + "version": "==0.24" + }, + "jinja2": { + "hashes": [ + 
"sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", + "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" + ], + "index": "pypi", + "version": "==2.10.1" + }, + "kombu": { + "hashes": [ + "sha256:7ceab743e3e974f3e5736082e8cc514c009e254e646d6167342e0e192aee81a6", + "sha256:e064a00c66b4d1058cd2b0523fb8d98c82c18450244177b6c0f7913016642650" + ], + "version": "==3.0.37" + }, + "mako": { + "hashes": [ + "sha256:7165919e78e1feb68b4dbe829871ea9941398178fa58e6beedb9ba14acf63965" + ], + "version": "==1.0.10" + }, + "markupsafe": { + "hashes": [ + "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", + "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", + "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", + "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", + "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", + "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", + "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", + "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", + "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", + "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", + "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", + 
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", + "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", + "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", + "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" + ], + "version": "==1.1.1" + }, + "marrow.mailer": { + "hashes": [ + "sha256:22516ecab74c6bb3439e8a8a6497d5841799a92e8624547d3fc861a25b756d58", + "sha256:82885f98ba94e62a522cc7ade3113c84a6117dc5c894a4ce5be5a598648cfc05" + ], + "index": "pypi", + "version": "==4.0.2" + }, + "marrow.util": { + "hashes": [ + "sha256:1e212c51abc1b04cf44aed836f33d7291b040d1e5f87fa61072ddef2743da26f" + ], + "version": "==1.2.3" + }, + "marshmallow": { + "hashes": [ + "sha256:5f7a9ab5d5f5583fe4e071fde98a472b04fd0df868ead83012f1a3e0e54d7cdf", + "sha256:cf7e35ddfe8b18f8de3058fffc621921c79bb07455ad3084bfbf64caca766711" + ], + "version": "==2.13.1" + }, + "marshmallow-jsonapi": { + "hashes": [ + "sha256:3fcee53d31e57e85391820d316742e5c0f02bc41829b8395b5f1d2c1fcfd3ff4", + "sha256:a403f32c5a7bb8f7540fae2fbb35565cf15dddd4936b8d8d74873f55aa5f0cc2" + ], + "version": "==0.21.0" + }, + "maxminddb": { + "hashes": [ + "sha256:df1451bcd848199905ac0de4631b3d02d6a655ad28ba5e5a4ca29a23358db712" + ], + "version": "==1.4.1" + }, + "monotonic": { + "hashes": [ + "sha256:23953d55076df038541e648a53676fb24980f7a1be290cdda21300b3bc21dfb0", + "sha256:552a91f381532e33cbd07c6a2655a21908088962bb8fa7239ecbcc6ad1140cc7" + ], + "version": "==1.5" + }, + "oauth2": { 
+ "hashes": [ + "sha256:15b5c42301f46dd63113f1214b0d81a8b16254f65a86d3c32a1b52297f3266e6", + "sha256:c006a85e7c60107c7cc6da1b184b5c719f6dd7202098196dfa6e55df669b59bf" + ], + "index": "pypi", + "version": "==1.9.0.post1" + }, + "oauthlib": { + "hashes": [ + "sha256:0ce32c5d989a1827e3f1148f98b9085ed2370fc939bf524c9c851d8714797298", + "sha256:3e1e14f6cde7e5475128d30e97edc3bfb4dc857cb884d8714ec161fdbb3b358e" + ], + "version": "==3.0.1" + }, + "paypalrestsdk": { + "hashes": [ + "sha256:238713208031e8981bf70b3350b3d7f85ed64d34e0f21e4c1184444a546fee7f" + ], + "index": "pypi", + "version": "==1.13.1" + }, + "pillow": { + "hashes": [ + "sha256:051de330a06c99d6f84bcf582960487835bcae3fc99365185dc2d4f65a390c0e", + "sha256:0ae5289948c5e0a16574750021bd8be921c27d4e3527800dc9c2c1d2abc81bf7", + "sha256:0b1efce03619cdbf8bcc61cfae81fcda59249a469f31c6735ea59badd4a6f58a", + "sha256:163136e09bd1d6c6c6026b0a662976e86c58b932b964f255ff384ecc8c3cefa3", + "sha256:18e912a6ccddf28defa196bd2021fe33600cbe5da1aa2f2e2c6df15f720b73d1", + "sha256:24ec3dea52339a610d34401d2d53d0fb3c7fd08e34b20c95d2ad3973193591f1", + "sha256:267f8e4c0a1d7e36e97c6a604f5b03ef58e2b81c1becb4fccecddcb37e063cc7", + "sha256:3273a28734175feebbe4d0a4cde04d4ed20f620b9b506d26f44379d3c72304e1", + "sha256:4c678e23006798fc8b6f4cef2eaad267d53ff4c1779bd1af8725cc11b72a63f3", + "sha256:4d4bc2e6bb6861103ea4655d6b6f67af8e5336e7216e20fff3e18ffa95d7a055", + "sha256:505738076350a337c1740a31646e1de09a164c62c07db3b996abdc0f9d2e50cf", + "sha256:5233664eadfa342c639b9b9977190d64ad7aca4edc51a966394d7e08e7f38a9f", + "sha256:5d95cb9f6cced2628f3e4de7e795e98b2659dfcc7176ab4a01a8b48c2c2f488f", + "sha256:7eda4c737637af74bac4b23aa82ea6fbb19002552be85f0b89bc27e3a762d239", + "sha256:801ddaa69659b36abf4694fed5aa9f61d1ecf2daaa6c92541bbbbb775d97b9fe", + "sha256:825aa6d222ce2c2b90d34a0ea31914e141a85edefc07e17342f1d2fdf121c07c", + "sha256:9c215442ff8249d41ff58700e91ef61d74f47dfd431a50253e1a1ca9436b0697", + 
"sha256:a3d90022f2202bbb14da991f26ca7a30b7e4c62bf0f8bf9825603b22d7e87494", + "sha256:a631fd36a9823638fe700d9225f9698fb59d049c942d322d4c09544dc2115356", + "sha256:a6523a23a205be0fe664b6b8747a5c86d55da960d9586db039eec9f5c269c0e6", + "sha256:a756ecf9f4b9b3ed49a680a649af45a8767ad038de39e6c030919c2f443eb000", + "sha256:b117287a5bdc81f1bac891187275ec7e829e961b8032c9e5ff38b70fd036c78f", + "sha256:ba04f57d1715ca5ff74bb7f8a818bf929a204b3b3c2c2826d1e1cc3b1c13398c", + "sha256:cd878195166723f30865e05d87cbaf9421614501a4bd48792c5ed28f90fd36ca", + "sha256:cee815cc62d136e96cf76771b9d3eb58e0777ec18ea50de5cfcede8a7c429aa8", + "sha256:d1722b7aa4b40cf93ac3c80d3edd48bf93b9208241d166a14ad8e7a20ee1d4f3", + "sha256:d7c1c06246b05529f9984435fc4fa5a545ea26606e7f450bdbe00c153f5aeaad", + "sha256:e9c8066249c040efdda84793a2a669076f92a301ceabe69202446abb4c5c5ef9", + "sha256:f227d7e574d050ff3996049e086e1f18c7bd2d067ef24131e50a1d3fe5831fbc", + "sha256:fc9a12aad714af36cf3ad0275a96a733526571e52710319855628f476dcb144e" + ], + "index": "pypi", + "version": "==5.4.1" + }, + "psycopg2-binary": { + "hashes": [ + "sha256:19a2d1f3567b30f6c2bb3baea23f74f69d51f0c06c2e2082d0d9c28b0733a4c2", + "sha256:2b69cf4b0fa2716fd977aa4e1fd39af6110eb47b2bb30b4e5a469d8fbecfc102", + "sha256:2e952fa17ba48cbc2dc063ddeec37d7dc4ea0ef7db0ac1eda8906365a8543f31", + "sha256:348b49dd737ff74cfb5e663e18cb069b44c64f77ec0523b5794efafbfa7df0b8", + "sha256:3d72a5fdc5f00ca85160915eb9a973cf9a0ab8148f6eda40708bf672c55ac1d1", + "sha256:4957452f7868f43f32c090dadb4188e9c74a4687323c87a882e943c2bd4780c3", + "sha256:5138cec2ee1e53a671e11cc519505eb08aaaaf390c508f25b09605763d48de4b", + "sha256:587098ca4fc46c95736459d171102336af12f0d415b3b865972a79c03f06259f", + "sha256:5b79368bcdb1da4a05f931b62760bea0955ee2c81531d8e84625df2defd3f709", + "sha256:5cf43807392247d9bc99737160da32d3fa619e0bfd85ba24d1c78db205f472a4", + "sha256:676d1a80b1eebc0cacae8dd09b2fde24213173bf65650d22b038c5ed4039f392", + 
"sha256:6b0211ecda389101a7d1d3df2eba0cf7ffbdd2480ca6f1d2257c7bd739e84110", + "sha256:79cde4660de6f0bb523c229763bd8ad9a93ac6760b72c369cf1213955c430934", + "sha256:7aba9786ac32c2a6d5fb446002ed936b47d5e1f10c466ef7e48f66eb9f9ebe3b", + "sha256:7c8159352244e11bdd422226aa17651110b600d175220c451a9acf795e7414e0", + "sha256:945f2eedf4fc6b2432697eb90bb98cc467de5147869e57405bfc31fa0b824741", + "sha256:96b4e902cde37a7fc6ab306b3ac089a3949e6ce3d824eeca5b19dc0bedb9f6e2", + "sha256:9a7bccb1212e63f309eb9fab47b6eaef796f59850f169a25695b248ca1bf681b", + "sha256:a3bfcac727538ec11af304b5eccadbac952d4cca1a551a29b8fe554e3ad535dc", + "sha256:b19e9f1b85c5d6136f5a0549abdc55dcbd63aba18b4f10d0d063eb65ef2c68b4", + "sha256:b664011bb14ca1f2287c17185e222f2098f7b4c857961dbcf9badb28786dbbf4", + "sha256:bde7959ef012b628868d69c474ec4920252656d0800835ed999ba5e4f57e3e2e", + "sha256:cb095a0657d792c8de9f7c9a0452385a309dfb1bbbb3357d6b1e216353ade6ca", + "sha256:d16d42a1b9772152c1fe606f679b2316551f7e1a1ce273e7f808e82a136cdb3d", + "sha256:d444b1545430ffc1e7a24ce5a9be122ccd3b135a7b7e695c5862c5aff0b11159", + "sha256:d93ccc7bf409ec0a23f2ac70977507e0b8a8d8c54e5ee46109af2f0ec9e411f3", + "sha256:df6444f952ca849016902662e1a47abf4fa0678d75f92fd9dd27f20525f809cd", + "sha256:e63850d8c52ba2b502662bf3c02603175c2397a9acc756090e444ce49508d41e", + "sha256:ec43358c105794bc2b6fd34c68d27f92bea7102393c01889e93f4b6a70975728", + "sha256:f4c6926d9c03dadce7a3b378b40d2fea912c1344ef9b29869f984fb3d2a2420b" + ], + "index": "pypi", + "version": "==2.7.7" + }, + "pycountry": { + "hashes": [ + "sha256:104a8ca94c700898c42a0172da2eab5a5675c49637b729a11db9e1dac2d983cd", + "sha256:8ec4020b2b15cd410893d573820d42ee12fe50365332e58c0975c953b60a16de" + ], + "index": "pypi", + "version": "==18.12.8" + }, + "pycparser": { + "hashes": [ + "sha256:7959b4a74abdc27b312fed1c21e6caf9309ce0b29ea86b591fd2e99ecdf27f73" + ], + "index": "pypi", + "version": "==2.14" + }, + "pyjwt": { + "hashes": [ + 
"sha256:87a831b7a3bfa8351511961469ed0462a769724d4da48a501cb8c96d1e17f570", + "sha256:99fe612dbe5f41e07124d9002c118c14f3ee703574ffa9779fee78135b8b94b6" + ], + "version": "==1.4.2" + }, + "pyopenssl": { + "hashes": [ + "sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200", + "sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6" + ], + "version": "==19.0.0" + }, + "pypdf2": { + "hashes": [ + "sha256:e28f902f2f0a1603ea95ebe21dff311ef09be3d0f0ef29a3e44a932729564385" + ], + "version": "==1.26.0" + }, + "python-dateutil": { + "hashes": [ + "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", + "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" + ], + "version": "==2.8.0" + }, + "python-editor": { + "hashes": [ + "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", + "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" + ], + "version": "==1.0.4" + }, + "python-geoip": { + "hashes": [ + "sha256:b7b11dab42bffba56943b3199e3441f41cea145244d215844ecb6de3d5fb2df5", + "sha256:fb0fa723d0cef2b52807afb7da154877125e0d40f94ec69707511549a8d431c9" + ], + "index": "pypi", + "version": "==1.2" + }, + "python-geoip-geolite2": { + "hashes": [ + "sha256:3562ab598a25c19a62f57a4e00210f9732524c1005343ff4f74a1f0bd412ec98", + "sha256:55af317b7743ecb40d0eb1ebfc013d2e112272d9358f322b83ebcb170121f27c" + ], + "index": "pypi", + "version": "==2015.303" + }, + "python-magic": { + "hashes": [ + "sha256:f2674dcfad52ae6c49d4803fa027809540b130db1dec928cfbb9240316831375", + "sha256:f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5" + ], + "index": "pypi", + "version": "==0.4.15" + }, + "python-pentabarf-xml": { + "hashes": [ + "sha256:d0d521aa613c7fe6182196ba9ff5e78a6f94d3da06697b7162732193deda0ce9" + ], + "index": "pypi", + "version": "==0.19" + }, + "pytz": { + "hashes": [ + 
"sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9", + "sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c" + ], + "index": "pypi", + "version": "==2018.9" + }, + "qrcode": { + "hashes": [ + "sha256:4115ccee832620df16b659d4653568331015c718a754855caf5930805d76924e", + "sha256:60222a612b83231ed99e6cb36e55311227c395d0d0f62e41bb51ebbb84a9a22b" + ], + "index": "pypi", + "version": "==5.3" + }, + "raven": { + "extras": [ + "flask" + ], + "hashes": [ + "sha256:3fa6de6efa2493a7c827472e984ce9b020797d0da16f1db67197bcc23c8fae54", + "sha256:44a13f87670836e153951af9a3c80405d36b43097db869a36e92809673692ce4" + ], + "index": "pypi", + "version": "==6.10.0" + }, + "redis": { + "hashes": [ + "sha256:8a1900a9f2a0a44ecf6e8b5eb3e967a9909dfed219ad66df094f27f7d6f330fb", + "sha256:a22ca993cea2962dbb588f9f30d0015ac4afcc45bee27d3978c0dbe9e97c6c0f" + ], + "index": "pypi", + "version": "==2.10.6" + }, + "reportlab": { + "hashes": [ + "sha256:04b9bf35127974f734bddddf48860732361e31c1220c0ebe4f683f19d5cfc3b8", + "sha256:073da867efdf9e0d6cba2a566f5929ef0bb9fb757b53a7132b91db9869441859", + "sha256:08e6e63a4502d3a00062ba9ff9669f95577fbdb1a5f8c6cdb1230c5ee295273a", + "sha256:0960567b9d937a288efa04753536dce1dbb032a1e1f622fd92efbe85b8cccf6e", + "sha256:1870e321c5d7772fd6e5538a89562ed8b40687ed0aec254197dc73e9d700e62f", + "sha256:1eac902958a7f66c30e1115fa1a80bf6a7aa57680427cfcb930e13c746142150", + "sha256:1f6cdcdaf6ab78ab3efd21b23c27e4487a5c0816202c3578b277f441f984a51f", + "sha256:281443252a335489ce4b8b150afccdc01c74daf97e962fd99a8c2d59c8b333d3", + "sha256:2ae66e61b03944c5ed1f3c96bbc51160cce4aa28cbe96f205b464017cdfc851c", + "sha256:34d348575686390676757876fef50f6e32e3a59ff7d549e022b5f3b8a9f7e564", + "sha256:508224a11ec9ef203ae2fd2177e903d36d3b840eeb8ac70747f53eeb373db439", + "sha256:5c497c9597a346d27007507cddc2a792f8ca5017268738fd35c374c224d81988", + "sha256:6e0d9efe78526ddf5ad1d2357f6b2b0f5d7df354ac559358e3d056bdd12fdabf", + 
"sha256:817dfd400c5e694cbb6eb87bc932cd3d97cf5d79d918329b8f99085a7979bb29", + "sha256:8d6ed4357eb0146501ebdb7226c87ef98a9bcbc6d54401ec676fa905b6355e00", + "sha256:8e681324ce457cc3d5c0949c92d590ac4401347b5df55f6fde207b42316d42d2", + "sha256:926981544d37554b44c6f067c3f94981831f9ef3f2665fa5f4114b23a140f596", + "sha256:92a0bf5cc2d9418115bff46032964d25bb21c0ac8bcdf6bee5769ca810a54a5a", + "sha256:9a3e7495e223fc4a9bdcd356972c230d32bf8c7a57442ca5b8c2ff6b19e6007b", + "sha256:a31f424020176e96a0ff0229f7f251d865c5409ddf074f695b97ba604f173b48", + "sha256:aa0c35b22929c19ecd48d5c1734e420812f269f463d1ef138e0adb28069c3150", + "sha256:b36b555cdbdd51f9f00a7606966ec6d4d30d74c61d1523a1ac56bbeb83a15ed3", + "sha256:cd3d9765b8f446c25d75a4456d8781c4781de0f10f860dff5cb69bbe526e8f53", + "sha256:d3daa4f19d1dc2fc1fc2591e1354edd95439b9e9953ca8b374d41524d434b315", + "sha256:d8f1878bc1fc91c63431e9b0f1940ff18b70c059f6d38f2be1e34ce9ffcc28ea", + "sha256:ddca7479d29f9dfbfc69057764239ec7753b49a3b0dcbed08f70cbef8fccfee6", + "sha256:f28f3a965d15c88c797cf33968bdaa5a04aabcf321d3f6fcf14d7e7fde8d90f3", + "sha256:fcca214bf340f59245fff792134a9ac333d21eeef19a874a69ecc926b4c992a4" + ], + "version": "==3.5.21" + }, + "requests": { + "extras": [ + "security" + ], + "hashes": [ + "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", + "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" + ], + "index": "pypi", + "version": "==2.21.0" + }, + "requests-oauthlib": { + "hashes": [ + "sha256:50a8ae2ce8273e384895972b56193c7409601a66d4975774c60c2aed869639ca", + "sha256:883ac416757eada6d3d07054ec7092ac21c7f35cb1d2cf82faf205637081f468" + ], + "index": "pypi", + "version": "==0.8.0" + }, + "scrypt": { + "hashes": [ + "sha256:1377b1adc98c4152694bf5d7e93b41a9d2e9060af69b747cfad8c93ac426f9ea", + "sha256:336a76da970674206591a9d1092b165a6792eadeaed5fd8fd0d7e367ea0cd74a", + "sha256:40fcaecc2e6cc3f9d200c7fb111454f4b584dcde3cf1242d0730ca299fb08553", + 
"sha256:96ef78c99b324000cadf019c6af1f6cd2fefc1048951fc07365ec74bf99c17f9", + "sha256:a12930a9942774dbaebe0ae4e3d089a6ea158daf7fc6d9b81aba47bcb73103ff", + "sha256:b5434eb5608d491abf42bdacb5dea2103ff25d4e42c0a2b574bd74c7789bbb37", + "sha256:b9da3cd041efdbfde0f353351b6f8a8e0b7b7b4e8d95a29e59f77c48e2cee96d", + "sha256:d5acc01c27048ad5f5477aeaa97c8faa1bd739f0a915d31b4526e18609fd9df1", + "sha256:e1d24b8dd8a4451745a0f99c6bf356475fa822e5ebdeb207ea99f0fdab54c909" + ], + "version": "==0.8.13" + }, + "simplejson": { + "hashes": [ + "sha256:067a7177ddfa32e1483ba5169ebea1bc2ea27f224853211ca669325648ca5642", + "sha256:2fc546e6af49fb45b93bbe878dea4c48edc34083729c0abd09981fe55bdf7f91", + "sha256:354fa32b02885e6dae925f1b5bbf842c333c1e11ea5453ddd67309dc31fdb40a", + "sha256:37e685986cf6f8144607f90340cff72d36acf654f3653a6c47b84c5c38d00df7", + "sha256:3af610ee72efbe644e19d5eaad575c73fb83026192114e5f6719f4901097fce2", + "sha256:3b919fc9cf508f13b929a9b274c40786036b31ad28657819b3b9ba44ba651f50", + "sha256:3dd289368bbd064974d9a5961101f080e939cbe051e6689a193c99fb6e9ac89b", + "sha256:6c3258ffff58712818a233b9737fe4be943d306c40cf63d14ddc82ba563f483a", + "sha256:75e3f0b12c28945c08f54350d91e624f8dd580ab74fd4f1bbea54bc6b0165610", + "sha256:b1f329139ba647a9548aa05fb95d046b4a677643070dc2afc05fa2e975d09ca5", + "sha256:ee9625fc8ee164902dfbb0ff932b26df112da9f871c32f0f9c1bcf20c350fe2a", + "sha256:fb2530b53c28f0d4d84990e945c2ebb470edb469d63e389bf02ff409012fe7c5" + ], + "version": "==3.16.0" + }, + "six": { + "hashes": [ + "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", + "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + ], + "version": "==1.12.0" + }, + "sqlalchemy": { + "hashes": [ + "sha256:8b79a5ed91cdcb5abe97b0045664c55c140aec09e5dd5c01303e23de5fe7a95a" + ], + "index": "pypi", + "version": "==1.1.15" + }, + "sqlalchemy-continuum": { + "hashes": [ + "sha256:4f4e378938baf3ca7321ee6f5c310c50868b66fef2507fb84ff5e0e27106f82c" + ], + "index": "pypi", + 
"version": "==1.3.9" + }, + "sqlalchemy-utils": { + "hashes": [ + "sha256:e35431b0e57c4f7030ff598c23813c8b7b04b508ce10e8e9ebe448645b38d6d7" + ], + "index": "pypi", + "version": "==0.32.21" + }, + "stripe": { + "hashes": [ + "sha256:2faff6079079a0c5bdf97f0b30b5a40b55b1700b79f0d28855fd1be0a99dd235", + "sha256:86e291792c8825f07dcc77f662ee069bb453dc22d8f2c62d4c72b9e1541547f9" + ], + "index": "pypi", + "version": "==1.84.2" + }, + "text-unidecode": { + "hashes": [ + "sha256:5a1375bb2ba7968740508ae38d92e1f889a0832913cb1c447d5e2046061a396d", + "sha256:801e38bd550b943563660a91de8d4b6fa5df60a542be9093f7abf819f86050cc" + ], + "version": "==1.2" + }, + "tzlocal": { + "hashes": [ + "sha256:4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e" + ], + "version": "==1.5.1" + }, + "unicode-slugify": { + "hashes": [ + "sha256:34cf3afefa6480efe705a4fc0eaeeaf7f49754aec322ba3e8b2f27dc1cbcf650" + ], + "index": "pypi", + "version": "==0.1.3" + }, + "unidecode": { + "hashes": [ + "sha256:092cdf7ad9d1052c50313426a625b717dab52f7ac58f859e09ea020953b1ad8f", + "sha256:8b85354be8fd0c0e10adbf0675f6dc2310e56fda43fa8fe049123b6c475e52fb" + ], + "version": "==1.0.23" + }, + "urllib3": { + "hashes": [ + "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", + "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + ], + "version": "==1.24.3" + }, + "webencodings": { + "hashes": [ + "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", + "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" + ], + "version": "==0.5.1" + }, + "werkzeug": { + "hashes": [ + "sha256:865856ebb55c4dcd0630cdd8f3331a1847a819dda7e8c750d3db6f2aa6c0209c", + "sha256:a0b915f0815982fb2a09161cb8f31708052d0951c3ba433ccc5e1aa276507ca6" + ], + "version": "==0.15.4" + }, + "wtforms": { + "hashes": [ + "sha256:0cdbac3e7f6878086c334aa25dc5a33869a3954e9d1e015130d65a69309b3b61", + 
"sha256:e3ee092c827582c50877cdbd49e9ce6d2c5c1f6561f849b3b068c1b8029626f1" + ], + "index": "pypi", + "version": "==2.2.1" + }, + "xhtml2pdf": { + "hashes": [ + "sha256:86a37e78d7a8d8bb2761746c3d559e12284d92c4d531b3a8a0f8fd632b436f82" + ], + "index": "pypi", + "version": "==0.2.3" + } + }, + "develop": { + "astroid": { + "hashes": [ + "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4", + "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4" + ], + "version": "==2.2.5" + }, + "coverage": { + "hashes": [ + "sha256:3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9", + "sha256:39e088da9b284f1bd17c750ac672103779f7954ce6125fd4382134ac8d152d74", + "sha256:3c205bc11cc4fcc57b761c2da73b9b72a59f8d5ca89979afb0c1c6f9e53c7390", + "sha256:465ce53a8c0f3a7950dfb836438442f833cf6663d407f37d8c52fe7b6e56d7e8", + "sha256:48020e343fc40f72a442c8a1334284620f81295256a6b6ca6d8aa1350c763bbe", + "sha256:5296fc86ab612ec12394565c500b412a43b328b3907c0d14358950d06fd83baf", + "sha256:5f61bed2f7d9b6a9ab935150a6b23d7f84b8055524e7be7715b6513f3328138e", + "sha256:68a43a9f9f83693ce0414d17e019daee7ab3f7113a70c79a3dd4c2f704e4d741", + "sha256:6b8033d47fe22506856fe450470ccb1d8ba1ffb8463494a15cfc96392a288c09", + "sha256:7ad7536066b28863e5835e8cfeaa794b7fe352d99a8cded9f43d1161be8e9fbd", + "sha256:7bacb89ccf4bedb30b277e96e4cc68cd1369ca6841bde7b005191b54d3dd1034", + "sha256:839dc7c36501254e14331bcb98b27002aa415e4af7ea039d9009409b9d2d5420", + "sha256:8f9a95b66969cdea53ec992ecea5406c5bd99c9221f539bca1e8406b200ae98c", + "sha256:932c03d2d565f75961ba1d3cec41ddde00e162c5b46d03f7423edcb807734eab", + "sha256:988529edadc49039d205e0aa6ce049c5ccda4acb2d6c3c5c550c17e8c02c05ba", + "sha256:998d7e73548fe395eeb294495a04d38942edb66d1fa61eb70418871bc621227e", + "sha256:9de60893fb447d1e797f6bf08fdf0dbcda0c1e34c1b06c92bd3a363c0ea8c609", + "sha256:9e80d45d0c7fcee54e22771db7f1b0b126fb4a6c0a2e5afa72f66827207ff2f2", + 
"sha256:a545a3dfe5082dc8e8c3eb7f8a2cf4f2870902ff1860bd99b6198cfd1f9d1f49", + "sha256:a5d8f29e5ec661143621a8f4de51adfb300d7a476224156a39a392254f70687b", + "sha256:aca06bfba4759bbdb09bf52ebb15ae20268ee1f6747417837926fae990ebc41d", + "sha256:bb23b7a6fd666e551a3094ab896a57809e010059540ad20acbeec03a154224ce", + "sha256:bfd1d0ae7e292105f29d7deaa9d8f2916ed8553ab9d5f39ec65bcf5deadff3f9", + "sha256:c62ca0a38958f541a73cf86acdab020c2091631c137bd359c4f5bddde7b75fd4", + "sha256:c709d8bda72cf4cd348ccec2a4881f2c5848fd72903c185f363d361b2737f773", + "sha256:c968a6aa7e0b56ecbd28531ddf439c2ec103610d3e2bf3b75b813304f8cb7723", + "sha256:df785d8cb80539d0b55fd47183264b7002077859028dfe3070cf6359bf8b2d9c", + "sha256:f406628ca51e0ae90ae76ea8398677a921b36f0bd71aab2099dfed08abd0322f", + "sha256:f46087bbd95ebae244a0eda01a618aff11ec7a069b15a3ef8f6b520db523dcf1", + "sha256:f8019c5279eb32360ca03e9fac40a12667715546eed5c5eb59eb381f2f501260", + "sha256:fc5f4d209733750afd2714e9109816a29500718b32dd9a5db01c0cb3a019b96a" + ], + "index": "pypi", + "version": "==4.5.3" + }, + "dredd-hooks": { + "hashes": [ + "sha256:7d0527ee269d716126de912098b6d8750fcb3755232cb902e5a360f1921df780" + ], + "index": "pypi", + "version": "==0.2.0" + }, + "isort": { + "hashes": [ + "sha256:c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a", + "sha256:f57abacd059dc3bd666258d1efb0377510a89777fda3e3274e3c01f7c03ae22d" + ], + "version": "==4.3.20" + }, + "lazy-object-proxy": { + "hashes": [ + "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661", + "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f", + "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13", + "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821", + "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71", + "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e", + 
"sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea", + "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229", + "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4", + "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e", + "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20", + "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16", + "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b", + "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7", + "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c", + "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a", + "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e", + "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1" + ], + "version": "==1.4.1" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "nose": { + "hashes": [ + "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac", + "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a", + "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98" + ], + "index": "pypi", + "version": "==1.3.7" + }, + "pep8": { + "hashes": [ + "sha256:b22cfae5db09833bb9bd7c8463b53e1a9c9b39f12e304a8d0bba729c501827ee", + "sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374" + ], + "index": "pypi", + "version": "==1.7.1" + }, + "pylint": { + "hashes": [ + "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09", + "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" + ], + "index": "pypi", + "version": "==2.3.1" + }, + "six": { + "hashes": [ + 
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", + "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + ], + "version": "==1.12.0" + }, + "typed-ast": { + "hashes": [ + "sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b", + "sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d", + "sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a", + "sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462", + "sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee", + "sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a", + "sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4", + "sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649", + "sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a", + "sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f", + "sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7", + "sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760", + "sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18", + "sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616", + "sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd", + "sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21", + "sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93", + "sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb", + "sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7" + ], + "markers": "implementation_name == 'cpython'", + "version": "==1.3.5" + }, + "wrapt": { + "hashes": [ + "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533" + ], + "version": "==1.11.1" + } + } +} diff --git a/README.md b/README.md index 83fcfb4b82..fa777540f1 100644 --- a/README.md +++ 
b/README.md @@ -91,7 +91,7 @@ Required keys can be obtained from [https://www.instagram.com/developer/authenti Google maps is used to get information about location (info about country, city, latitude and longitude). -Required keys can be obtained from [https://maps.googleapis.com/maps/api](https://maps.googleapis.com/maps/api). +Required keys can be obtained from [https://developers.google.com/maps/documentation/javascript/get-api-key](https://developers.google.com/maps/documentation/javascript/get-api-key). #### Media Storage - Local/Amazon S3/Google Cloud @@ -268,7 +268,7 @@ The tentative release policy, for now (since there is a lot of activity and a lo **Commits** * Write clear meaningful git commit messages (Do read http://chris.beams.io/posts/git-commit/) * Make sure your PR's description contains GitHub's special keyword references that automatically close the related issue when the PR is merged. (More info at https://github.com/blog/1506-closing-issues-via-pull-requests ) -* When you make very very minor changes to a PR of yours (like for example fixing a failing travis build or some small style corrections or minor changes requested by reviewers) make sure you squash your commits afterward so that you don't have an absurd number of commits for a very small fix. (Learn how to squash at https://davidwalsh.name/squash-commits-git ) +* When you make very minor changes to a PR of yours (like for example fixing a failing travis build or some small style corrections or minor changes requested by reviewers) make sure you squash your commits afterward so that you don't have an absurd number of commits for a very small fix. (Learn how to squash at https://davidwalsh.name/squash-commits-git ) * When you're submitting a PR for a UI-related issue, it would be really awesome if you add a screenshot of your change or a link to a deployment where it can be tested out along with your PR. It makes it very easy for the reviewers and you'll also get reviews quicker. 
**Feature Requests and Bug Reports** @@ -281,6 +281,6 @@ The tentative release policy, for now (since there is a lot of activity and a lo ## License -This project is currently licensed under the **[GNU General Public License v3](LICENSE.md)**. +This project is currently licensed under the **[GNU General Public License v3](LICENSE)**. > To obtain the software under a different license, please contact [FOSSASIA](http://blog.fossasia.org/contact/). diff --git a/app/__init__.py b/app/__init__.py index 5b7f9940f9..7e3b1be5ea 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -29,7 +29,7 @@ from app.views import BlueprintsManager from app.api.helpers.auth import AuthManager from app.api.helpers.scheduled_jobs import send_after_event_mail, send_event_fee_notification, \ - send_event_fee_notification_followup + send_event_fee_notification_followup, change_session_state_on_event_completion, expire_pending_tickets_after_one_day from app.models.event import Event from app.models.role_invite import RoleInvite from app.views.healthcheck import health_check_celery, health_check_db, health_check_migrations, check_migrations @@ -126,6 +126,9 @@ def create_app(): from app.api.uploads import upload_routes from app.api.users import user_misc_routes from app.api.orders import order_misc_routes + from app.api.role_invites import role_invites_misc_routes + from app.api.auth import ticket_blueprint + from app.api.admin_translations import admin_blueprint app.register_blueprint(api_v1) app.register_blueprint(event_copy) @@ -138,6 +141,9 @@ def create_app(): app.register_blueprint(user_misc_routes) app.register_blueprint(attendee_misc_routes) app.register_blueprint(order_misc_routes) + app.register_blueprint(role_invites_misc_routes) + app.register_blueprint(ticket_blueprint) + app.register_blueprint(admin_blueprint) sa.orm.configure_mappers() @@ -236,6 +242,8 @@ def update_sent_state(sender=None, body=None, **kwargs): scheduler.add_job(send_after_event_mail, 'cron', hour=5, minute=30) 
scheduler.add_job(send_event_fee_notification, 'cron', day=1) scheduler.add_job(send_event_fee_notification_followup, 'cron', day=15) +scheduler.add_job(change_session_state_on_event_completion, 'cron', hour=5, minute=30) +scheduler.add_job(expire_pending_tickets_after_one_day, 'cron', hour=5) scheduler.start() diff --git a/app/api/__init__.py b/app/api/__init__.py index d0e092e05f..5b8e61c0e3 100644 --- a/app/api/__init__.py +++ b/app/api/__init__.py @@ -84,6 +84,7 @@ from app.api.admin_sales.fees import AdminSalesFeesList from app.api.admin_sales.invoices import AdminSalesInvoicesList from app.api.full_text_search.events import EventSearchResultList +from app.api.import_jobs import ImportJobList, ImportJobDetail # users api.route(UserList, 'user_list', '/users') @@ -623,3 +624,7 @@ # Full text search w/ Elastic Search api.route(EventSearchResultList, 'event_search_results', '/search/events') + +# Import Jobs +api.route(ImportJobList, 'import_job_list', '/import-jobs') +api.route(ImportJobDetail, 'import_job_detail', '/import-jobs/') diff --git a/app/api/access_codes.py b/app/api/access_codes.py index 9e80d0436d..a7832ddfa9 100644 --- a/app/api/access_codes.py +++ b/app/api/access_codes.py @@ -129,7 +129,7 @@ def before_get(self, args, kwargs): raise UnprocessableEntity({'source': ''}, "Please verify your permission") - decorators = (jwt_required, api.has_permission('is_coorganizer', fetch='event_id', + decorators = (api.has_permission('is_coorganizer', fetch='event_id', fetch_as="event_id", model=AccessCode, methods="PATCH"), api.has_permission('is_coorganizer_but_not_admin', fetch='event_id', fetch_as="event_id", model=AccessCode, methods="DELETE"),) diff --git a/app/api/admin_sales/discounted.py b/app/api/admin_sales/discounted.py index 9c29de6701..165e5478bb 100644 --- a/app/api/admin_sales/discounted.py +++ b/app/api/admin_sales/discounted.py @@ -49,6 +49,7 @@ class Meta: code = fields.String() email = fields.String() event_name = fields.String() + 
payment_currency = fields.String() sales = fields.Method('calc_sales') @staticmethod diff --git a/app/api/admin_sales/events.py b/app/api/admin_sales/events.py index eaf3755a6d..d824777647 100644 --- a/app/api/admin_sales/events.py +++ b/app/api/admin_sales/events.py @@ -2,6 +2,7 @@ from marshmallow_jsonapi.flask import Schema from flask_rest_jsonapi import ResourceList +from app.api.helpers.utilities import dasherize from app.api.bootstrap import api from app.models import db from app.models.event import Event @@ -23,11 +24,13 @@ class AdminSalesByEventsSchema(Schema): class Meta: type_ = 'admin-sales-by-events' self_view = 'v1.admin_sales_by_events' + inflect = dasherize id = fields.String() name = fields.String() starts_at = fields.DateTime() ends_at = fields.DateTime() + payment_currency = fields.String() sales = fields.Method('calc_sales') @staticmethod diff --git a/app/api/admin_sales/fees.py b/app/api/admin_sales/fees.py index 433e92b508..bb16ea106b 100644 --- a/app/api/admin_sales/fees.py +++ b/app/api/admin_sales/fees.py @@ -22,13 +22,14 @@ class Meta: name = fields.String() payment_currency = fields.String() fee_percentage = fields.Float(attribute='fee') + maximum_fee = fields.Float(attribute='maximum_fee') revenue = fields.Method('calc_revenue') ticket_count = fields.Method('calc_ticket_count') @staticmethod def calc_ticket_count(obj): """Count all tickets in all orders of this event""" - return sum([o.amount for o in obj.orders]) + return sum([o.tickets_count for o in obj.orders if o.status == 'completed']) @staticmethod def calc_revenue(obj): diff --git a/app/api/admin_sales/locations.py b/app/api/admin_sales/locations.py index 30a9802795..c37b5a3ec6 100644 --- a/app/api/admin_sales/locations.py +++ b/app/api/admin_sales/locations.py @@ -2,6 +2,7 @@ from marshmallow_jsonapi.flask import Schema from flask_rest_jsonapi import ResourceList from sqlalchemy import func +from app.api.helpers.utilities import dasherize from app.api.bootstrap import api from 
app.models import db @@ -34,6 +35,7 @@ class AdminSalesByLocationSchema(Schema): class Meta: type_ = 'admin-sales-by-location' self_view = 'v1.admin_sales_by_location' + inflect = dasherize id = fields.String() location_name = fields.String() @@ -63,9 +65,12 @@ class AdminSalesByLocationList(ResourceList): """ def query(self, _): - locations = self.session.query(Event.location_name) \ - .group_by(Event.location_name) \ - .cte() + locations = self.session.query( + Event.location_name, + Event.location_name.label('id')) \ + .group_by(Event.location_name) \ + .filter(Event.location_name.isnot(None)) \ + .cte() pending = sales_per_location_by_status('pending') completed = sales_per_location_by_status('completed') diff --git a/app/api/admin_sales/organizer.py b/app/api/admin_sales/organizer.py index ba345fb167..2beb2a5fbb 100644 --- a/app/api/admin_sales/organizer.py +++ b/app/api/admin_sales/organizer.py @@ -2,6 +2,7 @@ from marshmallow_jsonapi.flask import Schema from flask_rest_jsonapi import ResourceList +from app.api.helpers.utilities import dasherize from app.api.bootstrap import api from app.models import db from app.models.event import Event @@ -18,17 +19,19 @@ class AdminSalesByOrganizersSchema(Schema): Sales summarized by organizer Provides - organizer (first name and last name), + organizer (first name, last name and email), count of tickets and total sales for orders grouped by status """ class Meta: type_ = 'admin-sales-by-organizers' self_view = 'v1.admin_sales_by_organizers' + inflect = dasherize id = fields.String() first_name = fields.String() last_name = fields.String() + email = fields.String() starts_at = fields.DateTime() ends_at = fields.DateTime() sales = fields.Method('calc_sales') diff --git a/app/api/admin_sales/utils.py b/app/api/admin_sales/utils.py index d86c021414..5823e01e21 100644 --- a/app/api/admin_sales/utils.py +++ b/app/api/admin_sales/utils.py @@ -11,7 +11,7 @@ def status_summary(orders, status): """ return { 'sales_total': 
sum([o.amount for o in orders if o.status == status]), - 'ticket_count': sum([o.tickets_count for o in orders]) + 'ticket_count': sum([o.tickets_count for o in orders if o.status == status]) } diff --git a/app/api/admin_statistics_api/mails.py b/app/api/admin_statistics_api/mails.py index 710d38b2e9..2218b7fe44 100644 --- a/app/api/admin_statistics_api/mails.py +++ b/app/api/admin_statistics_api/mails.py @@ -31,24 +31,20 @@ class Meta: thirty_days = fields.Method("mail_last_30_days") def mail_last_1_day(self, obj): - all_mails = get_count(Mail.query.filter_by(time=datetime.now(pytz.utc))) - mails_till_last_1_day = get_count(Mail.query.filter(Mail.time <= datetime.now(pytz.utc) - timedelta(days=1))) - return all_mails - mails_till_last_1_day + mails_till_last_1_day = Mail.query.filter(Mail.time >= datetime.now(pytz.utc) - timedelta(days=1)).count() + return mails_till_last_1_day def mail_last_3_days(self, obj): - all_mails = get_count(Mail.query.filter_by(time=datetime.now(pytz.utc))) - mails_till_last_3_day = get_count(Mail.query.filter(Mail.time <= datetime.now(pytz.utc) - timedelta(days=3))) - return all_mails - mails_till_last_3_day + mails_till_last_3_day = Mail.query.filter(Mail.time >= datetime.now(pytz.utc) - timedelta(days=3)).count() + return mails_till_last_3_day def mail_last_7_days(self, obj): - all_mails = get_count(Mail.query.filter_by(time=datetime.now(pytz.utc))) - mails_till_last_7_day = get_count(Mail.query.filter(Mail.time <= datetime.now(pytz.utc) - timedelta(days=7))) - return all_mails - mails_till_last_7_day + mails_till_last_7_day = Mail.query.filter(Mail.time >= datetime.now(pytz.utc) - timedelta(days=7)).count() + return mails_till_last_7_day def mail_last_30_days(self, obj): - all_mails = get_count(Mail.query.filter_by(time=datetime.now(pytz.utc))) - mails_till_last_30_day = get_count(Mail.query.filter(Mail.time <= datetime.now(pytz.utc) - timedelta(days=30))) - return all_mails - mails_till_last_30_day + mails_till_last_30_day = 
Mail.query.filter(Mail.time >= datetime.now(pytz.utc) - timedelta(days=30)).count() + return mails_till_last_30_day class AdminStatisticsMailDetail(ResourceDetail): diff --git a/app/api/admin_statistics_api/users.py b/app/api/admin_statistics_api/users.py index 08249e559d..18f359ca78 100644 --- a/app/api/admin_statistics_api/users.py +++ b/app/api/admin_statistics_api/users.py @@ -10,9 +10,9 @@ from app.models.event import Event from app.models.users_events_role import UsersEventsRoles from app.models.role import Role +from app.models.ticket_holder import TicketHolder from app.api.helpers.db import get_count - class AdminStatisticsUserSchema(Schema): """ Api schema @@ -49,21 +49,22 @@ def unverified_count(self, obj): def get_all_user_roles(self, role_name): role = Role.query.filter_by(name=role_name).first() - uers = UsersEventsRoles.query.join(UsersEventsRoles.event).join(UsersEventsRoles.role).filter( - Event.deleted_at.is_(None), UsersEventsRoles.role == role) - return uers + newquery = User.query.join(UsersEventsRoles.user).join(UsersEventsRoles.role).filter( + UsersEventsRoles.role == role).distinct() + return newquery def organizer_count(self, obj): - return get_count(self.get_all_user_roles('organizer')) + return self.get_all_user_roles('organizer').count() def coorganizer_count(self, obj): - return get_count(self.get_all_user_roles('coorganizer')) + return self.get_all_user_roles('coorganizer').count() def track_organizer_count(self, obj): - return get_count(self.get_all_user_roles('track_organizer')) + return self.get_all_user_roles('track_organizer').count() def attendee_count(self, obj): - return get_count(self.get_all_user_roles('attendee')) + unique_attendee_query = db.session.query(TicketHolder.email).distinct() + return unique_attendee_query.count() class AdminStatisticsUserDetail(ResourceDetail): diff --git a/app/api/admin_translations.py b/app/api/admin_translations.py new file mode 100644 index 0000000000..7c75fd401a --- /dev/null +++ 
b/app/api/admin_translations.py @@ -0,0 +1,26 @@ +from flask import send_file, Blueprint +import shutil +import uuid +import tempfile +import os +from app.api.helpers.permissions import is_admin + +admin_blueprint = Blueprint('admin_blueprint', __name__, url_prefix='/v1/admin/content/translations/all') +temp_dir = tempfile.gettempdir() +translations_dir = 'app/translations' + +@admin_blueprint.route('/', methods=['GET']) +@is_admin +def download_translations(): + """Admin Translations Downloads""" + uuid_literal = uuid.uuid4() + zip_file = "translations{}".format(uuid_literal) + zip_file_ext = zip_file+'.zip' + shutil.make_archive(zip_file, "zip", translations_dir) + shutil.move(zip_file_ext, temp_dir) + path_to_zip = os.path.join(temp_dir, zip_file_ext) + from .helpers.tasks import delete_translations + delete_translations.apply_async(kwargs={'zip_file_path': path_to_zip}, countdown=600) + return send_file(path_to_zip, mimetype='application/zip', + as_attachment=True, + attachment_filename='translations.zip') diff --git a/app/api/attendees.py b/app/api/attendees.py index 18cfcf0353..4982748c74 100644 --- a/app/api/attendees.py +++ b/app/api/attendees.py @@ -112,8 +112,8 @@ def query(self, view_kwargs): if view_kwargs.get('ticket_id'): ticket = safe_query(self, Ticket, 'id', view_kwargs['ticket_id'], 'ticket_id') - if not has_access('is_registrar', event_id=ticket.event_id): - raise ForbiddenException({'source': ''}, 'Access Forbidden') + # if not has_access('is_registrar', event_id=ticket.event_id): + # raise ForbiddenException({'source': ''}, 'Access Forbidden') query_ = query_.join(Ticket).filter(Ticket.id == ticket.id) if view_kwargs.get('user_id'): @@ -167,8 +167,8 @@ def before_update_object(self, obj, data, kwargs): :param kwargs: :return: """ - if not has_access('is_registrar', event_id=obj.event_id): - raise ForbiddenException({'source': 'User'}, 'You are not authorized to access this.') +# if not has_access('is_registrar', event_id=obj.event_id): +# raise 
ForbiddenException({'source': 'User'}, 'You are not authorized to access this.') if 'device_name_checkin' in data: if 'checkin_times' not in data or data['checkin_times'] is None: diff --git a/app/api/auth.py b/app/api/auth.py index 1b9b93b9fc..fc32f36972 100644 --- a/app/api/auth.py +++ b/app/api/auth.py @@ -1,15 +1,18 @@ +import os import base64 import random import string import requests -from flask import request, jsonify, make_response, Blueprint +from flask import request, jsonify, make_response, Blueprint, send_file, url_for, redirect from flask_jwt import current_identity as current_user, jwt_required from sqlalchemy.orm.exc import NoResultFound +from app.api.helpers.order import create_pdf_tickets_for_holder +from app.api.helpers.storage import generate_hash from app import get_settings from app.api.helpers.db import save_to_db, get_count -from app.api.helpers.errors import UnprocessableEntityError, NotFoundError, BadRequestError +from app.api.helpers.errors import ForbiddenError, UnprocessableEntityError, NotFoundError, BadRequestError from app.api.helpers.files import make_frontend_url from app.api.helpers.mail import send_email_with_action, \ send_email_confirmation @@ -17,11 +20,15 @@ from app.api.helpers.third_party_auth import GoogleOAuth, FbOAuth, TwitterOAuth, InstagramOAuth from app.api.helpers.utilities import get_serializer, str_generator from app.models import db +from app.models.order import Order from app.models.mail import PASSWORD_RESET, PASSWORD_CHANGE, \ - USER_REGISTER_WITH_PASSWORD + USER_REGISTER_WITH_PASSWORD, PASSWORD_RESET_AND_VERIFY from app.models.notification import PASSWORD_CHANGE as PASSWORD_CHANGE_NOTIF from app.models.user import User +from app.api.helpers.storage import UPLOAD_PATHS + +ticket_blueprint = Blueprint('ticket_blueprint', __name__, url_prefix='/v1') auth_routes = Blueprint('auth', __name__, url_prefix='/v1/auth') @@ -84,16 +91,18 @@ def get_token(provider): return make_response(jsonify(token=response.json()), 
200) -@auth_routes.route('/oauth/login///', methods=['GET']) -def login_user(provider, auth_code): +@auth_routes.route('/oauth/login/', methods=['POST']) +def login_user(provider): if provider == 'facebook': provider_class = FbOAuth() payload = { 'client_id': provider_class.get_client_id(), - 'redirect_uri': request.args.get('redirect_uri'), + 'redirect_uri': provider_class.get_redirect_uri(), 'client_secret': provider_class.get_client_secret(), - 'code': auth_code + 'code': request.args.get('code') } + if not payload['client_id'] or not payload['client_secret']: + raise NotImplementedError({'source': ''}, 'Facebook Login Not Configured') access_token = requests.get('https://graph.facebook.com/v3.0/oauth/access_token', params=payload).json() payload_details = { 'input_token': access_token['access_token'], @@ -111,7 +120,7 @@ def login_user(provider, auth_code): user.facebook_login_hash = random.getrandbits(128) save_to_db(user) return make_response( - jsonify(user_id=user.id, email=user.email, facebook_login_hash=user.facebook_login_hash), 200) + jsonify(user_id=user.id, email=user.email, oauth_hash=user.facebook_login_hash), 200) user = User() user.first_name = user_details['first_name'] @@ -123,7 +132,7 @@ def login_user(provider, auth_code): user.email = user_details['email'] save_to_db(user) - return make_response(jsonify(user_id=user.id, email=user.email, facebook_login_hash=user.facebook_login_hash), + return make_response(jsonify(user_id=user.id, email=user.email, oauth_hash=user.facebook_login_hash), 200) elif provider == 'google': @@ -207,7 +216,10 @@ def reset_password_post(): return NotFoundError({'source': ''}, 'User not found').respond() else: link = make_frontend_url('/reset-password', {'token': user.reset_password}) - send_email_with_action(user, PASSWORD_RESET, app_name=get_settings()['app_name'], link=link) + if user.was_registered_with_order: + send_email_with_action(user, PASSWORD_RESET_AND_VERIFY, app_name=get_settings()['app_name'], link=link) 
+ else: + send_email_with_action(user, PASSWORD_RESET, app_name=get_settings()['app_name'], link=link) return make_response(jsonify(message="Email Sent"), 200) @@ -223,6 +235,8 @@ def reset_password_patch(): return NotFoundError({'source': ''}, 'User Not Found').respond() else: user.password = password + if user.was_registered_with_order: + user.is_verified = True save_to_db(user) return jsonify({ @@ -244,7 +258,12 @@ def change_password(): return NotFoundError({'source': ''}, 'User Not Found').respond() else: if user.is_correct_password(old_password): - + if user.is_correct_password(new_password): + return BadRequestError({'source': ''}, + 'Old and New passwords must be different').respond() + if len(new_password) < 8: + return BadRequestError({'source': ''}, + 'Password should have minimum 8 characters').respond() user.password = new_password save_to_db(user) send_email_with_action(user, PASSWORD_CHANGE, @@ -252,7 +271,7 @@ def change_password(): send_notification_with_action(user, PASSWORD_CHANGE_NOTIF, app_name=get_settings()['app_name']) else: - return BadRequestError({'source': ''}, 'Wrong Password').respond() + return BadRequestError({'source': ''}, 'Wrong Password. 
Please enter correct current password.').respond() return jsonify({ "id": user.id, @@ -260,3 +279,49 @@ def change_password(): "name": user.fullname if user.fullname else None, "password-changed": True }) + + +@ticket_blueprint.route('/tickets/') +@jwt_required() +def ticket_attendee_authorized(order_identifier): + if current_user: + try: + order = Order.query.filter_by(identifier=order_identifier).first() + user_id = order.user.id + except NoResultFound: + return NotFoundError({'source': ''}, 'This ticket is not associated with any order').respond() + if current_user.id == user_id: + key = UPLOAD_PATHS['pdf']['ticket_attendee'].format(identifier=order_identifier) + file_path = '../generated/tickets/{}/{}/'.format(key, generate_hash(key)) + order_identifier + '.pdf' + try: + response = make_response(send_file(file_path)) + response.headers['Content-Disposition'] = 'attachment; filename=ticket-%s.zip' % order_identifier + return response + except FileNotFoundError: + create_pdf_tickets_for_holder(order) + return redirect(url_for('ticket_blueprint.ticket_attendee_authorized', order_identifier=order_identifier)) + else: + return ForbiddenError({'source': ''}, 'Unauthorized Access').respond() + else: + return ForbiddenError({'source': ''}, 'Authentication Required to access ticket').respond() + + +@ticket_blueprint.route('/orders/invoices/') +@jwt_required() +def order_invoices(order_identifier): + if current_user: + try: + order = Order.query.filter_by(identifier=order_identifier).first() + user_id = order.user.id + except NoResultFound: + return NotFoundError({'source': ''}, 'Order Invoice not found').respond() + if current_user.id == user_id: + key = UPLOAD_PATHS['pdf']['order'].format(identifier=order_identifier) + file_path = '../generated/invoices/{}/{}/'.format(key, generate_hash(key)) + order_identifier + '.pdf' + response = make_response(send_file(file_path)) + response.headers['Content-Disposition'] = 'attachment; filename=invoice-%s.zip' % order_identifier + 
return response + else: + return ForbiddenError({'source': ''}, 'Unauthorized Access').respond() + else: + return ForbiddenError({'source': ''}, 'Authentication Required to access Invoice').respond() diff --git a/app/api/data_layers/EventCopyLayer.py b/app/api/data_layers/EventCopyLayer.py index 14cbff8bbb..d521641750 100644 --- a/app/api/data_layers/EventCopyLayer.py +++ b/app/api/data_layers/EventCopyLayer.py @@ -3,7 +3,7 @@ from flask_rest_jsonapi.data_layers.base import BaseDataLayer from sqlalchemy.orm import make_transient -from app.api.helpers.db import safe_query +from app.api.helpers.db import safe_query, save_to_db from app.api.helpers.files import create_save_resized_image from app.models.custom_form import CustomForms from app.models.discount_code import DiscountCode @@ -45,8 +45,7 @@ def create_object(self, data, view_kwargs): make_transient(event) delattr(event, 'id') event.identifier = get_new_event_identifier() - db.session.add(event) - db.session.commit() + save_to_db(event) # Removes access_codes, order_tickets, ticket_tags for the new tickets created. 
for ticket in tickets: @@ -55,8 +54,7 @@ def create_object(self, data, view_kwargs): make_transient(ticket) ticket.event_id = event.id delattr(ticket, 'id') - db.session.add(ticket) - db.session.commit() + save_to_db(ticket) for link in social_links: link_id = link.id @@ -64,8 +62,7 @@ def create_object(self, data, view_kwargs): make_transient(link) link.event_id = event.id delattr(link, 'id') - db.session.add(link) - db.session.commit() + save_to_db(link) for sponsor in sponsors: sponsor_id = sponsor.id @@ -75,8 +72,7 @@ def create_object(self, data, view_kwargs): logo_url = create_save_resized_image(image_file=sponsor.logo_url, resize=False) delattr(sponsor, 'id') sponsor.logo_url = logo_url - db.session.add(sponsor) - db.session.commit() + save_to_db(sponsor) for location in microlocations: location_id = location.id @@ -84,8 +80,7 @@ def create_object(self, data, view_kwargs): make_transient(location) location.event_id = event.id delattr(location, 'id') - db.session.add(location) - db.session.commit() + save_to_db(location) # No sessions are copied for new tracks for track in tracks: @@ -94,8 +89,7 @@ def create_object(self, data, view_kwargs): make_transient(track) track.event_id = event.id delattr(track, 'id') - db.session.add(track) - db.session.commit() + save_to_db(track) for call in speaker_calls: call_id = call.id @@ -103,8 +97,7 @@ def create_object(self, data, view_kwargs): make_transient(call) call.event_id = event.id delattr(call, 'id') - db.session.add(call) - db.session.commit() + save_to_db(call) for code in discount_codes: code_id = code.id @@ -112,8 +105,7 @@ def create_object(self, data, view_kwargs): make_transient(code) code.event_id = event.id delattr(code, 'id') - db.session.add(code) - db.session.commit() + save_to_db(code) for form in custom_forms: form_id = form.id @@ -121,7 +113,6 @@ def create_object(self, data, view_kwargs): make_transient(form) form.event_id = event.id delattr(form, 'id') - db.session.add(form) - db.session.commit() + 
save_to_db(form) return event diff --git a/app/api/discount_codes.py b/app/api/discount_codes.py index 81b1cbafc1..65b6f2c180 100644 --- a/app/api/discount_codes.py +++ b/app/api/discount_codes.py @@ -3,7 +3,7 @@ from sqlalchemy.orm.exc import NoResultFound from app.api.helpers.db import safe_query -from app.api.helpers.exceptions import ConflictException, ForbiddenException, UnprocessableEntity +from app.api.helpers.exceptions import ConflictException, ForbiddenException, UnprocessableEntity, MethodNotAllowed from app.api.helpers.permission_manager import has_access from app.api.helpers.permissions import jwt_required, current_identity from app.api.helpers.utilities import require_relationship @@ -15,6 +15,8 @@ from app.models.ticket import Ticket from app.models.user import User +from datetime import datetime + class DiscountCodeListPost(ResourceList): """ Create Event and Ticket Discount code and Get Event Discount Codes @@ -208,6 +210,13 @@ def before_get(self, args, kwargs): discount = db.session.query(DiscountCode).filter_by(code=kwargs.get('code'), deleted_at=None).first() if discount: kwargs['id'] = discount.id + discount_tz = discount.valid_from.tzinfo + current_time = datetime.now().replace(tzinfo=discount_tz) + if not discount.is_active: + raise MethodNotAllowed({'parameter': '{code}'}, "Discount Code is not active") + elif current_time < discount.valid_from or current_time > discount.valid_till: + raise MethodNotAllowed({'parameter': '{code}'}, + "Discount Code is not active in current time frame") else: raise ObjectNotFound({'parameter': '{code}'}, "DiscountCode: not found") @@ -222,10 +231,12 @@ def before_get(self, args, kwargs): raise ObjectNotFound( {'parameter': '{id}'}, "DiscountCode: not found") - if discount.used_for == 'ticket' and has_access('is_coorganizer', event_id=discount.event_id): +# if discount.used_for == 'ticket' and has_access('is_coorganizer', event_id=discount.event_id): + if discount.used_for == 'ticket': self.schema = 
DiscountCodeSchemaTicket - elif discount.used_for == 'event' and has_access('is_admin'): +# elif discount.used_for == 'event' and has_access('is_admin'): + elif discount.used_for == 'event': self.schema = DiscountCodeSchemaEvent else: raise UnprocessableEntity({'source': ''}, @@ -267,10 +278,12 @@ def before_get_object(self, view_kwargs): self.schema = DiscountCodeSchemaPublic return - if discount.used_for == 'ticket' and has_access('is_coorganizer', event_id=discount.event_id): +# if discount.used_for == 'ticket' and has_access('is_coorganizer', event_id=discount.event_id): + if discount.used_for == 'ticket': self.schema = DiscountCodeSchemaTicket - elif discount.used_for == 'event' and has_access('is_admin'): +# elif discount.used_for == 'event' and has_access('is_admin'): + elif discount.used_for == 'event': self.schema = DiscountCodeSchemaEvent else: raise UnprocessableEntity({'source': ''}, @@ -319,7 +332,7 @@ def before_delete_object(self, discount, view_kwargs): else: raise UnprocessableEntity({'source': ''}, "Please verify your permission") - decorators = (jwt_required,) +# decorators = (jwt_required,) schema = DiscountCodeSchemaTicket data_layer = {'session': db.session, 'model': DiscountCode, @@ -391,7 +404,6 @@ def before_get(self, args, kwargs): raise UnprocessableEntity({'source': ''}, "Please verify your permission") - decorators = (jwt_required,) schema = DiscountCodeSchemaEvent data_layer = {'session': db.session, 'model': DiscountCode} diff --git a/app/api/event_copy.py b/app/api/event_copy.py index 7de8a7c725..988e8edb56 100644 --- a/app/api/event_copy.py +++ b/app/api/event_copy.py @@ -1,7 +1,7 @@ from flask import jsonify, Blueprint, abort, make_response from sqlalchemy.orm import make_transient -from app.api.helpers.db import safe_query +from app.api.helpers.db import safe_query, save_to_db from app.api.helpers.files import create_save_resized_image from app.api.helpers.permission_manager import has_access from app.models.custom_form import 
CustomForms @@ -14,10 +14,15 @@ from app.models.sponsor import Sponsor from app.models.ticket import Ticket from app.models.track import Track +from app.models.users_events_role import UsersEventsRoles event_copy = Blueprint('event_copy', __name__, url_prefix='/v1/events') +def start_sponsor_logo_generation_task(event_id): + from .helpers.tasks import sponsor_logos_url_task + sponsor_logos_url_task.delay(event_id=event_id) + @event_copy.route('//copy', methods=['POST']) def create_event_copy(identifier): id = 'identifier' @@ -39,13 +44,13 @@ def create_event_copy(identifier): custom_forms = CustomForms.query.filter_by(event_id=event.id).all() discount_codes = DiscountCode.query.filter_by(event_id=event.id).all() speaker_calls = SpeakersCall.query.filter_by(event_id=event.id).all() + user_event_roles = UsersEventsRoles.query.filter_by(event_id=event.id).all() db.session.expunge(event) # expunge the object from session make_transient(event) delattr(event, 'id') event.identifier = get_new_event_identifier() - db.session.add(event) - db.session.commit() + save_to_db(event) # Removes access_codes, order_tickets, ticket_tags for the new tickets created. 
for ticket in tickets: @@ -54,8 +59,7 @@ def create_event_copy(identifier): make_transient(ticket) ticket.event_id = event.id delattr(ticket, 'id') - db.session.add(ticket) - db.session.commit() + save_to_db(ticket) for link in social_links: link_id = link.id @@ -63,19 +67,17 @@ def create_event_copy(identifier): make_transient(link) link.event_id = event.id delattr(link, 'id') - db.session.add(link) - db.session.commit() + save_to_db(link) for sponsor in sponsors: sponsor_id = sponsor.id db.session.expunge(sponsor) # expunge the object from session make_transient(sponsor) sponsor.event_id = event.id - logo_url = create_save_resized_image(image_file=sponsor.logo_url, resize=False) delattr(sponsor, 'id') - sponsor.logo_url = logo_url - db.session.add(sponsor) - db.session.commit() + save_to_db(sponsor) + + start_sponsor_logo_generation_task(event.id) for location in microlocations: location_id = location.id @@ -83,8 +85,7 @@ def create_event_copy(identifier): make_transient(location) location.event_id = event.id delattr(location, 'id') - db.session.add(location) - db.session.commit() + save_to_db(location) # No sessions are copied for new tracks for track in tracks: @@ -93,8 +94,7 @@ def create_event_copy(identifier): make_transient(track) track.event_id = event.id delattr(track, 'id') - db.session.add(track) - db.session.commit() + save_to_db(track) for call in speaker_calls: call_id = call.id @@ -102,8 +102,7 @@ def create_event_copy(identifier): make_transient(call) call.event_id = event.id delattr(call, 'id') - db.session.add(call) - db.session.commit() + save_to_db(call) for code in discount_codes: code_id = code.id @@ -111,8 +110,7 @@ def create_event_copy(identifier): make_transient(code) code.event_id = event.id delattr(code, 'id') - db.session.add(code) - db.session.commit() + save_to_db(code) for form in custom_forms: form_id = form.id @@ -120,8 +118,15 @@ def create_event_copy(identifier): make_transient(form) form.event_id = event.id delattr(form, 'id') 
- db.session.add(form) - db.session.commit() + save_to_db(form) + + for user_role in user_event_roles: + user_role_id = user_role.id + db.session.expunge(user_role) + make_transient(user_role) + user_role.event_id = event.id + delattr(user_role, 'id') + save_to_db(user_role) return jsonify({ 'id': event.id, diff --git a/app/api/events.py b/app/api/events.py index bf3555869f..c4b5b9e62a 100644 --- a/app/api/events.py +++ b/app/api/events.py @@ -8,14 +8,11 @@ from sqlalchemy.orm.exc import NoResultFound import pytz from datetime import datetime -import urllib.error - from app.api.bootstrap import api from app.api.data_layers.EventCopyLayer import EventCopyLayer from app.api.helpers.db import save_to_db, safe_query from app.api.helpers.events import create_custom_forms_for_attendees -from app.api.helpers.exceptions import ForbiddenException, ConflictException, UnprocessableEntity -from app.api.helpers.files import create_save_image_sizes +from app.api.helpers.exceptions import ForbiddenException, ConflictException from app.api.helpers.permission_manager import has_access from app.api.helpers.utilities import dasherize from app.api.schema.events import EventSchemaPublic, EventSchema @@ -54,6 +51,40 @@ from app.models.stripe_authorization import StripeAuthorization +def validate_event(user, modules, data): + if not user.can_create_event(): + raise ForbiddenException({'source': ''}, + "Please verify your Email") + elif data.get('is_ticketing_enabled', True) and not modules.ticket_include: + raise ForbiddenException({'source': '/data/attributes/is-ticketing-enabled'}, + "Ticketing is not enabled in the system") + if data.get('can_pay_by_paypal', False) or data.get('can_pay_by_cheque', False) or \ + data.get('can_pay_by_bank', False) or data.get('can_pay_by_stripe', False): + if not modules.payment_include: + raise ForbiddenException({'source': ''}, + "Payment is not enabled in the system") + if data.get('is_donation_enabled', False) and not modules.donation_include: + 
raise ForbiddenException({'source': '/data/attributes/is-donation-enabled'}, + "Donation is not enabled in the system") + + if data.get('state', None) == 'published' and not user.can_publish_event(): + raise ForbiddenException({'source': ''}, + "Only verified accounts can publish events") + + if not data.get('is_event_online') and data.get('state', None) == 'published' \ + and not data.get('location_name', None): + raise ConflictException({'pointer': '/data/attributes/location-name'}, + "Location is required to publish the event") + + if data.get('location_name', None) and data.get('is_event_online'): + raise ConflictException({'pointer': '/data/attributes/location-name'}, + "Online Event does not have any locaton") + + if data.get('searchable_location_name') and data.get('is_event_online'): + raise ConflictException({'pointer': '/data/attributes/searchable-location-name'}, + "Online Event does not have any locaton") + + class EventList(ResourceList): def before_get(self, args, kwargs): """ @@ -120,37 +151,7 @@ def before_post(self, args, kwargs, data=None): """ user = User.query.filter_by(id=kwargs['user_id']).first() modules = Module.query.first() - if data.get('is_ticketing_enabled', False) and not modules.ticket_include: - raise ForbiddenException({'source': '/data/attributes/is-ticketing-enabled'}, - "Ticketing is not enabled in the system") - if data.get('can_pay_by_paypal', False) or data.get('can_pay_by_cheque', False) or \ - data.get('can_pay_by_bank', False) or data.get('can_pay_by_stripe', False): - if not modules.payment_include: - raise ForbiddenException({'source': ''}, - "Payment is not enabled in the system") - if data.get('is_donation_enabled', False) and not modules.donation_include: - raise ForbiddenException({'source': '/data/attributes/is-donation-enabled'}, - "Donation is not enabled in the system") - if not user.can_create_event(): - raise ForbiddenException({'source': ''}, - "Only verified accounts can create events") - - if 
data.get('state', None) == 'published' and not user.can_publish_event(): - raise ForbiddenException({'source': ''}, - "Only verified accounts can publish events") - - if not data.get('is_event_online') and data.get('state', None) == 'published' \ - and not data.get('location_name', None): - raise ConflictException({'pointer': '/data/attributes/location-name'}, - "Location is required to publish the event") - - if data.get('location_name', None) and data.get('is_event_online'): - raise ConflictException({'pointer': '/data/attributes/location-name'}, - "Online Event does not have any locaton") - - if data.get('searchable_location_name') and data.get('is_event_online'): - raise ConflictException({'pointer': '/data/attributes/searchable-location-name'}, - "Online Event does not have any locaton") + validate_event(user, modules, data) def after_create_object(self, event, data, view_kwargs): """ @@ -173,16 +174,9 @@ def after_create_object(self, event, data, view_kwargs): if event.state == 'published' and event.schedule_published_on: start_export_tasks(event) - # TODO: Create an asynchronous celery task for this - # if data.get('original_image_url'): - # try: - # uploaded_images = create_save_image_sizes(data['original_image_url'], 'event-image', event.id) - # except (urllib.error.HTTPError, urllib.error.URLError): - # raise UnprocessableEntity( - # {'source': 'attributes/original-image-url'}, 'Invalid Image URL' - # ) - # self.session.query(Event).filter_by(id=event.id).update(uploaded_images) - # self.session.commit() + + if data.get('original_image_url'): + start_image_resizing_tasks(event, data['original_image_url']) # This permission decorator ensures, you are logged in to create an event # and have filter ?withRole to get events associated with logged in user @@ -462,35 +456,9 @@ def before_patch(self, args, kwargs, data=None): :param data: :return: """ + user = User.query.filter_by(id=current_identity.id).one() modules = Module.query.first() - if 
data.get('is_ticketing_enabled', False) and not modules.ticket_include: - raise ForbiddenException({'source': '/data/attributes/is-ticketing-enabled'}, - "Ticketing is not enabled in the system") - if data.get('can_pay_by_paypal', False) or data.get('can_pay_by_cheque', False) or \ - data.get('can_pay_by_bank', False) or data.get('can_pay_by_stripe', False): - if not modules.payment_include: - raise ForbiddenException({'source': ''}, - "Payment is not enabled in the system") - if data.get('is_donation_enabled', False) and not modules.donation_include: - raise ForbiddenException({'source': '/data/attributes/is-donation-enabled'}, - "Donation is not enabled in the system") - - if data.get('state', None) == 'published' and not current_identity.can_publish_event(): - raise ForbiddenException({'source': ''}, - "Only verified accounts can publish events") - - if data.get('state', None) == 'published' and not data.get('location_name', None) and \ - not data.get('is_event_online'): - raise ConflictException({'pointer': '/data/attributes/location-name'}, - "Location is required to publish the event") - - if data.get('location_name') and data.get('is_event_online'): - raise ConflictException({'pointer': '/data/attributes/location-name'}, - "Online Event does not have any locaton") - - if data.get('searchable_location_name') and data.get('is_event_online'): - raise ConflictException({'pointer': '/data/attributes/searchable-location-name'}, - "Online Event does not have any locaton") + validate_event(user, modules, data) def before_update_object(self, event, data, view_kwargs): """ @@ -500,18 +468,6 @@ def before_update_object(self, event, data, view_kwargs): :param view_kwargs: :return: """ - # TODO: Create an asynchronous celery task for this - # if data.get('original_image_url') and data['original_image_url'] != event.original_image_url: - # try: - # uploaded_images = create_save_image_sizes(data['original_image_url'], 'event-image', event.id) - # except 
(urllib.error.HTTPError, urllib.error.URLError): - # raise UnprocessableEntity( - # {'source': 'attributes/original-image-url'}, 'Invalid Image URL' - # ) - # data['original_image_url'] = uploaded_images['original_image_url'] - # data['large_image_url'] = uploaded_images['large_image_url'] - # data['thumbnail_image_url'] = uploaded_images['thumbnail_image_url'] - # data['icon_image_url'] = uploaded_images['icon_image_url'] if has_access('is_admin') and data.get('deleted_at') != event.deleted_at: event.deleted_at = data.get('deleted_at') @@ -521,6 +477,8 @@ def before_update_object(self, event, data, view_kwargs): if data.get('state', None) == 'published' and not data.get('location_name', None): raise ConflictException({'pointer': '/data/attributes/location-name'}, "Location is required to publish the event") + if data.get('original_image_url') and data['original_image_url'] != event.original_image_url: + start_image_resizing_tasks(event, data['original_image_url']) def after_update_object(self, event, data, view_kwargs): if event.state == 'published' and event.schedule_published_on: @@ -605,6 +563,12 @@ def start_export_tasks(event): create_export_job(task_pentabarf.id, event_id) +def start_image_resizing_tasks(event, original_image_url): + event_id = str(event.id) + from .helpers.tasks import resize_event_images_task + resize_event_images_task.delay(event_id, original_image_url) + + def clear_export_urls(event): event.ical_url = None event.xcal_url = None diff --git a/app/api/faqs.py b/app/api/faqs.py index cbbef39ee2..7ccb24ac30 100644 --- a/app/api/faqs.py +++ b/app/api/faqs.py @@ -56,7 +56,6 @@ def query(self, view_kwargs): return query_ view_kwargs = True - decorators = (jwt_required, ) methods = ['GET', ] schema = FaqSchema data_layer = {'session': db.session, diff --git a/app/api/feedbacks.py b/app/api/feedbacks.py index e606c7ef5e..233d1f4d75 100644 --- a/app/api/feedbacks.py +++ b/app/api/feedbacks.py @@ -61,7 +61,6 @@ def query(self, view_kwargs): return 
query_ view_kwargs = True - decorators = (jwt_required,) methods = ['GET', ] schema = FeedbackSchema data_layer = {'session': db.session, diff --git a/app/api/helpers/csv_jobs_util.py b/app/api/helpers/csv_jobs_util.py index 91530940ff..79cf3b7d42 100644 --- a/app/api/helpers/csv_jobs_util.py +++ b/app/api/helpers/csv_jobs_util.py @@ -47,7 +47,8 @@ def export_attendees_csv(attendees): def export_sessions_csv(sessions): headers = ['Session Title', 'Session Speakers', - 'Session Track', 'Session Abstract', 'Created At', 'Email Sent'] + 'Session Track', 'Session Abstract', 'Created At', 'Email Sent', + 'Level', 'Status', 'Session Type', 'Talk Length'] rows = [headers] for session in sessions: if not session.deleted_at: @@ -64,6 +65,10 @@ def export_sessions_csv(sessions): column.append(strip_tags(session.short_abstract) if session.short_abstract else '') column.append(session.created_at if session.created_at else '') column.append('Yes' if session.is_mail_sent else 'No') + column.append(session.level) + column.append(session.state) + column.append(session.session_type if session.session_type else '') + column.append(len(session.long_abstract)) rows.append(column) return rows diff --git a/app/api/helpers/export_helpers.py b/app/api/helpers/export_helpers.py index 4516f44fba..ed59ce903f 100644 --- a/app/api/helpers/export_helpers.py +++ b/app/api/helpers/export_helpers.py @@ -9,6 +9,7 @@ from flask import current_app as app from flask import request, url_for from flask_jwt import current_identity +from flask_login import current_user from app.api.helpers.db import save_to_db from app.api.helpers.storage import upload, UPLOAD_PATHS, UploadedFile @@ -229,6 +230,13 @@ def export_event_json(event_id, settings): return storage_url +def get_current_user(): + if current_identity: + return current_identity + else: + return current_user + + # HELPERS def create_export_job(task_id, event_id): @@ -237,15 +245,17 @@ def create_export_job(task_id, event_id): """ export_job = 
ExportJob.query.filter_by(event_id=event_id).first() task_url = url_for('tasks.celery_task', task_id=task_id) + current_logged_user = get_current_user() + if export_job: export_job.task = task_url - export_job.user_email = current_identity.email + export_job.user_email = current_logged_user.email export_job.event = Event.query.get(event_id) export_job.starts_at = datetime.now(pytz.utc) else: export_job = ExportJob( - task=task_url, user_email=current_identity.email, + task=task_url, user_email=current_logged_user.email, event=Event.query.get(event_id) ) save_to_db(export_job, 'ExportJob saved') diff --git a/app/api/helpers/files.py b/app/api/helpers/files.py index b6d7223453..c5013bfb9d 100644 --- a/app/api/helpers/files.py +++ b/app/api/helpers/files.py @@ -135,63 +135,56 @@ def create_save_image_sizes(image_file, image_sizes_type, unique_identifier=None if unique_identifier is None: unique_identifier = get_file_name() - large_aspect = image_sizes.full_aspect if image_sizes.full_aspect else True - large_basewidth = image_sizes.full_width if image_sizes.full_width else 1300 - large_height_size = image_sizes.full_height if image_sizes.full_width else 500 - if image_sizes_type == 'speaker-image': - thumbnail_aspect = image_sizes.thumbnail_aspect if image_sizes.thumbnail_aspect else True + thumbnail_aspect = icon_aspect = small_aspect = True thumbnail_basewidth = thumbnail_height_size = image_sizes.thumbnail_size_width_height - else: - thumbnail_aspect = image_sizes.thumbnail_aspect - thumbnail_basewidth = image_sizes.thumbnail_width - thumbnail_height_size = image_sizes.thumbnail_height - - if image_sizes_type == 'speaker-image': - icon_aspect = image_sizes.icon_aspect if image_sizes.icon_aspect else True icon_basewidth = icon_height_size = image_sizes.icon_size_width_height - else: - icon_aspect = image_sizes.icon_aspect - icon_basewidth = image_sizes.icon_width - icon_height_size = image_sizes.icon_height - - if image_sizes_type == 'event-image': - 
original_upload_path = UPLOAD_PATHS['event']['original'].format( - identifier=unique_identifier) - large_upload_path = UPLOAD_PATHS['event']['large'].format( - identifier=unique_identifier) - thumbnail_upload_path = UPLOAD_PATHS['event']['thumbnail'].format( - identifier=unique_identifier) - icon_upload_path = UPLOAD_PATHS['event']['icon'].format( - identifier=unique_identifier) - elif image_sizes_type == 'speaker-image': + small_basewidth = small_height_size = image_sizes.small_size_width_height original_upload_path = UPLOAD_PATHS['user']['original'].format( identifier=unique_identifier) - large_upload_path = UPLOAD_PATHS['user']['large'].format( + small_upload_path = UPLOAD_PATHS['user']['small'].format( identifier=unique_identifier) thumbnail_upload_path = UPLOAD_PATHS['user']['thumbnail'].format( identifier=unique_identifier) icon_upload_path = UPLOAD_PATHS['user']['icon'].format( identifier=unique_identifier) + new_images = { + 'original_image_url': create_save_resized_image(image_file, 0, 0, 0, original_upload_path, resize=False), + 'small_image_url': create_save_resized_image(image_file, small_basewidth, small_aspect, small_height_size, + small_upload_path), + 'thumbnail_image_url': create_save_resized_image(image_file, thumbnail_basewidth, thumbnail_aspect, + thumbnail_height_size, thumbnail_upload_path), + 'icon_image_url': create_save_resized_image(image_file, icon_basewidth, icon_aspect, icon_height_size, + icon_upload_path) + } + else: - original_upload_path = UPLOAD_PATHS[image_sizes_type]['original'].format( + large_aspect = image_sizes.full_aspect if image_sizes.full_aspect else False + large_basewidth = image_sizes.full_width if image_sizes.full_width else 1300 + large_height_size = image_sizes.full_height if image_sizes.full_width else 500 + thumbnail_aspect = image_sizes.thumbnail_aspect if image_sizes.full_aspect else False + thumbnail_basewidth = image_sizes.thumbnail_width if image_sizes.thumbnail_width else 500 + thumbnail_height_size = 
image_sizes.thumbnail_height if image_sizes.thumbnail_height else 200 + icon_aspect = image_sizes.icon_aspect if image_sizes.icon_aspect else False + icon_basewidth = image_sizes.icon_width if image_sizes.icon_width else 75 + icon_height_size = image_sizes.icon_height if image_sizes.icon_height else 30 + original_upload_path = UPLOAD_PATHS['event']['original'].format( identifier=unique_identifier) - large_upload_path = UPLOAD_PATHS[image_sizes_type]['large'].format( + large_upload_path = UPLOAD_PATHS['event']['large'].format( identifier=unique_identifier) - thumbnail_upload_path = UPLOAD_PATHS[image_sizes_type]['thumbnail'].format( + thumbnail_upload_path = UPLOAD_PATHS['event']['thumbnail'].format( identifier=unique_identifier) - icon_upload_path = UPLOAD_PATHS[image_sizes_type]['icon'].format( + icon_upload_path = UPLOAD_PATHS['event']['icon'].format( identifier=unique_identifier) - - new_images = { - 'original_image_url': create_save_resized_image(image_file, 0, 0, 0, original_upload_path, resize=False), - 'large_image_url': create_save_resized_image(image_file, large_basewidth, large_aspect, large_height_size, - large_upload_path), - 'thumbnail_image_url': create_save_resized_image(image_file, thumbnail_basewidth, thumbnail_aspect, - thumbnail_height_size, thumbnail_upload_path), - 'icon_image_url': create_save_resized_image(image_file, icon_basewidth, icon_aspect, icon_height_size, - icon_upload_path) - } + new_images = { + 'original_image_url': create_save_resized_image(image_file, 0, 0, 0, original_upload_path, resize=False), + 'large_image_url': create_save_resized_image(image_file, large_basewidth, large_aspect, large_height_size, + large_upload_path), + 'thumbnail_image_url': create_save_resized_image(image_file, thumbnail_basewidth, thumbnail_aspect, + thumbnail_height_size, thumbnail_upload_path), + 'icon_image_url': create_save_resized_image(image_file, icon_basewidth, icon_aspect, icon_height_size, + icon_upload_path) + } return new_images @@ -264,7 
+257,7 @@ def make_frontend_url(path, parameters=None): )) -def create_save_pdf(pdf_data, key, dir_path='/static/uploads/pdf/temp/'): +def create_save_pdf(pdf_data, key, dir_path='/static/uploads/pdf/temp/', identifier=get_file_name(), upload_dir='static/media/'): """ Create and Saves PDFs from html :param pdf_data: @@ -275,7 +268,7 @@ def create_save_pdf(pdf_data, key, dir_path='/static/uploads/pdf/temp/'): if not os.path.isdir(filedir): os.makedirs(filedir) - filename = get_file_name() + '.pdf' + filename = identifier + '.pdf' dest = filedir + filename file = open(dest, "wb") @@ -283,8 +276,8 @@ def create_save_pdf(pdf_data, key, dir_path='/static/uploads/pdf/temp/'): file.close() uploaded_file = UploadedFile(dest, filename) - upload_path = key.format(identifier=get_file_name()) - new_file = upload(uploaded_file, upload_path) + upload_path = key.format(identifier=identifier) + new_file = upload(uploaded_file, upload_path, upload_dir=upload_dir) # Removing old file created os.remove(dest) diff --git a/app/api/helpers/import_helpers.py b/app/api/helpers/import_helpers.py index c966d478a3..d70f660915 100644 --- a/app/api/helpers/import_helpers.py +++ b/app/api/helpers/import_helpers.py @@ -19,6 +19,8 @@ from app.models import db from app.models.custom_form import CustomForms from app.models.event import Event +from app.models.users_events_role import UsersEventsRoles +from app.models.role import Role from app.models.import_job import ImportJob from app.models.microlocation import Microlocation from app.models.session import Session @@ -27,6 +29,7 @@ from app.models.speaker import Speaker from app.models.sponsor import Sponsor from app.models.track import Track +from app.models.user import User, ORGANIZER IMPORT_SERIES = [ ('social_links', SocialLink), @@ -320,8 +323,7 @@ def create_service_from_json(task_handle, data, srv, event_id, service_ids=None) obj['event_id'] = event_id # create object new_obj = srv[1](**obj) - db.session.add(new_obj) - db.session.commit() + 
save_to_db(new_obj) ids[old_id] = new_obj.id # add uploads to queue _upload_media_queue(srv, new_obj) @@ -329,7 +331,7 @@ def create_service_from_json(task_handle, data, srv, event_id, service_ids=None) return ids -def import_event_json(task_handle, zip_path): +def import_event_json(task_handle, zip_path, creator_id): """ Imports and creates event from json zip """ @@ -353,11 +355,14 @@ def import_event_json(task_handle, zip_path): srv = ('event', Event) data = _delete_fields(srv, data) new_event = Event(**data) - db.session.add(new_event) - db.session.commit() + save_to_db(new_event) + role = Role.query.filter_by(name=ORGANIZER).first() + user = User.query.filter_by(id=creator_id).first() + uer = UsersEventsRoles(user_id=user.id, event_id=new_event.id, role_id=role.id) + save_to_db(uer, 'Event Saved') write_file( path + '/social_links', - json.dumps(data.get('social_links', [])) + json.dumps(data.get('social_links', [])).encode('utf-8') ) # save social_links _upload_media_queue(srv, new_event) except Exception as e: diff --git a/app/api/helpers/jwt.py b/app/api/helpers/jwt.py index ed3338a68a..e70bba1049 100644 --- a/app/api/helpers/jwt.py +++ b/app/api/helpers/jwt.py @@ -14,14 +14,10 @@ def jwt_authenticate(email, password): :param password: :return: """ - user = User.query.filter_by(email=email).first() + user = User.query.filter_by(email=email.strip(), deleted_at=None).first() if user is None: return None - auth_ok = user.facebook_login_hash == password or check_password_hash( - password.encode('utf-8'), - user.password.encode('utf-8'), - user.salt - ) + auth_ok = user.facebook_login_hash == password or user.is_correct_password(password) if auth_ok: return user else: diff --git a/app/api/helpers/order.py b/app/api/helpers/order.py index c15c443467..66847b6d68 100644 --- a/app/api/helpers/order.py +++ b/app/api/helpers/order.py @@ -47,13 +47,14 @@ def set_expiry_for_order(order, override=False): def create_pdf_tickets_for_holder(order): """ - Create tickets for 
the holders of an order. + Create tickets and invoices for the holders of an order. :param order: The order for which to create tickets for. """ if order.status == 'completed': pdf = create_save_pdf(render_template('pdf/ticket_purchaser.html', order=order), UPLOAD_PATHS['pdf']['ticket_attendee'], - dir_path='/static/uploads/pdf/tickets/') + dir_path='/static/uploads/pdf/tickets/', identifier=order.identifier, upload_dir='generated/tickets/') + order.tickets_pdf_url = pdf for holder in order.ticket_holders: @@ -61,13 +62,17 @@ def create_pdf_tickets_for_holder(order): # holder is not the order buyer. pdf = create_save_pdf(render_template('pdf/ticket_attendee.html', order=order, holder=holder), UPLOAD_PATHS['pdf']['ticket_attendee'], - dir_path='/static/uploads/pdf/tickets/') + dir_path='/static/uploads/pdf/tickets/', identifier=order.identifier, upload_dir='generated/tickets/') else: # holder is the order buyer. pdf = order.tickets_pdf_url holder.pdf_url = pdf save_to_db(holder) + # create order invoices pdf + create_save_pdf(render_template('pdf/order_invoice.html', order=order, event=order.event), + UPLOAD_PATHS['pdf']['order'], dir_path='/static/uploads/pdf/tickets/', + identifier=order.identifier, upload_dir='generated/invoices/') save_to_db(order) diff --git a/app/api/helpers/payment.py b/app/api/helpers/payment.py index 1ce4e2d8c6..3780f1fafc 100644 --- a/app/api/helpers/payment.py +++ b/app/api/helpers/payment.py @@ -136,20 +136,24 @@ def configure_paypal(): Configure the paypal sdk :return: Credentials """ - # Use Sandbox by default. settings = get_settings() - paypal_mode = 'sandbox' - paypal_client = settings.get('paypal_sandbox_client', None) - paypal_secret = settings.get('paypal_sandbox_secret', None) + # Use Sandbox by default. 
+ paypal_mode = settings.get('paypal_mode', + 'live' if (settings['app_environment'] == Environment.PRODUCTION) else 'sandbox') + paypal_key = None + if paypal_mode == 'sandbox': + paypal_key = 'paypal_sandbox' + elif paypal_mode == 'live': + paypal_key = 'paypal' + + if not paypal_key: + raise ConflictException({'pointer': ''}, "Paypal Mode must be 'live' or 'sandbox'") - # Switch to production if paypal_mode is production. - if settings['paypal_mode'] == Environment.PRODUCTION: - paypal_mode = 'live' - paypal_client = settings.get('paypal_client', None) - paypal_secret = settings.get('paypal_secret', None) + paypal_client = settings.get('{}_client'.format(paypal_key), None) + paypal_secret = settings.get('{}_secret'.format(paypal_key), None) if not paypal_client or not paypal_secret: - raise ConflictException({'pointer': ''}, "Payments through Paypal hasn't been configured on the platform") + raise ConflictException({'pointer': ''}, "Payments through Paypal have not been configured on the platform") paypalrestsdk.configure({ "mode": paypal_mode, diff --git a/app/api/helpers/permission_manager.py b/app/api/helpers/permission_manager.py index 8f0f12b4e8..cd32b4646c 100644 --- a/app/api/helpers/permission_manager.py +++ b/app/api/helpers/permission_manager.py @@ -8,6 +8,7 @@ from app.models.session import Session from app.models.event import Event from app.models.order import Order +from app.models.speaker import Speaker from app.api.helpers.jwt import get_identity @@ -158,9 +159,34 @@ def is_speaker_for_session(view, view_args, view_kwargs, *args, **kwargs): if speaker.user_id == user.id: return view(*view_args, **view_kwargs) + if session.creator_id == user.id: + return view(*view_args, **view_kwargs) + return ForbiddenError({'source': ''}, 'Access denied.').respond() +@jwt_required +def is_speaker_itself_or_admin(view, view_args, view_kwargs, *args, **kwargs): + """ + Allows admin and super admin access to any resource irrespective of id. 
+ Otherwise the user can only access his/her resource. + """ + user = current_identity + + if user.is_admin or user.is_super_admin: + return view(*view_args, **view_kwargs) + + if user.is_organizer(kwargs['event_id']) or user.is_coorganizer(kwargs['event_id']): + return view(*view_args, **view_kwargs) + + if ('model' in kwargs) and (kwargs['model'] == Speaker): + query_user = Speaker.query.filter_by(email=user._email).first() + if query_user: + return view(*view_args, **view_kwargs) + + return ForbiddenError({'source': ''}, 'Detail ownership is required, access denied.').respond() + + @jwt_required def is_session_self_submitted(view, view_args, view_kwargs, *args, **kwargs): """ @@ -304,7 +330,8 @@ def create_event(view, view_args, view_kwargs, *args, **kwargs): 'is_user_itself': is_user_itself, 'is_coorganizer_endpoint_related_to_event': is_coorganizer_endpoint_related_to_event, 'is_registrar_or_user_itself': is_registrar_or_user_itself, - 'is_coorganizer_but_not_admin': is_coorganizer_but_not_admin + 'is_coorganizer_but_not_admin': is_coorganizer_but_not_admin, + 'is_speaker_itself_or_admin': is_speaker_itself_or_admin } diff --git a/app/api/helpers/scheduled_jobs.py b/app/api/helpers/scheduled_jobs.py index 12aa837f62..4ed9e0ebc0 100644 --- a/app/api/helpers/scheduled_jobs.py +++ b/app/api/helpers/scheduled_jobs.py @@ -12,6 +12,7 @@ from app.api.helpers.utilities import monthdelta from app.models import db from app.models.event import Event +from app.models.session import Session from app.models.event_invoice import EventInvoice from app.models.order import Order from app.models.ticket import Ticket @@ -46,6 +47,16 @@ def send_after_event_mail(): send_notif_after_event(organizer.user.email, event.name) +def change_session_state_on_event_completion(): + from app import current_app as app + with app.app_context(): + sessions_to_be_changed = Session.query.join(Event).filter(Session.state == 'pending')\ + .filter(Event.ends_at < datetime.datetime.now()) + for 
session in sessions_to_be_changed: + session.state = 'rejected' + save_to_db(session, 'Changed {} session state to rejected'.format(session.title)) + + def send_event_fee_notification(): from app import current_app as app with app.app_context(): @@ -129,3 +140,12 @@ def send_event_fee_notification_followup(): app_name, link, incomplete_invoice.event.id) + + +def expire_pending_tickets_after_one_day(): + from app import current_app as app + with app.app_context(): + db.session.query(Order).filter(Order.status == 'pending', + (datetime.datetime.today() - Order.created_at).days > 1).\ + update({'status': 'expired'}) + db.session.commit() diff --git a/app/api/helpers/storage.py b/app/api/helpers/storage.py index e3ec51df21..c935bd963d 100644 --- a/app/api/helpers/storage.py +++ b/app/api/helpers/storage.py @@ -46,7 +46,7 @@ 'avatar': 'users/{user_id}/avatar', 'thumbnail': 'users/{identifier}/thumbnail', 'original': 'users/{identifier}/original', - 'large': 'users/{identifier}/large', + 'small': 'users/{identifier}/small', 'icon': 'users/{identifier}/icon' }, 'temp': { @@ -79,7 +79,8 @@ 'system_image': 'event_topic/{event_topic_id}/system_image' }, 'pdf': { - 'ticket_attendee': 'attendees/tickets/pdf/{identifier}' + 'ticket_attendee': 'attendees/tickets/pdf/{identifier}', + 'order': 'orders/invoices/pdf/{identifier}' } } @@ -130,7 +131,7 @@ def save(self, path): # MAIN ######### -def upload(uploaded_file, key, **kwargs): +def upload(uploaded_file, key, upload_dir='static/media/', **kwargs): """ Upload handler """ @@ -152,15 +153,15 @@ def upload(uploaded_file, key, **kwargs): elif gs_bucket_name and gs_key and gs_secret and storage_place == 'gs': return upload_to_gs(gs_bucket_name, gs_key, gs_secret, uploaded_file, key, **kwargs) else: - return upload_local(uploaded_file, key, **kwargs) + return upload_local(uploaded_file, key, upload_dir, **kwargs) -def upload_local(uploaded_file, key, **kwargs): +def upload_local(uploaded_file, key, upload_dir='static/media/', 
**kwargs): """ Uploads file locally. Base dir - static/media/ """ filename = secure_filename(uploaded_file.filename) - file_relative_path = 'static/media/' + key + '/' + generate_hash(key) + '/' + filename + file_relative_path = upload_dir + key + '/' + generate_hash(key) + '/' + filename file_path = app.config['BASE_DIR'] + '/' + file_relative_path dir_path = file_path.rsplit('/', 1)[0] # delete current diff --git a/app/api/helpers/system_mails.py b/app/api/helpers/system_mails.py index 2a56fa28ba..3ea3b154c5 100644 --- a/app/api/helpers/system_mails.py +++ b/app/api/helpers/system_mails.py @@ -7,7 +7,7 @@ SESSION_SCHEDULE, NEXT_EVENT, EVENT_PUBLISH, AFTER_EVENT, USER_CHANGE_EMAIL, USER_REGISTER_WITH_PASSWORD, \ TICKET_PURCHASED, EVENT_EXPORTED, EVENT_EXPORT_FAIL, MAIL_TO_EXPIRED_ORDERS, MONTHLY_PAYMENT_EMAIL, \ MONTHLY_PAYMENT_FOLLOWUP_EMAIL, EVENT_IMPORTED, EVENT_IMPORT_FAIL, TICKET_PURCHASED_ORGANIZER, TICKET_CANCELLED, \ - TICKET_PURCHASED_ATTENDEE, PASSWORD_CHANGE + TICKET_PURCHASED_ATTENDEE, PASSWORD_CHANGE, PASSWORD_RESET_AND_VERIFY MAILS = { EVENT_PUBLISH: { @@ -114,6 +114,15 @@ u"Please use the following link to reset your password.
{link}" ) }, + PASSWORD_RESET_AND_VERIFY: { + 'recipient': 'User', + 'subject': u'{app_name}: Reset your password and verify your account', + 'message': ( + u"Please use the following link to reset your password and verify your account." + + "
{link}" + ) + + }, PASSWORD_CHANGE: { 'recipient': 'User', 'subject': u'{app_name}: Password Change', diff --git a/app/api/helpers/system_notifications.py b/app/api/helpers/system_notifications.py index 8a064099c8..c1647524d6 100644 --- a/app/api/helpers/system_notifications.py +++ b/app/api/helpers/system_notifications.py @@ -413,7 +413,7 @@ def get_invite_papers_notification_actions(cfs_link, submit_link): SESSION_ACCEPT_REJECT: { 'title': u'Session {session_name} has been {acceptance}', 'message': u"The session {session_name} has been" + - u"{acceptance} by the Organizer.", + u" {acceptance} by the Organizer.", 'recipient': 'Speaker', }, INVITE_PAPERS: { diff --git a/app/api/helpers/tasks.py b/app/api/helpers/tasks.py index 147b1dcf5b..3ddcf9b703 100644 --- a/app/api/helpers/tasks.py +++ b/app/api/helpers/tasks.py @@ -19,6 +19,7 @@ import logging import traceback +from app.api.helpers.files import create_save_image_sizes, create_save_resized_image from app.api.helpers.request_context_task import RequestContextTask from app.api.helpers.mail import send_export_mail, send_import_mail from app.api.helpers.notification import send_notif_after_import, send_notif_after_export @@ -30,6 +31,7 @@ from app.api.imports import import_event_task_base from app.models.event import Event from app.models.order import Order +from app.models.sponsor import Sponsor from app.models.discount_code import DiscountCode from app.models.ticket_holder import TicketHolder from app.api.helpers.ICalExporter import ICalExporter @@ -38,6 +40,7 @@ from app.api.helpers.storage import UploadedFile, upload, UPLOAD_PATHS from app.api.helpers.db import save_to_db from app.api.helpers.files import create_save_pdf +import urllib.error celery = make_celery() @@ -49,12 +52,17 @@ def send_email_task(payload, headers): data["from"] = {"email": payload["from"]} data["subject"] = payload["subject"] data["content"] = [{"type": "text/html", "value": payload["html"]}] - requests.post( - 
"https://api.sendgrid.com/v3/mail/send", - data=json.dumps(data), - headers=headers, - verify=False # doesn't work with verification in celery context - ) + logging.info('Sending an email regarding {} on behalf of {}'.format(data["subject"], data["from"])) + try: + requests.post( + "https://api.sendgrid.com/v3/mail/send", + data=json.dumps(data), + headers=headers, + verify=False # doesn't work with verification in celery context + ) + logging.info('Email sent successfully') + except Exception: + logging.exception('Error occured while sending the email') @celery.task(name='send.email.post.smtp') @@ -77,9 +85,71 @@ def send_mail_via_smtp_task(config, payload): message.plain = strip_tags(payload['html']) message.rich = payload['html'] mailer.send(message) + logging.info('Message sent via SMTP') mailer.stop() +@celery.task(base=RequestContextTask, name='resize.event.images', bind=True) +def resize_event_images_task(self, event_id, original_image_url): + event = safe_query(db, Event, 'id', event_id, 'event_id') + try: + logging.info('Event image resizing tasks started {}'.format(original_image_url)) + uploaded_images = create_save_image_sizes(original_image_url, 'event-image', event.id) + event.large_image_url = uploaded_images['large_image_url'] + event.thumbnail_image_url = uploaded_images['thumbnail_image_url'] + event.icon_image_url = uploaded_images['icon_image_url'] + save_to_db(event) + logging.info('Resized images saved successfully for event with id: {}'.format(event_id)) + except (urllib.error.HTTPError, urllib.error.URLError): + logging.exception('Error encountered while generating resized images for event with id: {}'.format(event_id)) + + +@celery.task(base=RequestContextTask, name='resize.user.images', bind=True) +def resize_user_images_task(self, user_id, original_image_url): + user = safe_query(db, User, 'id', user_id, 'user_id') + try: + logging.info('User image resizing tasks started {}'.format(original_image_url)) + uploaded_images = 
create_save_image_sizes(original_image_url, 'speaker-image', user.id) + user.original_image_url = uploaded_images['original_image_url'] + user.avatar_url = uploaded_images['original_image_url'] + user.small_image_url = uploaded_images['thumbnail_image_url'] + user.thumbnail_image_url = uploaded_images['thumbnail_image_url'] + user.icon_image_url = uploaded_images['icon_image_url'] + save_to_db(user) + logging.info('Resized images saved successfully for user with id: {}'.format(user_id)) + except (urllib.error.HTTPError, urllib.error.URLError): + logging.exception('Error encountered while generating resized images for user with id: {}'.format(user_id)) + + +@celery.task(base=RequestContextTask, name='sponsor.logo.urls', bind=True) +def sponsor_logos_url_task(self, event_id): + sponsors = Sponsor.query.filter_by(event_id=event_id, deleted_at=None).all() + for sponsor in sponsors: + try: + logging.info('Sponsor logo url generation task started {}'.format(sponsor.logo_url)) + new_logo_url = create_save_resized_image(image_file=sponsor.logo_url, resize=False) + sponsor.logo_url = new_logo_url + save_to_db(sponsor) + logging.info('Sponsor logo url successfully generated') + except(urllib.error.HTTPError, urllib.error.URLError): + logging.exception('Error encountered while logo generation') + + +@celery.task(base=RequestContextTask, name='resize.speaker.images', bind=True) +def resize_speaker_images_task(self, speaker_id, photo_url): + speaker = safe_query(db, Speaker, 'id', speaker_id, 'speaker_id') + try: + logging.info('Speaker image resizing tasks started for speaker with id {}'.format(speaker_id)) + uploaded_images = create_save_image_sizes(photo_url, 'speaker-image', speaker_id) + speaker.small_image_url = uploaded_images['small_image_url'] + speaker.thumbnail_image_url = uploaded_images['thumbnail_image_url'] + speaker.icon_image_url = uploaded_images['icon_image_url'] + save_to_db(speaker) + logging.info('Resized images saved successfully for speaker with id: 
{}'.format(speaker_id)) + except (urllib.error.HTTPError, urllib.error.URLError): + logging.exception('Error encountered while generating resized images for event with id: {}'.format(speaker_id)) + + @celery.task(base=RequestContextTask, name='export.event', bind=True) def export_event_task(self, email, event_id, settings): event = safe_query(db, Event, 'id', event_id, 'event_id') @@ -97,9 +167,8 @@ def export_event_task(self, email, event_id, settings): send_export_mail(email=email, event_name=event.name, download_url=download_url) send_notif_after_export(user=user, event_name=event.name, download_url=download_url) except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} - logging.info('Error in exporting.. sending email') + logging.warning('Error in exporting.. sending email') send_export_mail(email=email, event_name=event.name, error_text=str(e)) send_notif_after_export(user=user, event_name=event.name, error_text=str(e)) @@ -120,8 +189,8 @@ def import_event_task(self, email, file, source_type, creator_id): send_notif_after_import(user=user, event_name=result[ 'event_name'], event_url=result['url']) except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.warning('Error in importing the event') update_import_job(task_id, str(e), e.status if hasattr(e, 'status') else 'FAILURE') send_import_mail(email=email, error_text=str(e)) send_notif_after_import(user=user, error_text=str(e)) @@ -158,8 +227,8 @@ def export_ical_task(self, event_id, temp=True): save_to_db(event) except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in ical download') return result @@ -193,8 +262,8 @@ def export_xcal_task(self, event_id, temp=True): save_to_db(event) except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in xcal download') return result @@ -228,8 
+297,8 @@ def export_pentabarf_task(self, event_id, temp=True): save_to_db(event) except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in pentabarf download') return result @@ -258,8 +327,8 @@ def export_order_csv_task(self, event_id): 'download_url': order_csv_url } except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting as CSV') return result @@ -278,8 +347,8 @@ def export_order_pdf_task(self, event_id): 'download_url': order_pdf_url } except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting order as pdf') return result @@ -307,8 +376,9 @@ def export_attendees_csv_task(self, event_id): 'download_url': attendees_csv_url } except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting attendees list as CSV') + return result @@ -324,8 +394,10 @@ def export_attendees_pdf_task(self, event_id): 'download_url': attendees_pdf_url } except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting attendees list as PDF') + + return result @@ -353,8 +425,8 @@ def export_sessions_csv_task(self, event_id): 'download_url': sessions_csv_url } except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting sessions as CSV') return result @@ -382,8 +454,8 @@ def export_speakers_csv_task(self, event_id): 'download_url': speakers_csv_url } except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting speakers list as CSV') return result @@ -399,8 +471,8 @@ def export_sessions_pdf_task(self, event_id): 'download_url': sessions_pdf_url } except Exception as e: - 
print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting sessions as PDF') return result @@ -416,7 +488,15 @@ def export_speakers_pdf_task(self, event_id): 'download_url': speakers_pdf_url } except Exception as e: - print(traceback.format_exc()) result = {'__error': True, 'result': str(e)} + logging.error('Error in exporting speakers as PDF') return result + + +@celery.task(base=RequestContextTask, name='delete.translations', bind=True) +def delete_translations(self, zip_file_path): + try: + os.remove(zip_file_path) + except: + logging.exception('Error while deleting translations zip file') diff --git a/app/api/helpers/third_party_auth.py b/app/api/helpers/third_party_auth.py index 0e349cf55b..7fae0ff095 100644 --- a/app/api/helpers/third_party_auth.py +++ b/app/api/helpers/third_party_auth.py @@ -4,6 +4,7 @@ from flask import request from app.settings import get_settings +from app.api.helpers.files import make_frontend_url class GoogleOAuth(object): @@ -59,9 +60,9 @@ def get_client_secret(cls): @classmethod def get_redirect_uri(cls): - url = urllib.parse.urlparse(request.url) - fb_redirect_uri = url.scheme + '://' + url.netloc + '/fCallback' - return fb_redirect_uri + url = make_frontend_url( + '/oauth/callback?provider=facebook') + return url @classmethod def get_auth_uri(cls): diff --git a/app/api/helpers/utilities.py b/app/api/helpers/utilities.py index f2ec42a4ab..6d4ad07b41 100644 --- a/app/api/helpers/utilities.py +++ b/app/api/helpers/utilities.py @@ -112,6 +112,8 @@ def update_state(task_handle, state, result=None): state=state, meta=result ) +static_page = 'https://eventyay.com/' +image_link = 'https://www.gstatic.com/webp/gallery/1.jpg' # store task results in case of testing # state and info diff --git a/app/api/import_jobs.py b/app/api/import_jobs.py new file mode 100644 index 0000000000..cb7465f214 --- /dev/null +++ b/app/api/import_jobs.py @@ -0,0 +1,35 @@ +from flask_rest_jsonapi import 
ResourceList, ResourceDetail + +from app.api.schema.import_jobs import ImportJobSchema +from app.models import db +from app.models.import_job import ImportJob +from app.api.helpers.permissions import jwt_required +from flask_jwt import current_identity + + +class ImportJobList(ResourceList): + """ + List ImportJob + """ + def query(self, kwargs): + query_ = self.session.query(ImportJob) + query_ = query_.filter_by(user_id=current_identity.id) + return query_ + + decorators = (jwt_required,) + schema = ImportJobSchema + data_layer = {'session': db.session, + 'model': ImportJob, + 'methods': { + 'query': query, + }} + + +class ImportJobDetail(ResourceDetail): + """ + ImportJob Detail by id + """ + decorators = (jwt_required, ) + schema = ImportJobSchema + data_layer = {'session': db.session, + 'model': ImportJob} diff --git a/app/api/imports.py b/app/api/imports.py index a19be37423..d45f9d34c2 100644 --- a/app/api/imports.py +++ b/app/api/imports.py @@ -36,7 +36,7 @@ def import_event(source_type): def import_event_task_base(task_handle, file_path, source_type='json', creator_id=None): new_event = None if source_type == 'json': - new_event = import_event_json(task_handle, file_path) + new_event = import_event_json(task_handle, file_path, creator_id) if new_event: url = make_frontend_url(path='/events/{identifier}'.format(identifier=new_event.identifier)) return {'url': url, diff --git a/app/api/modules.py b/app/api/modules.py index 3c585f99da..ba81b35091 100644 --- a/app/api/modules.py +++ b/app/api/modules.py @@ -6,7 +6,6 @@ from app.models.module import Module from app.api.helpers.exceptions import ConflictException - class ModuleDetail(ResourceDetail): """ module detail by id @@ -29,9 +28,6 @@ def before_patch(self, args, kwargs, data=None): :param data: :return: """ - if data.get('donation_include'): - raise ConflictException({'pointer': '/data/attributes/donation-include'}, - "No donation support in the system") decorators = (api.has_permission('is_admin', 
methods='PATCH', id='1'),) methods = ['GET', 'PATCH'] diff --git a/app/api/orders.py b/app/api/orders.py index 509e213d38..9e2c6f1005 100644 --- a/app/api/orders.py +++ b/app/api/orders.py @@ -50,10 +50,6 @@ def before_post(self, args, kwargs, data=None): """ require_relationship(['event'], data) - if not current_user.is_verified: - raise ForbiddenException({'source': ''}, - "Only verified accounts can place orders") - # Create on site attendees. if request.args.get('onsite', False): create_onsite_attendees_for_order(data) @@ -91,6 +87,8 @@ def before_create_object(self, data, view_kwargs): raise ConflictException({'pointer': '/data/attributes/amount'}, "Amount cannot be null for a paid order") + if not data.get('amount'): + data['amount'] = 0 # Apply discount only if the user is not event admin if data.get('discount') and not has_access('is_coorganizer', event_id=data['event']): discount_code = safe_query_without_soft_deleted_entries(self, DiscountCode, 'id', data['discount'], @@ -136,8 +134,8 @@ def after_create_object(self, order, data, view_kwargs): order.quantity = order.tickets_count save_to_db(order) - if not has_access('is_coorganizer', event_id=data['event']): - TicketingManager.calculate_update_amount(order) +# if not has_access('is_coorganizer', event_id=data['event']): +# TicketingManager.calculate_update_amount(order) # send e-mail and notifications if the order status is completed if order.status == 'completed': @@ -279,7 +277,11 @@ def before_update_object(self, order, data, view_kwargs): "You cannot update a non-pending order") else: for element in data: - if data[element] and data[element]\ + if element == 'is_billing_enabled' and order.status == 'completed' and data[element]\ + and data[element] != getattr(order, element, None): + raise ForbiddenException({'pointer': 'data/{}'.format(element)}, + "You cannot update {} of a completed order".format(element)) + elif data[element] and data[element]\ != getattr(order, element, None) and element not in 
get_updatable_fields(): raise ForbiddenException({'pointer': 'data/{}'.format(element)}, "You cannot update {} of an order".format(element)) @@ -305,6 +307,15 @@ def after_update_object(self, order, data, view_kwargs): # delete the attendees so that the tickets are unlocked. delete_related_attendees_for_order(order) + elif order.status == 'completed': + send_email_to_attendees(order, current_user.id) + send_notif_to_attendees(order, current_user.id) + + order_url = make_frontend_url(path='/orders/{identifier}'.format(identifier=order.identifier)) + for organizer in order.event.organizers: + send_notif_ticket_purchase_organizer(organizer, order.invoice_number, order_url, order.event.name, + order.identifier) + def before_delete_object(self, order, view_kwargs): """ method to check for proper permissions for deleting @@ -319,8 +330,7 @@ def before_delete_object(self, order, view_kwargs): # This is to ensure that the permissions manager runs and hence changes the kwarg from order identifier to id. 
decorators = (jwt_required, api.has_permission( - 'auth_required', methods="PATCH,DELETE", fetch="user_id", model=Order),) - + 'auth_required', methods="PATCH,DELETE", model=Order),) schema = OrderSchema data_layer = {'session': db.session, 'model': Order, diff --git a/app/api/role_invites.py b/app/api/role_invites.py index d4df950ae1..567777aded 100644 --- a/app/api/role_invites.py +++ b/app/api/role_invites.py @@ -17,6 +17,12 @@ from app.models.user import User from app.models.users_events_role import UsersEventsRoles from app.settings import get_settings +from flask import jsonify, request, Blueprint +from app.api.helpers.errors import NotFoundError +from sqlalchemy.orm.exc import NoResultFound + + +role_invites_misc_routes = Blueprint('role_invites_misc', __name__, url_prefix='/v1') class RoleInviteListPost(ResourceList): @@ -45,19 +51,10 @@ def after_create_object(self, role_invite, data, view_kwargs): :return: """ user = User.query.filter_by(email=role_invite.email).first() - if 'status' in data and data['status'] == 'accepted': - role = Role.query.filter_by(name=role_invite.role_name).first() - event = Event.query.filter_by(id=role_invite.event_id).first() - uer = UsersEventsRoles.query.filter_by(user=user).filter_by( - event=event).filter_by(role=role).first() - if not uer: - uer = UsersEventsRoles(user, event, role) - save_to_db(uer, 'Role Invite accepted') - event = Event.query.filter_by(id=role_invite.event_id).first() frontend_url = get_settings()['frontend_url'] - link = "{}/events/{}/role-invites/{}" \ - .format(frontend_url, event.id, role_invite.hash) + link = "{}/e/{}/role-invites?token={}" \ + .format(frontend_url, event.identifier, role_invite.hash) send_email_role_invite(role_invite.email, role_invite.role_name, event.name, link) if user: @@ -90,7 +87,7 @@ def query(self, view_kwargs): view_kwargs = True methods = ['GET'] - decorators = (api.has_permission('is_organizer', fetch='event_id', fetch_as="event_id"),) + decorators = 
(api.has_permission('is_coorganizer', fetch='event_id', fetch_as="event_id"),) schema = RoleInviteSchema data_layer = {'session': db.session, 'model': RoleInvite, @@ -122,23 +119,12 @@ def before_update_object(self, role_invite, data, view_kwargs): 'status' not in data): raise UnprocessableEntity({'source': ''}, "You can only change your status") - def after_update_object(self, role_invite, data, view_kwargs): - user = User.query.filter_by(email=role_invite.email).first() - if 'status' in data and data['status'] == 'accepted': - role = Role.query.filter_by(name=role_invite.role_name).first() - event = Event.query.filter_by(id=role_invite.event_id).first() - uer = UsersEventsRoles.query.filter_by(user=user).filter_by( - event=event).filter_by(role=role).first() - if not uer: - uer = UsersEventsRoles(user, event, role) - save_to_db(uer, 'Role Invite accepted') decorators = (api.has_permission('is_organizer', methods="DELETE", fetch="event_id", fetch_as="event_id", model=RoleInvite),) schema = RoleInviteSchema data_layer = {'session': db.session, 'model': RoleInvite, - 'methods': {'before_update_object': before_update_object, - 'after_update_object': after_update_object}} + 'methods': {'before_update_object': before_update_object}} class RoleInviteRelationship(ResourceRelationship): @@ -149,3 +135,51 @@ class RoleInviteRelationship(ResourceRelationship): schema = RoleInviteSchema data_layer = {'session': db.session, 'model': RoleInvite} + + +@role_invites_misc_routes.route('/role_invites/accept-invite', methods=['POST']) +def accept_invite(): + token = request.json['data']['token'] + try: + role_invite = RoleInvite.query.filter_by(hash=token).one() + except NoResultFound: + return NotFoundError({'source': ''}, 'Role Invite Not Found').respond() + else: + try: + user = User.query.filter_by(email=role_invite.email).first() + except NoResultFound: + return NotFoundError({'source': ''}, 'User corresponding to role invite not Found').respond() + try: + role = 
Role.query.filter_by(name=role_invite.role_name).first() + except NoResultFound: + return NotFoundError({'source': ''}, 'Role corresponding to role invite not Found').respond() + event = Event.query.filter_by(id=role_invite.event_id).first() + uer = UsersEventsRoles.query.filter_by(user=user).filter_by( + event=event).filter_by(role=role).first() + if not uer: + role_invite.status = "accepted" + save_to_db(role_invite, 'Role Invite Accepted') + uer = UsersEventsRoles(user, event, role) + save_to_db(uer, 'User Event Role Created') + if not user.is_verified: + user.is_verified = True + save_to_db(user, 'User verified') + + return jsonify({ + "email": user.email, + "event": role_invite.event_id, + "name": user.fullname if user.fullname else None + }) + + +@role_invites_misc_routes.route('/role_invites/user', methods=['POST']) +def fetch_user(): + token = request.json['data']['token'] + try: + role_invite = RoleInvite.query.filter_by(hash=token).one() + except NoResultFound: + return NotFoundError({'source': ''}, 'Role Invite Not Found').respond() + else: + return jsonify({ + "email": role_invite.email + }) diff --git a/app/api/schema/admin_statistics_schema/events.py b/app/api/schema/admin_statistics_schema/events.py index 7794db3e40..788c139a11 100644 --- a/app/api/schema/admin_statistics_schema/events.py +++ b/app/api/schema/admin_statistics_schema/events.py @@ -26,11 +26,11 @@ class Meta: def events_draft_count(self, obj): events = Event.query.filter(Event.ends_at > datetime.now(pytz.utc)) - return get_count(events.filter_by(state='draft')) + return get_count(events.filter_by(state='draft', deleted_at=None)) def events_published_count(self, obj): events = Event.query.filter(Event.ends_at > datetime.now(pytz.utc)) - return get_count(events.filter_by(state='published')) + return get_count(events.filter_by(state='published', deleted_at=None)) def events_past_count(self, obj): return get_count(Event.query.filter(Event.ends_at < datetime.now(pytz.utc))) diff --git 
a/app/api/schema/event_statistics.py b/app/api/schema/event_statistics.py index f7c452dad3..bddd1beeb5 100644 --- a/app/api/schema/event_statistics.py +++ b/app/api/schema/event_statistics.py @@ -5,6 +5,7 @@ from app.models.session import Session from app.models.speaker import Speaker from app.models.sponsor import Sponsor +from app.models.session_speaker_link import SessionsSpeakersLink class EventStatisticsGeneralSchema(Schema): @@ -33,28 +34,43 @@ class Meta: sponsors = fields.Method("sponsors_count") def sessions_draft_count(self, obj): - return Session.query.filter_by(event_id=obj.id, state='draft').count() + return Session.query.filter_by(event_id=obj.id, state='draft', deleted_at=None).count() def sessions_submitted_count(self, obj): - return Session.query.filter_by(event_id=obj.id, state='submitted').count() + return Session.query.filter_by(event_id=obj.id, deleted_at=None).count() def sessions_accepted_count(self, obj): - return Session.query.filter_by(event_id=obj.id, state='accepted').count() + return Session.query.filter_by(event_id=obj.id, state='accepted', deleted_at=None).count() def sessions_confirmed_count(self, obj): - return Session.query.filter_by(event_id=obj.id, state='confirmed').count() + return Session.query.filter_by(event_id=obj.id, state='confirmed', deleted_at=None).count() def sessions_pending_count(self, obj): - return Session.query.filter_by(event_id=obj.id, state='pending').count() + return Session.query.filter_by(event_id=obj.id, state='pending', deleted_at=None).count() def sessions_rejected_count(self, obj): - return Session.query.filter_by(event_id=obj.id, state='rejected').count() + return Session.query.filter_by(event_id=obj.id, state='rejected', deleted_at=None).count() + + def speakers_count_type(self, obj, state='pending'): + return SessionsSpeakersLink.query.filter_by(event_id=obj.id, session_state=state, deleted_at=None).count() def speakers_count(self, obj): - return Speaker.query.filter_by(event_id=obj.id).count() + 
accepted = self.speakers_count_type(obj=obj, state='accepted') + confirmed = self.speakers_count_type(obj=obj, state='confirmed') + pending = self.speakers_count_type(obj=obj, state='pending') + rejected = self.speakers_count_type(obj=obj, state='rejected') + total = Speaker.query.filter_by(event_id=obj.id, deleted_at=None).count() + serial_data = { + 'accepted': accepted, + 'confirmed': confirmed, + 'pending': pending, + 'rejected': rejected, + 'total': total + } + return serial_data def sessions_count(self, obj): - return Session.query.filter_by(event_id=obj.id).count() + return Session.query.filter_by(event_id=obj.id, deleted_at=None).count() def sponsors_count(self, obj): - return Sponsor.query.filter_by(event_id=obj.id).count() + return Sponsor.query.filter_by(event_id=obj.id, deleted_at=None).count() diff --git a/app/api/schema/events.py b/app/api/schema/events.py index afcd14b55b..5df171dae7 100644 --- a/app/api/schema/events.py +++ b/app/api/schema/events.py @@ -1,4 +1,5 @@ import pytz +from datetime import datetime from flask_rest_jsonapi.exceptions import ObjectNotFound from marshmallow import validates_schema, validate from marshmallow_jsonapi import fields @@ -44,6 +45,10 @@ def validate_date(self, data, original_data): raise UnprocessableEntity({'pointer': '/data/attributes/ends-at'}, "ends-at should be after starts-at") + if datetime.timestamp(data['starts_at']) <= datetime.timestamp(datetime.now()): + raise UnprocessableEntity({'pointer': '/data/attributes/starts-at'}, + "starts-at should be after current date-time") + @validates_schema(pass_original=True) def validate_timezone(self, data, original_data): if 'id' in original_data['data']: @@ -82,6 +87,8 @@ def validate_timezone(self, data, original_data): organizer_name = fields.Str(allow_none=True) is_map_shown = fields.Bool(default=False) has_organizer_info = fields.Bool(default=False) + has_sessions = fields.Bool(default=0, dump_only=True) + has_speakers = fields.Bool(default=0, dump_only=True) 
organizer_description = fields.Str(allow_none=True) is_sessions_speakers_enabled = fields.Bool(default=False) privacy = fields.Str(default="public") @@ -90,6 +97,7 @@ def validate_timezone(self, data, original_data): code_of_conduct = fields.Str(allow_none=True) schedule_published_on = fields.DateTime(allow_none=True) is_ticketing_enabled = fields.Bool(default=False) + is_featured = fields.Bool(default=False) payment_country = fields.Str(allow_none=True) payment_currency = fields.Str(allow_none=True) tickets_available = fields.Float(dump_only=True) @@ -103,6 +111,7 @@ def validate_timezone(self, data, original_data): can_pay_by_cheque = fields.Bool(default=False) can_pay_by_bank = fields.Bool(default=False) can_pay_onsite = fields.Bool(default=False) + can_pay_by_omise = fields.Bool(default=False) cheque_details = fields.Str(allow_none=True) bank_details = fields.Str(allow_none=True) onsite_details = fields.Str(allow_none=True) diff --git a/app/api/schema/import_jobs.py b/app/api/schema/import_jobs.py new file mode 100644 index 0000000000..051c352736 --- /dev/null +++ b/app/api/schema/import_jobs.py @@ -0,0 +1,25 @@ +from marshmallow_jsonapi import fields + +from app.api.helpers.utilities import dasherize +from marshmallow_jsonapi.flask import Schema + + +class ImportJobSchema(Schema): + """ + Api schema for ImportJob Model + """ + + class Meta: + """ + Meta class for ImportJob Api Schema + """ + type_ = 'import-job' + self_view = 'v1.import_job_detail' + self_view_kwargs = {'id': ''} + inflect = dasherize + + id = fields.Str(dump_only=True) + task = fields.Str(allow_none=False) + starts_at = fields.DateTime(required=True, timezone=True) + result = fields.Str(allow_none=True) + result_status = fields.Str(allow_none=True) diff --git a/app/api/schema/notifications.py b/app/api/schema/notifications.py index 59cd30e67b..2df5becec6 100644 --- a/app/api/schema/notifications.py +++ b/app/api/schema/notifications.py @@ -24,6 +24,7 @@ class Meta: subject = 
fields.Str(allow_none=True, dump_only=True) subject_id = fields.Str(allow_none=True, dump_only=True) notification_id = fields.Str(allow_none=True, dump_only=True) + link = fields.Str(dump_only=True) notification = Relationship(attribute='notification', self_view='v1.notification_actions_notification', self_view_kwargs={'id': ''}, diff --git a/app/api/schema/orders.py b/app/api/schema/orders.py index 83d6b9a37c..75dd46ae3c 100644 --- a/app/api/schema/orders.py +++ b/app/api/schema/orders.py @@ -47,12 +47,14 @@ def initial_values(self, data): id = fields.Str(dump_only=True) identifier = fields.Str(dump_only=True) - amount = fields.Float(validate=lambda n: n > 0, allow_none=True) + amount = fields.Float(validate=lambda n: n >= 0, allow_none=False, default=0) address = fields.Str(allow_none=True) city = fields.Str(allow_none=True) state = fields.Str(db.String, allow_none=True) country = fields.Str(allow_none=True) zipcode = fields.Str(allow_none=True) + company = fields.Str(allow_none=True) + tax_business_info = fields.Str(allow_none=True) completed_at = fields.DateTime(dump_only=True) created_at = fields.DateTime(dump_only=True) transaction_id = fields.Str(dump_only=True) @@ -61,6 +63,7 @@ def initial_values(self, data): validate=validate.OneOf(choices=["free", "stripe", "paypal", "bank", "cheque", "onsite"]), allow_none=True) paid_via = fields.Str(dump_only=True) + is_billing_enabled = fields.Boolean(default=False) brand = fields.Str(dump_only=True) exp_month = fields.Str(dump_only=True) exp_year = fields.Str(dump_only=True) diff --git a/app/api/schema/sessions.py b/app/api/schema/sessions.py index e1212f2c69..8ec01231fa 100644 --- a/app/api/schema/sessions.py +++ b/app/api/schema/sessions.py @@ -3,6 +3,7 @@ from marshmallow_jsonapi import fields from marshmallow_jsonapi.flask import Relationship from sqlalchemy.orm.exc import NoResultFound +from datetime import datetime from app.api.helpers.exceptions import UnprocessableEntity, ForbiddenException from 
app.api.helpers.permission_manager import has_access @@ -48,6 +49,9 @@ def validate_date(self, data, original_data): if data['starts_at'] >= data['ends_at']: raise UnprocessableEntity( {'pointer': '/data/attributes/ends-at'}, "ends-at should be after starts-at") + if datetime.timestamp(data['starts_at']) <= datetime.timestamp(datetime.now()): + raise UnprocessableEntity( + {'pointer': '/data/attributes/starts-at'}, "starts-at should be after current date-time") if 'state' in data: if data['state'] is not 'draft' or not 'pending': diff --git a/app/api/schema/settings.py b/app/api/schema/settings.py index 865ff2e860..b776d9e17e 100644 --- a/app/api/schema/settings.py +++ b/app/api/schema/settings.py @@ -57,6 +57,7 @@ class Meta: # is_paypal_activated = fields.Bool(dump_only=True) is_stripe_activated = fields.Bool(dump_only=True) + is_omise_activate = fields.Bool(dump_only=True) class SettingSchemaNonAdmin(SettingSchemaPublic): @@ -158,6 +159,13 @@ class Meta: paypal_sandbox_client = fields.Str(allow_none=True) paypal_sandbox_secret = fields.Str(allow_none=True) + # Omise Credentials + omise_mode = fields.Str(allow_none=True) + omise_test_public = fields.Str(allow_none=True) + omise_test_secret = fields.Str(allow_none=True) + omise_live_public = fields.Str(allow_none=True) + omise_live_secret = fields.Str(allow_none=True) + # # EMAIL # diff --git a/app/api/schema/speakers_calls.py b/app/api/schema/speakers_calls.py index 43170d1a6f..9423b03650 100644 --- a/app/api/schema/speakers_calls.py +++ b/app/api/schema/speakers_calls.py @@ -32,9 +32,20 @@ def validate_date(self, data, original_data): if 'ends_at' not in data: data['ends_at'] = speakers_calls.ends_at + if 'event_starts_at' not in data: + data['event_starts_at'] = speakers_calls.event.starts_at + if data['starts_at'] >= data['ends_at']: raise UnprocessableEntity({'pointer': '/data/attributes/ends-at'}, "ends-at should be after starts-at") + if 'event_starts_at' in data and data['starts_at'] > 
data['event_starts_at']: + raise UnprocessableEntity({'pointer': '/data/attributes/starts-at'}, + "speakers-call starts-at should be before event starts-at") + + if 'event_starts_at' in data and data['ends_at'] > data['event_starts_at']: + raise UnprocessableEntity({'pointer': '/data/attributes/ends-at'}, + "speakers-call ends-at should be before event starts-at") + id = fields.Str(dump_only=True) announcement = fields.Str(required=True) starts_at = fields.DateTime(required=True) diff --git a/app/api/schema/tickets.py b/app/api/schema/tickets.py index 82381d7889..00b1f76932 100644 --- a/app/api/schema/tickets.py +++ b/app/api/schema/tickets.py @@ -30,10 +30,21 @@ def validate_date(self, data, original_data): if 'sales_ends_at' not in data: data['sales_ends_at'] = ticket.sales_ends_at + if 'event_ends_at' not in data: + data['event_ends_at'] = ticket.event.ends_at + if data['sales_starts_at'] >= data['sales_ends_at']: raise UnprocessableEntity({'pointer': '/data/attributes/sales-ends-at'}, "sales-ends-at should be after sales-starts-at") + if 'event_ends_at' in data and data['sales_starts_at'] > data['event_ends_at']: + raise UnprocessableEntity({'pointer': '/data/attributes/sales-starts-at'}, + "ticket sales-starts-at should be before event ends-at") + + if 'event_ends_at' in data and data['sales_ends_at'] > data['event_ends_at']: + raise UnprocessableEntity({'pointer': '/data/attributes/sales-ends-at'}, + "ticket sales-ends-at should be before event ends-at") + @validates_schema def validate_quantity(self, data): if 'max_order' in data and 'min_order' in data: diff --git a/app/api/schema/users.py b/app/api/schema/users.py index 72cd98645b..e515929258 100644 --- a/app/api/schema/users.py +++ b/app/api/schema/users.py @@ -26,10 +26,12 @@ class Meta: avatar_url = fields.Url(allow_none=True) first_name = fields.Str(allow_none=True) last_name = fields.Str(allow_none=True) - original_image_url = fields.Url(allow_none=True) + original_image_url = 
fields.Url(dump_only=True, allow_none=True) thumbnail_image_url = fields.Url(dump_only=True, allow_none=True) small_image_url = fields.Url(dump_only=True, allow_none=True) icon_image_url = fields.Url(dump_only=True, allow_none=True) + was_registered_with_order = fields.Boolean() + class UserSchema(UserSchemaPublic): diff --git a/app/api/sessions.py b/app/api/sessions.py index 46e2abcc4d..7207457875 100644 --- a/app/api/sessions.py +++ b/app/api/sessions.py @@ -2,7 +2,7 @@ from app.api.bootstrap import api from app.api.events import Event -from app.api.helpers.db import safe_query, get_count +from app.api.helpers.db import safe_query, get_count, save_to_db from app.api.helpers.exceptions import ForbiddenException from app.api.helpers.mail import send_email_new_session, send_email_session_accept_reject from app.api.helpers.notification import send_notif_new_session_organizer, send_notif_session_accept_reject @@ -17,6 +17,7 @@ from app.models.speaker import Speaker from app.models.track import Track from app.models.user import User +from app.models.session_speaker_link import SessionsSpeakersLink from app.settings import get_settings @@ -51,11 +52,19 @@ def after_create_object(self, session, data, view_kwargs): organizer = session.event.get_organizer() organizer_email = organizer.email frontend_url = get_settings()['frontend_url'] + event = session.event link = "{}/events/{}/sessions/{}"\ - .format(frontend_url, session.event_id, session.id) + .format(frontend_url, event.identifier, session.id) send_email_new_session(organizer_email, event_name, link) send_notif_new_session_organizer(organizer, event_name, link, session.id) + for speaker in session.speakers: + session_speaker_link = SessionsSpeakersLink(session_state=session.state, + session_id=session.id, + event_id=session.event.id, + speaker_id=speaker.id) + save_to_db(session_speaker_link, "Session Speaker Link Saved") + decorators = (api.has_permission('create_event'),) schema = SessionSchema data_layer = 
{'session': db.session, @@ -87,7 +96,8 @@ def query(self, view_kwargs): query_ = query_.join(Microlocation).filter(Microlocation.id == microlocation.id) if view_kwargs.get('user_id') is not None: user = safe_query(self, User, 'id', view_kwargs['user_id'], 'user_id') - query_ = query_.join(User).filter(User.id == user.id) + query_ = query_.join(User)\ + .join(Speaker).filter((User.id == user.id or Session.speakers.any(Speaker.user_id == user.id))) query_ = event_query(self, query_, view_kwargs) if view_kwargs.get('speaker_id'): speaker = safe_query(self, Speaker, 'id', view_kwargs['speaker_id'], 'speaker_id') @@ -125,12 +135,14 @@ def after_update_object(self, session, data, view_kwargs): if 'state' in data and data.get('send_email', None) and (session.state == 'accepted' or session.state == 'rejected'): + + event = session.event # Email for speaker speakers = session.speakers for speaker in speakers: frontend_url = get_settings()['frontend_url'] link = "{}/events/{}/sessions/{}" \ - .format(frontend_url, session.event_id, session.id) + .format(frontend_url, event.identifier, session.id) send_email_session_accept_reject(speaker.email, session, link) send_notif_session_accept_reject(speaker, session.title, session.state, link, session.id) @@ -140,11 +152,30 @@ def after_update_object(self, session, data, view_kwargs): organizer_email = organizer.email frontend_url = get_settings()['frontend_url'] link = "{}/events/{}/sessions/{}" \ - .format(frontend_url, session.event_id, session.id) + .format(frontend_url, event.identifier, session.id) send_email_session_accept_reject(organizer_email, session, link) send_notif_session_accept_reject(organizer, session.title, session.state, link, session.id) + if 'state' in data: + entry_count = SessionsSpeakersLink.query.filter_by(session_id=session.id) + if entry_count.count() == 0: + is_patch_request = False + else: + is_patch_request = True + + if is_patch_request: + for focus_session in entry_count: + focus_session.session_state 
= session.state + db.session.commit() + else: + current_session = Session.query.filter_by(id=session.id).first() + for speaker in current_session.speakers: + session_speaker_link = SessionsSpeakersLink(session_state=session.state, + session_id=session.id, + event_id=session.event.id, + speaker_id=speaker.id) + save_to_db(session_speaker_link, "Session Speaker Link Saved") decorators = (api.has_permission('is_speaker_for_session', methods="PATCH,DELETE"),) schema = SessionSchema diff --git a/app/api/settings.py b/app/api/settings.py index 614ff9e036..53cfb30492 100644 --- a/app/api/settings.py +++ b/app/api/settings.py @@ -27,6 +27,9 @@ class SettingDetail(ResourceDetail): """ def before_get(self, args, kwargs): + refresh = request.args.get('refresh') + if refresh == 'true': + refresh_settings() kwargs['id'] = 1 if 'Authorization' in request.headers: diff --git a/app/api/speakers.py b/app/api/speakers.py index eba1c6497c..ac1e2b130f 100644 --- a/app/api/speakers.py +++ b/app/api/speakers.py @@ -3,7 +3,7 @@ from flask_rest_jsonapi.exceptions import ObjectNotFound from app.api.bootstrap import api -from app.api.helpers.db import safe_query, get_count +from app.api.helpers.db import safe_query, get_count, save_to_db from app.api.helpers.exceptions import ForbiddenException from app.api.helpers.permission_manager import has_access from app.api.helpers.query import event_query @@ -13,6 +13,7 @@ from app.models.event import Event from app.models.session import Session from app.models.speaker import Speaker +from app.models.session_speaker_link import SessionsSpeakersLink from app.models.user import User @@ -51,11 +52,25 @@ def before_post(self, args, kwargs, data=None): raise ObjectNotFound({'parameter': 'session_id'}, "Session: {} not found".format(session_id)) + def after_create_object(self, speaker, data, view_kwargs): + """ + after create method to save resized images for speaker + :param speaker: + :param data: + :param view_kwargs: + :return: + """ + + if 
data.get('photo_url'): + start_image_resizing_tasks(speaker, data['photo_url']) + schema = SpeakerSchema methods = ['POST', ] data_layer = {'session': db.session, - 'model': Speaker - } + 'model': Speaker, + 'methods': { + 'after_create_object': after_create_object + }} class SpeakerList(ResourceList): @@ -100,11 +115,46 @@ class SpeakerDetail(ResourceDetail): """ Speakers Detail by id """ - decorators = (api.has_permission('is_coorganizer_or_user_itself', methods="PATCH,DELETE", fetch="event_id", + def before_update_object(self, speaker, data, view_kwargs): + """ + method to save image urls before updating speaker object + :param speaker: + :param data: + :param view_kwargs: + :return: + """ + if data.get('photo_url') and data['photo_url'] != speaker.photo_url: + start_image_resizing_tasks(speaker, data['photo_url']) + + def after_patch(self, result): + """ + method to create session speaker link + :param result: + """ + # This method is executed when a new speaker is created + # and added to an existing session + speaker_id = result['data']['id'] + speaker = Speaker.query.filter_by(id=speaker_id).first() + if SessionsSpeakersLink.query.filter_by(speaker_id=speaker_id).count() == 0: + all_sessions = Session.query.filter_by(deleted_at=None) + for session in all_sessions: + if speaker in session.speakers: + session_speaker_link = SessionsSpeakersLink(session_state=session.state, + session_id=session.id, + event_id=session.event.id, + speaker_id=speaker.id) + save_to_db(session_speaker_link, "Session Speaker Link Saved") + + decorators = (api.has_permission('is_speaker_itself_or_admin', methods="PATCH,DELETE", fetch="event_id", + fetch_as="event_id", model=Speaker), + api.has_permission('is_coorganizer_or_user_itself', methods="PATCH,DELETE", fetch="event_id", fetch_as="event_id", model=Speaker),) schema = SpeakerSchema data_layer = {'session': db.session, - 'model': Speaker} + 'model': Speaker, + 'methods': { + 'before_update_object': before_update_object + }} class 
SpeakerRelationshipRequired(ResourceRelationship): @@ -128,3 +178,9 @@ class SpeakerRelationshipOptional(ResourceRelationship): schema = SpeakerSchema data_layer = {'session': db.session, 'model': Speaker} + + +def start_image_resizing_tasks(speaker, photo_url): + speaker_id = str(speaker.id) + from .helpers.tasks import resize_speaker_images_task + resize_speaker_images_task.delay(speaker_id, photo_url) diff --git a/app/api/speakers_calls.py b/app/api/speakers_calls.py index 81e39bc90c..f4740189e9 100644 --- a/app/api/speakers_calls.py +++ b/app/api/speakers_calls.py @@ -36,7 +36,10 @@ def before_create_object(self, data, view_kwargs): :return: """ try: - self.session.query(SpeakersCall).filter_by(event_id=data['event'], deleted_at=None).one() + speakers_call = self.session.query(SpeakersCall).filter_by(event_id=data['event'], deleted_at=None).one() + event = speakers_call.event + if speakers_call.starts_at > event.starts_at or speakers_call.ends_at > event.starts_at: + raise ForbiddenException({'source': ''}, "Speakers call date can\'t be after the event start date") except NoResultFound: pass else: @@ -67,10 +70,14 @@ def before_patch(self, args, kwargs, data): if kwargs.get('event_id'): try: speakers_call = SpeakersCall.query.filter_by(event_id=kwargs['event_id']).one() + event = speakers_call.event + if speakers_call.starts_at > event.starts_at or speakers_call.ends_at > event.starts_at: + raise ForbiddenException({'source': ''}, "Speakers call date can\'t be after the event start date") except NoResultFound: raise ObjectNotFound({'source': ''}, "Object: not found") kwargs['id'] = speakers_call.id + def before_get_object(self, view_kwargs): """ before get method to get the resource id for fetching details diff --git a/app/api/users.py b/app/api/users.py index f9bcaefff8..77c3813307 100644 --- a/app/api/users.py +++ b/app/api/users.py @@ -22,7 +22,7 @@ from app.models.email_notification import EmailNotification from app.models.event_invoice import EventInvoice 
from app.models.feedback import Feedback -from app.models.mail import USER_REGISTER_WITH_PASSWORD +from app.models.mail import USER_REGISTER_WITH_PASSWORD, PASSWORD_RESET_AND_VERIFY from app.models.notification import Notification from app.models.session import Session from app.models.speaker import Speaker @@ -37,14 +37,19 @@ class UserList(ResourceList): """ List and create Users """ + def before_create_object(self, data, view_kwargs): """ method to check if there is an existing user with same email which is received in data to create a new user + and if the password is at least 8 characters long :param data: :param view_kwargs: :return: """ - if db.session.query(User.id).filter_by(email=data['email']).scalar() is not None: + if len(data['password']) < 8: + raise UnprocessableEntity({'source': '/data/attributes/password'}, + 'Password should be at least 8 characters long') + if db.session.query(User.id).filter_by(email=data['email'].strip()).scalar() is not None: raise ConflictException({'pointer': '/data/attributes/email'}, "Email already exists") def after_create_object(self, user, data, view_kwargs): @@ -58,23 +63,32 @@ def after_create_object(self, user, data, view_kwargs): :param view_kwargs: :return: """ - s = get_serializer() - hash = str(base64.b64encode(str(s.dumps([user.email, str_generator()])).encode()), 'utf-8') - link = make_frontend_url('/verify'.format(id=user.id), {'token': hash}) - send_email_with_action(user, USER_REGISTER_WITH_PASSWORD, app_name=get_settings()['app_name'], - email=user.email) - send_email_confirmation(user.email, link) - - if data.get('original_image_url'): - try: - uploaded_images = create_save_image_sizes(data['original_image_url'], 'speaker-image', user.id) - except (urllib.error.HTTPError, urllib.error.URLError): - raise UnprocessableEntity( - {'source': 'attributes/original-image-url'}, 'Invalid Image URL' - ) - uploaded_images['small_image_url'] = uploaded_images['thumbnail_image_url'] - del 
uploaded_images['large_image_url'] - self.session.query(User).filter_by(id=user.id).update(uploaded_images) + + if user.was_registered_with_order: + link = make_frontend_url('/reset-password', {'token': user.reset_password}) + send_email_with_action(user, PASSWORD_RESET_AND_VERIFY, app_name=get_settings()['app_name'], + email=user.email, link=link) + else: + s = get_serializer() + hash = str(base64.b64encode(str(s.dumps([user.email, str_generator()])).encode()), 'utf-8') + link = make_frontend_url('/verify'.format(id=user.id), {'token': hash}) + send_email_with_action(user, USER_REGISTER_WITH_PASSWORD, app_name=get_settings()['app_name'], + email=user.email) + send_email_confirmation(user.email, link) + # TODO Handle in a celery task + # if data.get('original_image_url'): + # try: + # uploaded_images = create_save_image_sizes(data['original_image_url'], 'speaker-image', user.id) + # except (urllib.error.HTTPError, urllib.error.URLError): + # raise UnprocessableEntity( + # {'source': 'attributes/original-image-url'}, 'Invalid Image URL' + # ) + # uploaded_images['small_image_url'] = uploaded_images['thumbnail_image_url'] + # del uploaded_images['large_image_url'] + # self.session.query(User).filter_by(id=user.id).update(uploaded_images) + + if data.get('avatar_url'): + start_image_resizing_tasks(user, data['avatar_url']) decorators = (api.has_permission('is_admin', methods="GET"),) schema = UserSchema @@ -90,6 +104,7 @@ class UserDetail(ResourceDetail): """ User detail by id """ + def before_get(self, args, kwargs): if current_user.is_admin or current_user.is_super_admin or current_user: @@ -180,35 +195,45 @@ def before_get_object(self, view_kwargs): view_kwargs['id'] = None def before_update_object(self, user, data, view_kwargs): - if data.get('original_image_url') and data['original_image_url'] != user.original_image_url: + # TODO: Make a celery task for this + # if data.get('avatar_url') and data['original_image_url'] != user.original_image_url: + # try: + # 
uploaded_images = create_save_image_sizes(data['original_image_url'], 'speaker-image', user.id) + # except (urllib.error.HTTPError, urllib.error.URLError): + # raise UnprocessableEntity( + # {'source': 'attributes/original-image-url'}, 'Invalid Image URL' + # ) + # data['original_image_url'] = uploaded_images['original_image_url'] + # data['small_image_url'] = uploaded_images['thumbnail_image_url'] + # data['thumbnail_image_url'] = uploaded_images['thumbnail_image_url'] + # data['icon_image_url'] = uploaded_images['icon_image_url'] + users_email = data.get('email', None) + if users_email is not None: + users_email = users_email.strip() + + if has_access('is_admin') and data.get('deleted_at') != user.deleted_at: + user.deleted_at = data.get('deleted_at') + + if users_email is not None and users_email != user.email: try: - uploaded_images = create_save_image_sizes(data['original_image_url'], 'speaker-image', user.id) - except (urllib.error.HTTPError, urllib.error.URLError): - raise UnprocessableEntity( - {'source': 'attributes/original-image-url'}, 'Invalid Image URL' - ) - data['original_image_url'] = uploaded_images['original_image_url'] - data['small_image_url'] = uploaded_images['thumbnail_image_url'] - data['thumbnail_image_url'] = uploaded_images['thumbnail_image_url'] - data['icon_image_url'] = uploaded_images['icon_image_url'] - - if data.get('email') and data['email'] != user.email: - try: - db.session.query(User).filter_by(email=data['email']).one() + db.session.query(User).filter_by(email=users_email).one() except NoResultFound: view_kwargs['email_changed'] = user.email else: raise ConflictException({'pointer': '/data/attributes/email'}, "Email already exists") - if has_access('is_super_admin') and data.get('is_admin') != user.is_admin: + if has_access('is_super_admin') and data.get('is_admin') and data.get('is_admin') != user.is_admin: user.is_admin = not user.is_admin - if has_access('is_admin') and data.get('is_sales_admin') != user.is_sales_admin: + if 
has_access('is_admin') and ('is_sales_admin' in data) and data.get('is_sales_admin') != user.is_sales_admin: user.is_sales_admin = not user.is_sales_admin - if has_access('is_admin') and data.get('is_marketer') != user.is_marketer: + if has_access('is_admin') and ('us_marketer' in data) and data.get('is_marketer') != user.is_marketer: user.is_marketer = not user.is_marketer + if data.get('avatar_url'): + start_image_resizing_tasks(user, data['avatar_url']) + def after_update_object(self, user, data, view_kwargs): """ method to mail user about email change @@ -221,11 +246,11 @@ def after_update_object(self, user, data, view_kwargs): send_email_change_user_email(user, view_kwargs.get('email_changed')) decorators = (api.has_permission('is_user_itself', fetch="user_id,id", fetch_as="user_id", - model=[Notification, Feedback, UsersEventsRoles, Session, EventInvoice, AccessCode, - DiscountCode, EmailNotification, Speaker, User], - fetch_key_url="notification_id, feedback_id, users_events_role_id, session_id, \ + model=[Notification, Feedback, UsersEventsRoles, Session, EventInvoice, AccessCode, + DiscountCode, EmailNotification, Speaker, User], + fetch_key_url="notification_id, feedback_id, users_events_role_id, session_id, \ event_invoice_id, access_code_id, discount_code_id, email_notification_id, speaker_id, id", - leave_if=lambda a: a.get('attendee_id')), ) + leave_if=lambda a: a.get('attendee_id')),) schema = UserSchema data_layer = {'session': db.session, 'model': User, @@ -240,7 +265,7 @@ class UserRelationship(ResourceRelationship): """ User Relationship """ - decorators = (is_user_itself, ) + decorators = (is_user_itself,) schema = UserSchema data_layer = {'session': db.session, 'model': User} @@ -262,3 +287,9 @@ def is_email_available(): abort( make_response(jsonify(error="Email field missing"), 422) ) + + +def start_image_resizing_tasks(user, original_image_url): + user_id = str(user.id) + from .helpers.tasks import resize_user_images_task + 
resize_user_images_task.delay(user_id, original_image_url) diff --git a/app/factories/access_code.py b/app/factories/access_code.py index bf8ba8c7d0..444e39526d 100644 --- a/app/factories/access_code.py +++ b/app/factories/access_code.py @@ -18,9 +18,9 @@ class Meta: code = common.string_ access_url = common.url_ is_active = True - tickets_number = 10 + tickets_number = 30 min_quantity = 10 - max_quantity = 100 + max_quantity = 20 valid_from = common.date_ valid_till = common.dateEnd_ used_for = common.string_ diff --git a/app/factories/common.py b/app/factories/common.py index c8aa720ea1..4cc1ffb155 100644 --- a/app/factories/common.py +++ b/app/factories/common.py @@ -1,14 +1,17 @@ import factory - +import datetime +from app.api.helpers.utilities import static_page, image_link # use camelCase for naming variables string_ = 'example' email_ = factory.Sequence(lambda n: 'user{0}@example.com'.format(n)) integer_ = 25 -url_ = 'http://example.com' -imageUrl_ = 'https://www.w3schools.com/html/pic_mountain.jpg' -date_ = '2016-12-13T23:59:59.123456+00:00' -dateEnd_ = '2016-12-14T23:59:59.123456+00:00' +url_ = static_page +imageUrl_ = image_link +date_ = datetime.datetime(2016, 12, 13) +dateFuture_ = datetime.datetime(2099, 12, 13) +dateEndFuture_ = datetime.datetime(2099, 12, 14) +dateEnd_ = datetime.datetime(2020, 12, 14) country_ = 'US' currency_ = 'USD' int_ = '1' diff --git a/app/factories/event.py b/app/factories/event.py index d4dc8a9269..393d6093e9 100644 --- a/app/factories/event.py +++ b/app/factories/event.py @@ -11,8 +11,8 @@ class Meta: name = common.string_ external_event_url = common.url_ - starts_at = common.date_ - ends_at = common.dateEnd_ + starts_at = common.dateFuture_ + ends_at = common.dateEndFuture_ timezone = common.timezone_ latitude = common.float_ longitude = common.float_ diff --git a/app/factories/export_job.py b/app/factories/export_job.py new file mode 100644 index 0000000000..9253478578 --- /dev/null +++ b/app/factories/export_job.py @@ 
-0,0 +1,15 @@ +import factory + +import app.factories.common as common +from app.factories.event import EventFactoryBasic +from app.models.export_job import db, ExportJob + + +class ExportJobFactory(factory.alchemy.SQLAlchemyModelFactory): + class Meta: + model = ExportJob + sqlalchemy_session = db.session + + task = common.string_ + user_email = common.string_ + event = factory.RelatedFactory(EventFactoryBasic) diff --git a/app/factories/session.py b/app/factories/session.py index 7c5416220b..738da57d1e 100644 --- a/app/factories/session.py +++ b/app/factories/session.py @@ -23,8 +23,8 @@ class Meta: short_abstract = common.string_ long_abstract = (common.string_ + common.string_) comments = common.string_ - starts_at = common.date_ - ends_at = common.dateEnd_ + starts_at = common.dateFuture_ + ends_at = common.dateEndFuture_ language = "English" slides_url = common.url_ video_url = common.url_ diff --git a/app/models/access_code.py b/app/models/access_code.py index ef3f77147c..de90409192 100644 --- a/app/models/access_code.py +++ b/app/models/access_code.py @@ -19,6 +19,8 @@ class AccessCode(SoftDeletionModel): max_quantity = db.Column(db.Integer) # For event level access this holds the months for which it is valid valid_from = db.Column(db.DateTime(timezone=True), nullable=True) valid_till = db.Column(db.DateTime(timezone=True), nullable=True) + ticket_id = db.Column(db.Integer, db.ForeignKey('tickets.id', ondelete='CASCADE')) + ticket = db.relationship('Ticket', backref='access_code', foreign_keys=[ticket_id]) event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE')) event = db.relationship('Event', backref='access_codes', foreign_keys=[event_id]) created_at = db.Column(db.DateTime(timezone=True)) diff --git a/app/models/custom_form.py b/app/models/custom_form.py index b714a66918..07e149d098 100644 --- a/app/models/custom_form.py +++ b/app/models/custom_form.py @@ -45,11 +45,11 @@ "country": {"include": 1, "require": 0}, "job_title": 
{"include": 1, "require": 0}, "phone": {"include": 1, "require": 0}, - "tax_business_info": {"include": 0, "require": 0}, + "tax_business_info": {"include": 1, "require": 0}, "billing_address": {"include": 0, "require": 0}, "home_address": {"include": 0, "require": 0}, "shipping_address": {"include": 0, "require": 0}, - "company": {"include": 0, "require": 0}, + "company": {"include": 1, "require": 0}, "work_address": {"include": 0, "require": 0}, "work_phone": {"include": 0, "require": 0}, "website": {"include": 1, "require": 0}, @@ -88,11 +88,11 @@ def __init__(self, is_fixed=None, deleted_at=None): self.event_id = event_id - self.field_identifier = field_identifier, - self.form = form, - self.type = type, - self.is_required = is_required, - self.is_included = is_included, + self.field_identifier = field_identifier + self.form = form + self.type = type + self.is_required = is_required + self.is_included = is_included self.is_fixed = is_fixed self.deleted_at = deleted_at diff --git a/app/models/event.py b/app/models/event.py index 5cd5eaadd4..52d5517ac1 100644 --- a/app/models/event.py +++ b/app/models/event.py @@ -12,12 +12,15 @@ from app.models import db from app.models.order import Order from app.models.ticket_fee import get_fee +from app.models.ticket_fee import get_maximum_fee from app.models.base import SoftDeletionModel from app.models.email_notification import EmailNotification from app.models.feedback import Feedback from app.models.helpers.versioning import clean_up_string, clean_html from app.models.user import ATTENDEE, ORGANIZER from app.models.event_topic import EventTopic +from app.models.session import Session +from app.models.speaker import Speaker from app.models.search import sync from app.models.ticket import Ticket from app.models.ticket_holder import TicketHolder @@ -50,6 +53,7 @@ class Event(SoftDeletionModel): longitude = db.Column(db.Float) location_name = db.Column(db.String) searchable_location_name = db.Column(db.String) + is_featured = 
db.Column(db.Boolean, default=False, nullable=False) description = db.Column(db.Text) original_image_url = db.Column(db.String) thumbnail_image_url = db.Column(db.String) @@ -96,6 +100,7 @@ class Event(SoftDeletionModel): can_pay_by_cheque = db.Column(db.Boolean, default=False) can_pay_by_bank = db.Column(db.Boolean, default=False) can_pay_onsite = db.Column(db.Boolean, default=False) + can_pay_by_omise = db.Column(db.Boolean, default=False) cheque_details = db.Column(db.String) bank_details = db.Column(db.String) onsite_details = db.Column(db.String) @@ -184,6 +189,7 @@ def __init__(self, privacy=None, event_topic_id=None, event_sub_topic_id=None, + events_orga_id=None, ticket_url=None, copyright=None, code_of_conduct=None, @@ -202,8 +208,10 @@ def __init__(self, can_pay_by_paypal=None, can_pay_by_stripe=None, can_pay_by_cheque=None, + can_pay_by_omise=None, identifier=None, can_pay_by_bank=None, + is_featured=False, can_pay_onsite=None, cheque_details=None, bank_details=None, @@ -247,6 +255,7 @@ def __init__(self, self.event_topic_id = event_topic_id self.copyright = copyright self.event_sub_topic_id = event_sub_topic_id + self.events_orga_id = events_orga_id self.ticket_url = ticket_url self.code_of_conduct = code_of_conduct self.schedule_published_on = schedule_published_on @@ -263,7 +272,9 @@ def __init__(self, self.can_pay_by_cheque = can_pay_by_cheque self.can_pay_by_bank = can_pay_by_bank self.can_pay_onsite = can_pay_onsite + self.can_pay_by_omise = can_pay_by_omise self.is_donation_enabled = is_donation_enabled + self.is_featured = is_featured self.identifier = get_new_event_identifier() self.cheque_details = cheque_details self.bank_details = bank_details @@ -309,6 +320,13 @@ def fee(self): """ return get_fee(self.payment_country, self.payment_currency) + @property + def maximum_fee(self): + """ + Returns the maximum fee for this event + """ + return get_maximum_fee(self.payment_country, self.payment_currency) + def notification_settings(self, user_id): 
try: return EmailNotification.query.filter_by( @@ -380,6 +398,15 @@ def tickets_sold(self): def revenue(self): return self.calc_revenue() + @property + def has_sessions(self): + return Session.query.filter_by(event_id=self.id).count() > 0 + + @property + def has_speakers(self): + return Speaker.query.filter_by(event_id=self.id).count() > 0 + + @event.listens_for(Event, 'after_update') @event.listens_for(Event, 'after_insert') diff --git a/app/models/mail.py b/app/models/mail.py index 0547fad9e3..901cb6d46b 100644 --- a/app/models/mail.py +++ b/app/models/mail.py @@ -11,6 +11,7 @@ NEXT_EVENT = 'Next Event' NEW_SESSION = 'New Session Proposal' PASSWORD_RESET = 'Reset Password' +PASSWORD_RESET_AND_VERIFY = 'Reset Password and Account Verification' PASSWORD_CHANGE = 'Change Password' EVENT_ROLE = 'Event Role Invitation' SESSION_ACCEPT_REJECT = 'Session Accept or Reject' diff --git a/app/models/order.py b/app/models/order.py index 71c62d0d65..686469fd74 100644 --- a/app/models/order.py +++ b/app/models/order.py @@ -21,8 +21,9 @@ def get_updatable_fields(): """ :return: The list of fields which can be modified by the order user using the pre payment form. 
""" - return ['country', 'address', 'city', 'state', 'zipcode', 'status', 'paid_via', 'order_notes', 'deleted_at', 'user', - 'payment_mode', 'event', 'discount_code_id', 'discount_code', 'ticket_holders', 'user', 'tickets_pdf_url'] + return ['country', 'address', 'city', 'state', 'zipcode', 'company', 'tax_business_info', 'status', 'paid_via', + 'order_notes', 'deleted_at', 'user', 'payment_mode', 'event', 'discount_code_id', 'discount_code', + 'ticket_holders', 'user', 'tickets_pdf_url', 'is_billing_enabled'] class OrderTicket(SoftDeletionModel): @@ -37,12 +38,14 @@ class Order(SoftDeletionModel): id = db.Column(db.Integer, primary_key=True) identifier = db.Column(db.String, unique=True) - amount = db.Column(db.Float) + amount = db.Column(db.Float, nullable=False, default=0) address = db.Column(db.String) city = db.Column(db.String) state = db.Column(db.String) country = db.Column(db.String) zipcode = db.Column(db.String) + company = db.Column(db.String) + tax_business_info = db.Column(db.String) user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='SET NULL')) event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='SET NULL')) marketer_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='SET NULL')) @@ -52,6 +55,7 @@ class Order(SoftDeletionModel): transaction_id = db.Column(db.String) paid_via = db.Column(db.String) payment_mode = db.Column(db.String) + is_billing_enabled = db.Column(db.Boolean) brand = db.Column(db.String) exp_month = db.Column(db.Integer) exp_year = db.Column(db.Integer) @@ -81,8 +85,11 @@ def __init__(self, state=None, country=None, zipcode=None, + company=None, + tax_business_info=None, transaction_id=None, paid_via=None, + is_billing_enabled=False, user_id=None, discount_code_id=None, event_id=None, @@ -99,10 +106,13 @@ def __init__(self, self.state = state self.country = country self.zipcode = zipcode + self.company = company + self.tax_business_info = tax_business_info self.user_id = user_id 
self.event_id = event_id self.transaction_id = transaction_id self.paid_via = paid_via + self.is_billing_enabled = is_billing_enabled self.created_at = datetime.datetime.now(datetime.timezone.utc) self.discount_code_id = discount_code_id self.status = status @@ -130,10 +140,13 @@ def tickets_count(self): @property def is_free(self): - return self.paid_via == 'free' + return self.payment_mode == 'free' def get_revenue(self): - return self.amount - (self.amount * (self.event.fee / 100.0)) + if self.amount: + return self.amount - min(self.amount * (self.event.fee / 100.0), self.event.maximum_fee) + else: + return 0.0 @property def serialize(self): @@ -147,8 +160,11 @@ def serialize(self): 'state': self.state, 'zipcode': self.zipcode, 'country': self.country, + 'company': self.company, + 'taxBusinessInfo': self.tax_business_info, 'transaction_id': self.transaction_id, 'paid_via': self.paid_via, + 'isBillingEnabled': self.is_billing_enabled, 'payment_mode': self.payment_mode, 'brand': self.brand, 'exp_month': self.exp_month, diff --git a/app/models/session_speaker_link.py b/app/models/session_speaker_link.py new file mode 100644 index 0000000000..b59e889048 --- /dev/null +++ b/app/models/session_speaker_link.py @@ -0,0 +1,34 @@ +from app.models import db +from app.models.base import SoftDeletionModel + + +class SessionsSpeakersLink(SoftDeletionModel): + __tablename__ = 'sessions_speakers_links' + + id = db.Column(db.Integer, primary_key=True) + + event_id = db.Column(db.Integer) + session_id = db.Column(db.Integer) + speaker_id = db.Column(db.Integer) + session_state = db.Column(db.String, nullable=False) + + def __init__(self, + session_id=None, + speaker_id=None, + event_id=None, + session_state=None, + deleted_at=None): + + self.session_id = session_id + self.speaker_id = speaker_id + self.event_id = event_id + self.session_state = session_state + self.deleted_at = deleted_at + + def __repr__(self): + return '<SessionsSpeakersLink %r:%r:%r>' % (self.session_id, + self.speaker_id, + 
self.session_state) + + def __str__(self): + return self.__repr__() diff --git a/app/models/setting.py b/app/models/setting.py index 83557dbbe5..876a8a9ada 100644 --- a/app/models/setting.py +++ b/app/models/setting.py @@ -80,6 +80,13 @@ class Setting(db.Model): paypal_sandbox_client = db.Column(db.String) paypal_sandbox_secret = db.Column(db.String) + # Omise credentials + omise_mode = db.Column(db.String) + omise_live_public = db.Column(db.String) + omise_live_secret = db.Column(db.String) + omise_test_public = db.Column(db.String) + omise_test_secret = db.Column(db.String) + # # EMAIL # @@ -126,7 +133,7 @@ class Setting(db.Model): "event preferences and provide you with a customized experience. " "By closing this banner or by continuing to use the site, you agree. " "For more information please review our cookie policy.") - cookie_policy_link = db.Column(db.String, default="http://next.cookie-policy.eventyay.com") + cookie_policy_link = db.Column(db.String, default="https://next.eventyay.com/cookie-policy") def __init__(self, app_environment=Environment.PRODUCTION, @@ -169,7 +176,12 @@ def __init__(self, android_app_url=None, web_app_url=None, cookie_policy=None, - cookie_policy_link=None): + cookie_policy_link=None, + omise_mode=None, + omise_test_public=None, + omise_test_secret=None, + omise_live_public=None, + omise_live_secret=None): self.app_environment = app_environment self.aws_key = aws_key self.aws_secret = aws_secret @@ -225,6 +237,13 @@ def __init__(self, self.paypal_sandbox_client = paypal_sandbox_client self.paypal_sandbox_secret = paypal_sandbox_secret + # Omise Credentials + self.omise_mode = omise_mode + self.omise_test_public = omise_test_public + self.omise_test_secret = omise_test_secret + self.omise_live_public = omise_live_public + self.omise_live_secret = omise_live_secret + @hybrid_property def is_paypal_activated(self): if self.paypal_mode == 'sandbox' and self.paypal_sandbox_client and self.paypal_sandbox_secret: @@ -243,3 +262,12 @@ 
def __repr__(self): def __str__(self): return self.__repr__() + + @hybrid_property + def is_omise_activated(self): + if self.omise_mode == 'test' and self.omise_test_public and self.omise_test_secret: + return True + elif self.omise_live_public and self.omise_live_secret: + return True + else: + return False diff --git a/app/models/ticket_fee.py b/app/models/ticket_fee.py index baa3214d92..803a40adce 100644 --- a/app/models/ticket_fee.py +++ b/app/models/ticket_fee.py @@ -43,3 +43,16 @@ def get_fee(country, currency): return fee.service_fee return DEFAULT_FEE + + +def get_maximum_fee(country, currency): + """Returns the fee for a given country and currency string""" + fee = db.session.query(TicketFees) \ + .filter(TicketFees.country == country) \ + .filter(TicketFees.currency == currency) \ + .order_by(desc(TicketFees.id)).first() + + if fee: + return fee.maximum_fee + + return DEFAULT_FEE diff --git a/app/models/ticket_holder.py b/app/models/ticket_holder.py index d9aa9715a3..9f1576248e 100644 --- a/app/models/ticket_holder.py +++ b/app/models/ticket_holder.py @@ -161,4 +161,6 @@ def serialize(self): 'city': self.city, 'address': self.address, 'state': self.state, - 'country': self.country} + 'country': self.country, + 'company': self.company, + 'taxBusinessInfo': self.tax_business_info} diff --git a/app/models/user.py b/app/models/user.py index 1b3957e599..7913db6a55 100644 --- a/app/models/user.py +++ b/app/models/user.py @@ -75,6 +75,7 @@ class User(SoftDeletionModel): is_sales_admin = db.Column(db.Boolean, default=False) is_marketer = db.Column(db.Boolean, default=False) is_verified = db.Column(db.Boolean, default=False) + was_registered_with_order = db.Column(db.Boolean, default=False) last_accessed_at = db.Column(db.DateTime(timezone=True)) created_at = db.Column(db.DateTime(timezone=True), default=datetime.now(pytz.utc)) speaker = db.relationship('Speaker', backref="user") diff --git a/app/templates/pdf/order_invoice.html 
b/app/templates/pdf/order_invoice.html new file mode 100644 index 0000000000..a45d624eca --- /dev/null +++ b/app/templates/pdf/order_invoice.html @@ -0,0 +1,126 @@ + + + + + {{ ("Order Invoice") }} + + + +

{{ ("Order Invoice") }}

+
+ + + + + + + + + + {% if order.status != "deleted" %} + + + + {% if order.is_billing_enabled %} + + + {% endif %} + + {% endif %} + +
+ {{ ("Order") }} + + {{ ("Order Details") }} + + {{ ("Billing Info") }} +

+ Order Number :
+ Order By :
+ Order Status :
+ Quantity :
+ Total Amount :
+ Payment Mode :
+ Discount Code :
+

+ {{ order.get_invoice_number() }}
+ {% if order.user and order.user.first_name and order.user.last_name %} + {{ order.user.fullname }} + {% elif order.user %} + {{ order.user.email }} + {% else %} + {{ ('Information unavailable') }} + {% endif %}
+ {% if order.status == 'completed' %} + {{ order.status | capitalize }} + {% elif order.status == 'pending' or order.status == 'initialized' %} + {{ ("Pending") }} + {% elif order.status == 'placed' %} + {{ order.status | capitalize }} + {% elif order.status == 'cancelled' %} + {{ order.status | capitalize }} + {% else %} + {{ order.status | capitalize }} + {% endif %}
+ {{ order.tickets_count }}
+ {{ event.payment_currency | currency_symbol }}{{ order.amount | money }}
+ {% if order.status == 'completed' %} + {{ order.paid_via | capitalize }} + {% else %} + {{ ('Payment pending') }} + {% endif %}
+ {% if order.discount_code %} + {{ order.discount_code.code }}
+ {% else %} + {{ ('NA') }} + {% endif %}
+

+ + Company :
+ Tax Info :
+ Address :
+ City :
+ State/Province :
+ Zip Code:
+ Country:
+
+

+ + {{ order.company }}
+ {{ order.tax_business_info }}
+ {{ order.address }}
+ {{ order.city }}
+ {{ order.state }}
+ {{ order.zipcode }}
+ {{ order.country }}
+
+
+ + diff --git a/app/templates/pdf/ticket_attendee.html b/app/templates/pdf/ticket_attendee.html index acfb230cff..efa987291b 100644 --- a/app/templates/pdf/ticket_attendee.html +++ b/app/templates/pdf/ticket_attendee.html @@ -50,7 +50,7 @@ {{order.event.location_name }}

Date and Time
- {{ order.event.starts_at }} to {{ order.event.ends_at }} + {{ order.event.starts_at.strftime('%H:%M %Z') }} to {{ order.event.ends_at.strftime('%H:%M %Z') }}

Type
diff --git a/app/templates/pdf/ticket_purchaser.html b/app/templates/pdf/ticket_purchaser.html index 983f1e98bb..d4016678bd 100644 --- a/app/templates/pdf/ticket_purchaser.html +++ b/app/templates/pdf/ticket_purchaser.html @@ -51,7 +51,8 @@ {{ order.event.location_name }}

Date and Time
- {{ order.event.starts_at }} to {{ order.event.ends_at }} + From: {{ order.event.starts_at.strftime("%H:%M %Z") }} on {{ order.event.starts_at.date() }}
+ To: {{ order.event.ends_at.strftime("%H:%M %Z") }} on {{ order.event.ends_at.date() }}

Type
diff --git a/app/views/__init__.py b/app/views/__init__.py index 8e052828f3..ce6960ea58 100644 --- a/app/views/__init__.py +++ b/app/views/__init__.py @@ -3,7 +3,6 @@ from flask import url_for, redirect, Blueprint, request, make_response from flask_admin import Admin, AdminIndexView, expose, helpers as admin_helpers from flask_admin.contrib.sqla import ModelView -from flask_scrypt import generate_password_hash from wtforms import form, fields, validators from app.models import db @@ -34,7 +33,7 @@ def validate_login(self, field): if user is None: raise validators.ValidationError('User does not exist.') - if user.password != generate_password_hash(self.password.data, user.salt): + if not user.is_correct_password(self.password.data): raise validators.ValidationError('Credentials incorrect.') if not user.is_admin and not user.is_super_admin: diff --git a/app/views/elastic_cron_helpers.py b/app/views/elastic_cron_helpers.py index 5ee17de06d..b87d31f31f 100644 --- a/app/views/elastic_cron_helpers.py +++ b/app/views/elastic_cron_helpers.py @@ -6,7 +6,7 @@ """ from app.models.event import Event -from app.models.search.sync import rebuild_indices, sync_event_from_database +from app.models.search.sync import rebuild_indices, sync_event_from_database, sync from app.views.celery_ import celery from app.views.elastic_search import connect_from_config from app.views.postgres import get_session_from_config @@ -25,5 +25,4 @@ def cron_rebuild_events_elasticsearch(): def sync_events_elasticsearch(): """Sync all newly created, updated or deleted events""" - elastic = connect_from_config() - elastic.sync() + sync() diff --git a/create_db.py b/create_db.py index 7959d30489..36a195471c 100644 --- a/create_db.py +++ b/create_db.py @@ -24,8 +24,8 @@ def create_default_user(email, password): ask_password = True while ask_password: password = getpass.getpass("Enter password for super_admin : ") - if len(password) < 4: - print('\nPassword should have minimum 4 characters') + if 
len(password) < 8: + print('\nPassword should have minimum 8 characters') continue repassword = getpass.getpass("Enter your password again to confirm : ") if password != repassword: diff --git a/docker-compose.yml b/docker-compose.yml index c29ea17ca6..a7f0ebf4e3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ version: '3.5' x-environment-vars: &environment-vars - DATABASE_URL: postgresql://open_event_user:opev_pass@postgres:5432/open_event + DATABASE_URL: postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB} ELASTICSEARCH_HOST: elastic:9200 REDIS_URL: redis://redis:6379/0 ADMIN_EMAIL: "@{ADMIN_EMAIL}" @@ -32,9 +32,9 @@ services: volumes: - pg:/var/lib/postgresql/data environment: - POSTGRES_USER: open_event_user - POSTGRES_PASSWORD: opev_pass - POSTGRES_DB: open_event + POSTGRES_USER: ${POSTGRES_USER} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + POSTGRES_DB: ${POSTGRES_DB} redis: image: redis:3-alpine diff --git a/docs/api/api_blueprint.apib b/docs/api/api_blueprint.apib index 4b99916462..fc5cde3258 100644 --- a/docs/api/api_blueprint.apib +++ b/docs/api/api_blueprint.apib @@ -196,7 +196,14 @@ Get a list of Users. "avatar-url": "http://example.com/example.png", "twitter-url": "http://twitter.com/twitter", "google-plus-url": "http://plus.google.com/plus.google", - "facebook-id": "12345678" + "facebook-id": "12345678", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "2", @@ -375,7 +382,14 @@ Create a new user using an email, password and an optional name. 
"avatar-url": "http://example.com/example.png", "twitter-url": "http://twitter.com/twitter", "google-plus-url": "http://plus.google.com/plus.google", - "facebook-id": null + "facebook-id": null, + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "2", @@ -530,7 +544,14 @@ Get a single user. "avatar-url": "http://example.com/example.png", "twitter-url": "http://twitter.com/twitter", "google-plus-url": "http://plus.google.com/plus.google", - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "2", @@ -708,7 +729,14 @@ Authorized user should be same as user in request body or must be admin. "avatar-url": "http://example.com/example.png", "twitter-url": "http://twitter.com/twitter", "google-plus-url": "http://plus.google.com/plus.google", - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "2", @@ -836,7 +864,14 @@ Get the details of the user. "deleted-at": null, "small-image-url": null, "details": null, - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "1", @@ -948,7 +983,14 @@ Get the details of the user. 
"deleted-at": null, "small-image-url": null, "details": null, - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "1", @@ -1060,7 +1102,14 @@ Get the details of the user. "deleted-at": null, "small-image-url": null, "details": null, - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "1", @@ -1172,7 +1221,14 @@ Get the details of the user. "deleted-at": null, "small-image-url": null, "details": null, - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "1", @@ -1284,7 +1340,14 @@ Get the details of the user. "deleted-at": null, "small-image-url": null, "details": null, - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "1", @@ -1396,7 +1459,14 @@ Get the details of the user. 
"deleted-at": null, "small-image-url": null, "details": null, - "facebook-id": "123456" + "facebook-id": "123456", + "was-registered-with-order": false, + "is-user-organizer": false, + "is-user-coorganizer": false, + "is-user-track-organizer": false, + "is-user-moderator": false, + "is-user-registrar": false, + "is-user-attendee": false }, "type": "user", "id": "1", @@ -3214,8 +3284,8 @@ Create a new event using a `name`, `starts-at`, `ends-at`, `timezone` and an opt "attributes": { "name": "example", "external-event-url": "http://example.com", - "starts-at": "2016-12-13T23:59:59.123456+00:00", - "ends-at": "2016-12-14T23:59:59.123456+00:00", + "starts-at": "2099-12-13T23:59:59.123456+00:00", + "ends-at": "2099-12-14T23:59:59.123456+00:00", "timezone": "UTC", "latitude": "1.23456789", "longitude": "1.23456789", @@ -3762,8 +3832,8 @@ All other fields are optional. Add attributes you want to modify. "attributes": { "name": "example1", "external-event-url": "http://example11.com", - "starts-at": "2016-12-14T23:59:59.123456+05:30", - "ends-at": "2016-12-15T23:59:59.123456+05:30", + "starts-at": "2099-12-14T23:59:59.123456+05:30", + "ends-at": "2099-12-15T23:59:59.123456+05:30", "timezone": "Asia/Kolkata", "latitude": "12.23456789", "longitude": "12.23456789", @@ -11235,8 +11305,8 @@ Create a new session using an event_id and track_id **(Minimum Co-Organizer Acce "short-abstract": "Short Abstract", "long-abstract": "The Long Abstract", "comments": "Comment", - "starts-at": "2017-06-01T10:00:00.500127+00:00", - "ends-at": "2017-06-01T11:00:00.500127+00:00", + "starts-at": "2099-06-01T10:00:00.500127+00:00", + "ends-at": "2099-06-01T11:00:00.500127+00:00", "language": "English", "slides-url": "http://example.com/example", "video-url": "http://example.com/example", @@ -11436,8 +11506,8 @@ Update a single session by `id`. 
**(Minimum Co-Organizer Access)** "attributes": { "title": "Micropython Session", "level": "1", - "starts-at": "2017-06-01T10:00:00.500127+00:00", - "ends-at": "2017-06-01T11:00:00.500127+00:00", + "starts-at": "2099-06-01T10:00:00.500127+00:00", + "ends-at": "2099-06-01T11:00:00.500127+00:00", "created-at": "2017-05-01T01:24:47.500127+00:00", "is-mail-sent": false }, @@ -17209,16 +17279,16 @@ Get a list of Discount Codes. }, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-08-05T23:38:04.449623+00:00", + "created-at": "2099-08-05T23:38:04.449623+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "event", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -17245,16 +17315,16 @@ Get a list of Discount Codes. }, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-08-23T14:07:33.175725+00:00", + "created-at": "2099-08-23T14:07:33.175725+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "ticket", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -17296,8 +17366,8 @@ Create a new Discount Code for event. (Only Admin) "tickets-number": "404", "min-quantity": "0", "max-quantity": "100", - "valid-from": "2017-06-18T18:30:00+00:00", - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "used-for": "event" }, "type": "discount-code", @@ -17389,8 +17459,8 @@ Create a new Discount Code for event. 
(Only by Co-organizers) "tickets-number": "404", "min-quantity": "0", "max-quantity": "100", - "valid-from": "2017-06-18T18:30:00+00:00", - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "used-for": "ticket" }, "type": "discount-code", @@ -17499,16 +17569,16 @@ Get a list of Discount Codes. }, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-06-20T09:59:23.740772+00:00", + "created-at": "2099-06-20T09:59:23.740772+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "ticket", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -17563,16 +17633,16 @@ Get a single discount code. }, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-06-27T10:43:20.840012+00:00", + "created-at": "2099-06-27T10:43:20.840012+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "ticket", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -17615,8 +17685,8 @@ Update a single discount code with `id` (Check permission to edit). "tickets-number": "404", "min-quantity": "0", "max-quantity": "100", - "valid-from": "2017-06-18T18:30:00+00:00", - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "used-for": "ticket" }, "type": "discount-code", @@ -17728,16 +17798,16 @@ Get a single discount code using a code. 
}, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-08-05T23:38:04.449623+00:00", + "created-at": "2099-08-05T23:38:04.449623+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "event", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -17799,16 +17869,16 @@ Get a list of Discount Codes. }, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-08-05T23:38:04.449623+00:00", + "created-at": "2099-08-05T23:38:04.449623+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "event", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -17874,7 +17944,7 @@ Get a list of Discount Codes applied on a given ticket. }, "attributes": { "value": 100, - "created-at": "2018-06-26T16:03:33.494021+00:00", + "created-at": "2099-06-26T16:03:33.494021+00:00", "tickets-number": 404, "code": "DC101", "used-for": "ticket", @@ -17883,8 +17953,8 @@ Get a list of Discount Codes applied on a given ticket. "discount-url": "https://my-discount-url.com", "type": "amount", "min-quantity": 0, - "valid-from": "2017-06-18T18:30:00+00:00", - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "deleted-at": null }, "id": 1, @@ -17939,16 +18009,16 @@ Get a single discount code. 
}, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-08-05T23:38:04.449623+00:00", + "created-at": "2099-08-05T23:38:04.449623+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "event", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -18002,16 +18072,16 @@ Get a single discount code. (Requires admin access) }, "attributes": { "code": "DC101", - "valid-from": "2017-06-18T18:30:00+00:00", + "valid-from": "2099-06-18T18:30:00+00:00", "min-quantity": 0, - "created-at": "2017-08-05T23:38:04.449623+00:00", + "created-at": "2099-08-05T23:38:04.449623+00:00", "tickets-number": 404, "value": 100, "max-quantity": 100, "is-active": false, "used-for": "event", "deleted-at": null, - "valid-till": "2017-06-24T18:30:00+00:00", + "valid-till": "2099-06-24T18:30:00+00:00", "type": "amount", "discount-url": "https://my-discount-url.com" }, @@ -20699,8 +20769,6 @@ Get a single FAQ. Accept: application/vnd.api+json - Authorization: JWT - + Response 200 (application/vnd.api+json) { @@ -21324,8 +21392,6 @@ Get a single feedback. 
Accept: application/vnd.api+json - Authorization: JWT - + Response 200 (application/vnd.api+json) { @@ -23300,6 +23366,9 @@ Get the task status and the final result of the task as response | `state` | If the custom form is included | boolean | - | | `country` | Country of the purchaser | boolean | - | | `zipcode` | Zipcode of the address | string | - | +| `company` | Company for Billing Info | string | - | +| `tax-business-info` | Tax Business details for Billing Info | string | - | +| `is-billing-enabled` | Yes/No to signify if Billing Info is enabled or not | boolean (default: `false`) | - | | `payment-mode` | Mode of payment (free,stripe,paypal) | string | - | | `status` | Status of the order(pending, completed, placed, cancelled, expired) | string (default=pending) | - | | `discount_code_id` | ID of the discount code | string | - | @@ -23473,6 +23542,9 @@ Create a new Order "discount-code-id": null, "brand": null, "zipcode": null, + "company": null, + "tax-business-info": null, + "is-billing-enabled": false, "payment-mode": "paypal", "last4": null, "state": null, @@ -23564,6 +23636,9 @@ Get a single Order detail. "discount-code-id": null, "brand": null, "zipcode": null, + "company": null, + "tax-business-info": null, + "is-billing-enabled": false, "payment-mode": "paypal", "payment-url": "https://www.sandbox.paypal.com/cgi-bin/webscr?token=EC-62113287PK247472B&cmd=_express-checkout", "last4": null, @@ -23666,6 +23741,9 @@ Update a single custom form with `id`. 
"discount-code-id": null, "brand": null, "zipcode": null, + "company": null, + "tax-business-info": null, + "is-billing-enabled": false, "payment-mode": "paypal", "payment-url": "https://www.sandbox.paypal.com/cgi-bin/webscr?token=EC-62113287PK247472B&cmd=_express-checkout", "last4": null, @@ -23823,6 +23901,9 @@ Create a new Order with on site attendees "discount-code-id": null, "brand": null, "zipcode": null, + "company": null, + "tax-business-info": null, + "is-billing-enabled": false, "payment-mode": "free", "last4": null, "state": null, @@ -23988,6 +24069,9 @@ Receive payments for an order "country": "India", "identifier": "070abac6-44a7-423d-830f-f5f0ef4e83f2", "zipcode": null, + "company": null, + "tax-business-info": null, + "is-billing-enabled": false, "discount-code-id": null, "tickets-pdf-url": "https://example.com/media/attendees/tickets/pdf/order_identifier.pdf" } @@ -24097,8 +24181,6 @@ Create paypal payment + filter(optional, string, ``) - Filter according to the flask-rest-jsonapi filtering system. Please refer: http://flask-rest-jsonapi.readthedocs.io/en/latest/filtering.html for more. -The sales object contains properties placed, completed and pending properties which are objects that contain the count on total sales and tickets - ### Show Sales by Events [GET] + Request @@ -25338,6 +25420,7 @@ Get the details of the panel permission. } } + # Group Favourite Events This Group's APIs can be used for adding a particular event to the favourite list of the user. 
@@ -25517,3 +25600,63 @@ This Group's APIs can be used for adding a particular event to the favourite lis "version": "1.0" } } + + +# Group Import Jobs + +This Group's APIs are used for getting info of import jobs + +## Import Jobs Collection [/v1/import-jobs{?page%5bsize%5d,page%5bnumber%5d,sort,filter}] + +### List All Import Jobs [GET] +Get a list of all import jobs + ++ Request + + + Headers + + Accept: application/vnd.api+json + + Authorization: JWT + ++ Response 200 (application/vnd.api+json) + + { + "data": [ + { + "type": "import-job", + "attributes": { + "starts-at": "2019-03-26T07:34:23.061153+00:00", + "result": "1", + "task": "a8e8b899-7537-4d09-bcd5-fd250a3efc06", + "result-status": "SUCCESS" + }, + "id": "1", + "links": { + "self": "/v1/import-jobs/1" + } + }, + { + "type": "import-job", + "attributes": { + "starts-at": "2019-03-26T07:47:31.285576+00:00", + "result": "2", + "task": "788ae813-1fcf-4c47-ab4a-80ff33abd097", + "result-status": "SUCCESS" + }, + "id": "2", + "links": { + "self": "/v1/import-jobs/2" + } + } + ], + "links": { + "self": "/v1/import-jobs" + }, + "meta": { + "count": 2 + }, + "jsonapi": { + "version": "1.0" + } + } diff --git a/docs/installation/basic.md b/docs/installation/basic.md index f36b24978c..4e107ad091 100644 --- a/docs/installation/basic.md +++ b/docs/installation/basic.md @@ -32,8 +32,8 @@ sudo -u postgres psql * When inside psql, create a user for open-event and then using the user create the database. ```sql -CREATE USER john WITH PASSWORD 'start'; -CREATE DATABASE oevent WITH OWNER john; +CREATE USER open_event_user WITH PASSWORD 'opev_pass'; +CREATE DATABASE oevent WITH OWNER open_event_user; ``` * Once database is created, exit the psql shell with `\q` followed by ENTER. 
diff --git a/docs/installation/local.md b/docs/installation/local.md index cafe601790..5cac281494 100644 --- a/docs/installation/local.md +++ b/docs/installation/local.md @@ -45,14 +45,11 @@ hint: You may need to upgrade your pip version and install following packages if # Installation in Virtual Environment -```sh -virtualenv -p python3 venv -source venv/bin/activate -pip3 install -r requirements.txt -``` +You can use either **pip** or **pipenv** to install Open Event Server in a virtual environment. -```sh +Firstly, open a terminal and enter +```sh # For linux users sudo apt-get install python3-dev sudo apt-get install libpq-dev @@ -63,6 +60,38 @@ brew install python@3 brew install libmagic ``` +## Using pip and virtualenv + +Open a terminal and enter the following commands to setup a virtual environment + +```sh +virtualenv -p python3 venv +. venv/bin/activate +``` + +Now to install the dependencies using pip, type + +```sh +pip3 install -r requirements.txt +``` + +## Using pipenv + +Using pipenv, you will not need to set up virtualenv. It will do it automatically for you + +To setup a virtual environment and install the dependencies, enter in a terminal + +```sh +pipenv install +``` + +Now to activate the virtual environment, type + +```sh +pipenv shell +``` + + * **Step 2** - Create the database. For that we first open the psql shell. Go the directory where your postgres file is stored. ```sh @@ -73,16 +102,17 @@ sudo -u postgres psql psql -d postgres ``` -* When inside psql, create a user for open-event and then using the user create the database. +* When inside psql, create a user for open-event and then using the user create the database. Also, create a test database named opev_test for the test suites by dumping the oevent database into it. Without this, the tests will not run locally. For ease of development, you should create Postgres user with the same username as your OS account. 
If your OS login account is _john_, for example, you should create _john_ user in Postgres. By this, you can skip entering password when using database. ```sql -CREATE USER john WITH PASSWORD 'start'; -CREATE DATABASE oevent WITH OWNER john; +CREATE USER open_event_user WITH PASSWORD 'opev_pass'; +CREATE DATABASE oevent WITH OWNER open_event_user; +CREATE DATABASE opev_test WITH OWNER open_event_user; ``` -* Once database is created, exit the psql shell with `\q` followed by ENTER. +* Once the databases are created, exit the psql shell with `\q` followed by ENTER. * **Step 3** - Create application environment variables. @@ -123,7 +153,7 @@ python3 manage.py db stamp head # For Ubuntu, Debian and alike sudo apt-get install redis-server # For Fedora, RedHat, CentOS -sudo dnf install redis-server +sudo dnf install redis # For macOS brew install redis diff --git a/manage.py b/manage.py index f2fee2a918..a69ed2db0e 100644 --- a/manage.py +++ b/manage.py @@ -79,16 +79,18 @@ def initialize_db(credentials): print("[LOG] Could not create tables. Either database does not exist or tables already created") if populate_data: credentials = credentials.split(":") - create_super_admin(credentials[0], credentials[1]) + admin_email = os.environ.get('SUPER_ADMIN_EMAIL', credentials[0]) + admin_password = os.environ.get('SUPER_ADMIN_PASSWORD', credentials[1]) + create_super_admin(admin_email, admin_password) populate() else: print("[LOG] Tables already exist. 
Skipping data population & creation.") @manager.command -def prepare_kubernetes_db(): +def prepare_kubernetes_db(credentials='open_event_test_user@fossasia.org:fossasia'): with app.app_context(): - initialize_db('open_event_test_user@fossasia.org:fossasia') + initialize_db(credentials) if __name__ == "__main__": diff --git a/migrations/versions/35f427e85075_.py b/migrations/versions/35f427e85075_.py new file mode 100644 index 0000000000..e740b681a6 --- /dev/null +++ b/migrations/versions/35f427e85075_.py @@ -0,0 +1,36 @@ +"""empty message + +Revision ID: 35f427e85075 +Revises: 565ea5bc3937 +Create Date: 2019-03-20 21:25:31.304161 + +""" + +from alembic import op +import sqlalchemy as sa +import sqlalchemy_utils +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '35f427e85075' +down_revision = '565ea5bc3937' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('sessions_speakers_links', + sa.Column('deleted_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), + sa.Column('id', sa.INTEGER(), nullable=False), + sa.Column('event_id', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('session_id', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('speaker_id', sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column('session_state', sa.VARCHAR(), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint('id', name='sessions_speakers_links_pkey') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('sessions_speakers_links') + # ### end Alembic commands ### diff --git a/migrations/versions/4defe72abed3_.py b/migrations/versions/4defe72abed3_.py index f3b4c87e6c..04e61ae940 100644 --- a/migrations/versions/4defe72abed3_.py +++ b/migrations/versions/4defe72abed3_.py @@ -25,7 +25,7 @@ def upgrade(): op.execute("""UPDATE events SET state = 'draft' WHERE state != 'published'""") op.execute("""UPDATE sessions SET state = LOWER(state)""") op.execute("""UPDATE sessions SET state = 'draft' WHERE state not in - ('accepted', 'pending', 'approved', 'rejected')""") + ('accepted', 'pending', 'confirmed', 'rejected')""") # ### end Alembic commands ### diff --git a/migrations/versions/5551af72812f_.py b/migrations/versions/5551af72812f_.py new file mode 100644 index 0000000000..eda70312e5 --- /dev/null +++ b/migrations/versions/5551af72812f_.py @@ -0,0 +1,30 @@ +"""empty message + +Revision ID: 5551af72812f +Revises: 7bb5891a9f2e +Create Date: 2019-04-04 23:16:27.618573 + +""" + +from alembic import op +import sqlalchemy as sa +import sqlalchemy_utils + + +# revision identifiers, used by Alembic. +revision = '5551af72812f' +down_revision = '7bb5891a9f2e' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('access_codes', sa.Column('ticket_id', sa.Integer(), nullable=True)) + op.create_foreign_key(None, 'access_codes', 'tickets', ['ticket_id'], ['id'], ondelete='CASCADE') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_constraint(None, 'access_codes', type_='foreignkey') + op.drop_column('access_codes', 'ticket_id') + # ### end Alembic commands ### diff --git a/migrations/versions/565ea5bc3937_.py b/migrations/versions/565ea5bc3937_.py new file mode 100644 index 0000000000..b196a51a12 --- /dev/null +++ b/migrations/versions/565ea5bc3937_.py @@ -0,0 +1,27 @@ +"""empty message + +Revision ID: 565ea5bc3937 +Revises: 91ee86a38001 +Create Date: 2019-02-18 15:50:18.809478 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '565ea5bc3937' +down_revision = '91ee86a38001' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('orders', sa.Column('is_billing_enabled', sa.Boolean(), server_default='False')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('orders', 'is_billing_enabled') + # ### end Alembic commands ### diff --git a/migrations/versions/6e5c574cbfb8_.py b/migrations/versions/6e5c574cbfb8_.py new file mode 100644 index 0000000000..81b25e6f03 --- /dev/null +++ b/migrations/versions/6e5c574cbfb8_.py @@ -0,0 +1,34 @@ +"""empty message + +Revision ID: 6e5c574cbfb8 +Revises: 35f427e85075 +Create Date: 2019-03-24 11:09:42.707206 + +""" + +from alembic import op +import sqlalchemy as sa +import sqlalchemy_utils +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '6e5c574cbfb8' +down_revision = '35f427e85075' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.execute("UPDATE orders SET amount = 0 WHERE amount is NULL", execution_options=None) + op.alter_column('orders', 'amount', + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column('orders', 'amount', + existing_type=postgresql.DOUBLE_PRECISION(precision=53), + nullable=True) + # ### end Alembic commands ### diff --git a/migrations/versions/6f7b6fad3f53_add_can_pay_by_omise_in_event_model.py b/migrations/versions/6f7b6fad3f53_add_can_pay_by_omise_in_event_model.py new file mode 100644 index 0000000000..81c7e018f0 --- /dev/null +++ b/migrations/versions/6f7b6fad3f53_add_can_pay_by_omise_in_event_model.py @@ -0,0 +1,30 @@ +"""Add can_pay_by_omise in event model + +Revision ID: 6f7b6fad3f53 +Revises: a0532f339abb +Create Date: 2019-05-07 16:14:57.129985 + +""" + +from alembic import op +import sqlalchemy as sa +import sqlalchemy_utils + + +# revision identifiers, used by Alembic. +revision = '6f7b6fad3f53' +down_revision = 'a0532f339abb' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('events', sa.Column('can_pay_by_omise', sa.Boolean(), nullable=True)) + op.add_column('events_version', sa.Column('can_pay_by_omise', sa.Boolean(), autoincrement=False, nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('events_version', 'can_pay_by_omise') + op.drop_column('events', 'can_pay_by_omise') + # ### end Alembic commands ### diff --git a/migrations/versions/7bb5891a9f2e_lowercase_email.py b/migrations/versions/7bb5891a9f2e_lowercase_email.py new file mode 100644 index 0000000000..edbbec9143 --- /dev/null +++ b/migrations/versions/7bb5891a9f2e_lowercase_email.py @@ -0,0 +1,35 @@ +"""lowercase_email + +Revision ID: 7bb5891a9f2e +Revises: 6e5c574cbfb8 +Create Date: 2019-03-30 12:51:48.134800 + +""" + +from alembic import op +from sqlalchemy import func +import sqlalchemy as sa +import sqlalchemy_utils + + +# revision identifiers, used by Alembic. 
+revision = '7bb5891a9f2e' +down_revision = '6e5c574cbfb8' + + +def upgrade(): + op.execute("UPDATE users SET deleted_at = current_timestamp, _email = concat(_email, '_') where _email not in (SELECT DISTINCT ON (upper(_email)) _email FROM users);", + execution_options=None) + op.execute("create extension citext;", + execution_options=None) + op.execute("alter table users alter column _email type citext;", + execution_options=None) + + +def downgrade(): + op.execute("alter table users alter column _email type text;", + execution_options=None) + op.execute("UPDATE users SET deleted_at = null, _email = left(_email, length(_email)-1) where right(_email, 1) = '_';", + execution_options=None) + op.execute("drop extension citext;", + execution_options=None) diff --git a/migrations/versions/91ee86a38001_.py b/migrations/versions/91ee86a38001_.py new file mode 100644 index 0000000000..612ba80342 --- /dev/null +++ b/migrations/versions/91ee86a38001_.py @@ -0,0 +1,29 @@ +"""empty message + +Revision ID: 91ee86a38001 +Revises: 41818fe31207 +Create Date: 2019-01-25 13:40:31.819688 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '91ee86a38001' +down_revision = 'e3caa0f2a16c' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('orders', sa.Column('company', sa.String(), nullable=True)) + op.add_column('orders', sa.Column('tax_business_info', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('orders', 'tax_business_info') + op.drop_column('orders', 'company') + # ### end Alembic commands ### diff --git a/migrations/versions/a0532f339abb_.py b/migrations/versions/a0532f339abb_.py new file mode 100644 index 0000000000..1a98fc8640 --- /dev/null +++ b/migrations/versions/a0532f339abb_.py @@ -0,0 +1,35 @@ +"""empty message + +Revision ID: a0532f339abb +Revises: e59e7a75f679 +Create Date: 2019-05-05 02:29:08.380691 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'a0532f339abb' +down_revision = 'e59e7a75f679' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('settings', sa.Column('omise_live_public', sa.String(), nullable=True)) + op.add_column('settings', sa.Column('omise_live_secret', sa.String(), nullable=True)) + op.add_column('settings', sa.Column('omise_mode', sa.String(), nullable=True)) + op.add_column('settings', sa.Column('omise_test_public', sa.String(), nullable=True)) + op.add_column('settings', sa.Column('omise_test_secret', sa.String(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('settings', 'omise_test_secret') + op.drop_column('settings', 'omise_test_public') + op.drop_column('settings', 'omise_mode') + op.drop_column('settings', 'omise_live_secret') + op.drop_column('settings', 'omise_live_public') + # ### end Alembic commands ### diff --git a/migrations/versions/e3caa0f2a16c_.py b/migrations/versions/e3caa0f2a16c_.py new file mode 100644 index 0000000000..52422f819c --- /dev/null +++ b/migrations/versions/e3caa0f2a16c_.py @@ -0,0 +1,25 @@ +"""empty message + +Revision ID: e3caa0f2a16c +Revises: 41818fe31207 +Create Date: 2019-01-30 02:32:10.365941 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = 'e3caa0f2a16c' +down_revision = '41818fe31207' + + +def upgrade(): + op.add_column('users', sa.Column('was_registered_with_order', sa.Boolean(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + op.drop_column('users', 'was_registered_with_order') + # ### end Alembic commands ### diff --git a/migrations/versions/e59e7a75f679_.py b/migrations/versions/e59e7a75f679_.py new file mode 100644 index 0000000000..1004e45c68 --- /dev/null +++ b/migrations/versions/e59e7a75f679_.py @@ -0,0 +1,30 @@ +"""empty message + +Revision ID: e59e7a75f679 +Revises: 5551af72812f +Create Date: 2019-04-19 13:27:05.212985 + +""" + +from alembic import op +import sqlalchemy as sa +import sqlalchemy_utils + + +# revision identifiers, used by Alembic. +revision = 'e59e7a75f679' +down_revision = '5551af72812f' + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('events', sa.Column('is_featured', sa.Boolean(), server_default='False', nullable=False)) + op.add_column('events_version', sa.Column('is_featured', sa.Boolean(), server_default='False', autoincrement=False, nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('events_version', 'is_featured') + op.drop_column('events', 'is_featured') + # ### end Alembic commands ### diff --git a/populate_db.py b/populate_db.py index 8657a4c83c..d24ab2702b 100644 --- a/populate_db.py +++ b/populate_db.py @@ -1,6 +1,6 @@ from app import current_app from app.models import db -from app.api.helpers.db import get_or_create # , save_to_db +from app.api.helpers.db import get_or_create, save_to_db # , save_to_db from envparse import env # Admin message settings @@ -40,6 +40,9 @@ # EventLocation from app.models.event_location import EventLocation +# Custom Placeholder +from app.models.custom_placeholder import CustomPlaceholder + # User Permissions from app.models.user_permission import UserPermission SALES = 'sales' @@ -111,8 +114,7 @@ def create_settings(): setting.tw_consumer_secret = tw_consumer_secret setting.in_client_id = in_client_id setting.in_client_secret = in_client_secret - db.session.add(setting) - db.session.commit() + save_to_db(setting) def create_event_image_sizes(): @@ -166,7 +168,7 @@ def create_event_sub_topics(): "Music": ["Cultural", "Pop", "Top 40", "EDM / Electronic", "R&B", "Other", "Classical"], "Performing & Visual Arts": ["Craft", "Comedy", "Fine Art", "Orchestra"], "Family & Education": ["Education", "Baby", "Reunion"], - "Business & Professional": ["Career", "Startups & Small Business", "Educators", "Design", "Finance"], + "Business & Professional": ["Career", "Startups & Small Business", "Educators", "Design", "Finance"], "Charity & Causes": ["Education", "Other", "Environment"], "Hobbies & Special Interest": ["Other", "Anime/Comics"], "Seasonal & Holiday": ["Easter", "Other"], @@ -174,7 +176,7 @@ def create_event_sub_topics(): "Religion & Spirituality": ["Mysticism and Occult"], "Government & Politics": ["Non-partisan"] } - eventopics=db.session.query(EventTopic).all() + eventopics = db.session.query(EventTopic).all() for keysub_topic in event_sub_topic: for subtopic in 
event_sub_topic[keysub_topic]: get_or_create(EventSubTopic, name=subtopic, event_topic_id=next(x for x in eventopics if x.name==keysub_topic).id) @@ -311,6 +313,15 @@ def create_admin_message_settings(): ) +def create_custom_placeholders(): + custom_placeholder, _ = get_or_create( + CustomPlaceholder, name='Hills', + original_image_url='https://www.w3schools.com/html/pic_mountain.jpg', + event_sub_topic_id=1 + ) + db.session.add(custom_placeholder) + + def populate(): """ Create defined Roles, Services and Permissions. @@ -347,6 +358,10 @@ def populate(): create_event_locations() print('Creating admin message settings...') create_admin_message_settings() + print('Creating custom placeholders...') + create_custom_placeholders() + + db.session.commit() def populate_without_print(): @@ -369,6 +384,7 @@ def populate_without_print(): create_event_types() create_event_locations() create_admin_message_settings() + create_custom_placeholders() db.session.commit() diff --git a/requirements/common.txt b/requirements/common.txt index 676034c23d..de04e3fbd8 100644 --- a/requirements/common.txt +++ b/requirements/common.txt @@ -8,7 +8,7 @@ Flask-Scrypt>=0.1.3.6,<0.2 Flask-JWT>=0.3.2,<0.4 requests-oauthlib>=0.7.0,<1 icalendar>=3.11,<4 -requests[security]>=2.12.4,<3 +requests[security]>=2.20.0,<3 psycopg2-binary SQLAlchemy-Utils>=0.32.12,<0.33 itsdangerous>=0.24,<0.30 diff --git a/scripts/test_multiple_heads.sh b/scripts/test_multiple_heads.sh index 0eee5dd77b..14f926ff7e 100644 --- a/scripts/test_multiple_heads.sh +++ b/scripts/test_multiple_heads.sh @@ -1,4 +1,4 @@ -lines=`python3 manage.py db heads | wc | awk '{print $1}'` +lines=`python3 manage.py db heads | grep -c "head" | wc | awk '{print $1}'` if [ $lines -ne 1 ] then echo "Error: Multiple Migration Heads" diff --git a/tests/all/integration/api/helpers/test_auth.py b/tests/all/integration/api/helpers/test_auth.py new file mode 100644 index 0000000000..3c01356c02 --- /dev/null +++ 
b/tests/all/integration/api/helpers/test_auth.py @@ -0,0 +1,49 @@ +from app import current_app as app +from tests.all.integration.auth_helper import create_user +from tests.all.integration.utils import OpenEventTestCase +from app.api.helpers import auth +from tests.all.integration.setup_database import Setup +from app.models import db +from app.models.user import User + +from flask_login import login_user, logout_user +import unittest + + +class TestAuthentication(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_load_user(self): + """Method to test the registered user details""" + + with app.test_request_context(): + auth_manager = auth.AuthManager() + auth_manager.init_login(app) + user = create_user(email='authtest@gmail.com', password='password') + self.assertEqual(user, db.session.query(User).get(user.id)) + + def test_verified_user(self): + """Method to test if user is verified""" + + with app.test_request_context(): + auth_manager = auth.AuthManager() + auth_manager.init_login(app) + user = create_user(email='authtest@gmail.com', password='password') + user.is_verified = False + login_user(user) + self.assertEqual(auth_manager.is_verified_user(), False) + + def test_is_accessible(self): + """Method to test if user is accessible(authenticated)""" + + with app.test_request_context(): + auth_manager = auth.AuthManager() + auth_manager.init_login(app) + user = create_user(email='test@test.com', password='password') + login_user(user) + logout_user() + self.assertEqual(auth_manager.is_accessible(), False) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/helpers/test_csv_jobs_util.py b/tests/all/integration/api/helpers/test_csv_jobs_util.py new file mode 100644 index 0000000000..f63350247d --- /dev/null +++ b/tests/all/integration/api/helpers/test_csv_jobs_util.py @@ -0,0 +1,62 @@ +from app import current_app as app +from tests.all.integration.auth_helper import create_user +from 
tests.all.integration.utils import OpenEventTestCase +from tests.all.integration.setup_database import Setup +from app.api.helpers.csv_jobs_util import * +from app.factories.attendee import AttendeeFactory +from app.factories.order import OrderFactory +from app.factories.session import SessionFactory +from app.factories.speaker import SpeakerFactory +from app.models import db +import app.factories.common as common + +import unittest + + +class TestExportCSV(OpenEventTestCase): + + def setUp(self): + self.app = Setup.create_app() + + def test_export_orders_csv(self): + """Method to check the orders data export""" + + with app.test_request_context(): + test_order = OrderFactory() + test_order.amount = 2 + field_data = export_orders_csv([test_order]) + self.assertEqual(field_data[1][2], 'pending') + self.assertEqual(field_data[1][4], '2') + + def test_export_attendees_csv(self): + """Method to check the attendees data export""" + + with app.test_request_context(): + test_attendee = AttendeeFactory() + field_data = export_attendees_csv([test_attendee]) + self.assertEqual(field_data[1][3], common.string_) + self.assertEqual(field_data[1][5], 'user0@example.com') + + def test_export_sessions_csv(self): + """Method to check sessions data export""" + + with app.test_request_context(): + test_session = SessionFactory() + field_data = export_sessions_csv([test_session]) + self.assertEqual(field_data[1][6], common.int_) + self.assertEqual(field_data[1][7], 'accepted') + + def test_export_speakers_csv(self): + """Method to check speakers data export""" + + with app.test_request_context(): + test_speaker = SpeakerFactory() + user = create_user(email='export@example.com', password='password') + user.id = 2 + field_data = export_speakers_csv([test_speaker]) + self.assertEqual(field_data[1][0], common.string_) + self.assertEqual(field_data[1][1], 'user0@example.com') + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git 
a/tests/all/integration/api/helpers/test_db.py b/tests/all/integration/api/helpers/test_db.py index abf8ace874..4c46f8a446 100644 --- a/tests/all/integration/api/helpers/test_db.py +++ b/tests/all/integration/api/helpers/test_db.py @@ -30,8 +30,7 @@ def test_safe_query(self): with app.test_request_context(): event = EventFactoryBasic() - db.session.add(event) - db.session.commit() + save_to_db(event) obj = safe_query(db, Event, 'id', event.id, 'event_id') self.assertEqual(obj.name, event.name) diff --git a/tests/all/integration/api/helpers/test_errors.py b/tests/all/integration/api/helpers/test_errors.py index 91b2db5222..333cd1b31f 100644 --- a/tests/all/integration/api/helpers/test_errors.py +++ b/tests/all/integration/api/helpers/test_errors.py @@ -1,9 +1,12 @@ import unittest +import json from tests.all.integration.utils import OpenEventTestCase -from app.api.helpers.errors import ForbiddenError, NotFoundError, ServerError, \ +from app.api.helpers.errors import ErrorResponse, ForbiddenError, NotFoundError, ServerError, \ UnprocessableEntityError, BadRequestError from tests.all.integration.setup_database import Setup +from flask_rest_jsonapi.errors import jsonapi_errors +from flask import make_response from app import current_app as app @@ -11,8 +14,18 @@ class TestErrorsHelperValidation(OpenEventTestCase): def setUp(self): self.app = Setup.create_app() + def test_error_response_base_respond(self): + """Method to test base error response methods""" + + with app.test_request_context(): + base_error_response = ErrorResponse(source="test source", detail="test detail") + json_object = json.dumps(jsonapi_errors([base_error_response.to_dict()])) + self.assertNotEqual(base_error_response.respond(), make_response(json_object, 200, + {'Content-Type': 'application/vnd.api+json'})) + + def test_errors(self): - """Method to test the status code of all errors.""" + """Method to test the status code of all errors""" with app.test_request_context(): # Forbidden Error diff 
--git a/tests/all/integration/api/helpers/test_export_helpers.py b/tests/all/integration/api/helpers/test_export_helpers.py new file mode 100644 index 0000000000..aadaca11ae --- /dev/null +++ b/tests/all/integration/api/helpers/test_export_helpers.py @@ -0,0 +1,40 @@ +import unittest + +from tests.all.integration.utils import OpenEventTestCase +from tests.all.integration.auth_helper import create_user +from tests.all.integration.setup_database import Setup +from app import current_app as app +from app.api.helpers.export_helpers import create_export_job +from app.factories.export_job import ExportJobFactory +from app.factories.event import EventFactoryBasic +from app.models.export_job import ExportJob +from app.api.helpers.db import save_to_db + +from flask_login import login_user + + +class TestExportJobHelpers(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_create_export_job(self): + """Method to test export job before creation""" + + with app.test_request_context(): + test_related_event = EventFactoryBasic() + save_to_db(test_related_event) + test_export_job = ExportJobFactory() + save_to_db(test_export_job) + test_export_job.event = test_related_event + export_event_id = test_export_job.event.id + test_task_id = test_export_job.task + user = create_user(email='user0@example.com', password='password') + login_user(user) + create_export_job(test_task_id, export_event_id) + export_job = ExportJob.query.filter_by(event=test_related_event).first() + self.assertEqual(export_job.event.name, 'example') + self.assertEqual(export_job.user_email, 'user0@example.com') + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/helpers/test_files.py b/tests/all/integration/api/helpers/test_files.py index 4fe0974b40..65ee9893c2 100644 --- a/tests/all/integration/api/helpers/test_files.py +++ b/tests/all/integration/api/helpers/test_files.py @@ -12,7 +12,7 @@ from app.api.helpers.files import uploaded_image, 
uploaded_file from tests.all.integration.setup_database import Setup from tests.all.integration.utils import OpenEventTestCase - +from app.api.helpers.utilities import image_link class TestFilesHelperValidation(OpenEventTestCase): def setUp(self): @@ -64,7 +64,7 @@ def upload(): with app.test_request_context(): client = app.test_client() resp = client.post('/test_upload', data={'file': (BytesIO(b'1,2,3,4'), 'test_file.csv')}) - data = json.loads(resp.data) + data = resp.get_json() file_path = data['path'] filename = data['name'] actual_file_path = app.config.get('BASE_DIR') + '/static/uploads/' + filename @@ -100,7 +100,7 @@ def upload_multi(): resp = client.post('/test_upload_multi', data={'files[]': [(BytesIO(b'1,2,3,4'), 'test_file.csv'), (BytesIO(b'10,20,30,40'), 'test_file2.csv')]}) - datas = json.loads(resp.data)['files'] + datas = resp.get_json()['files'] for data in datas: file_path = data['path'] filename = data['name'] @@ -112,7 +112,7 @@ def test_create_save_resized_image(self): """Method to test create resized images""" with app.test_request_context(): - image_url_test = 'https://cdn.pixabay.com/photo/2017/06/17/10/55/hot-air-balloon-2411851_1280.jpg' + image_url_test = image_link width = 500 height = 200 aspect_ratio = False @@ -129,7 +129,7 @@ def test_create_save_image_sizes(self): """Method to test create image sizes""" with app.test_request_context(): - image_url_test = 'https://cdn.pixabay.com/photo/2017/06/17/10/55/hot-air-balloon-2411851_1280.jpg' + image_url_test = image_link image_sizes_type = "event-image" width_large = 1300 width_thumbnail = 500 diff --git a/tests/all/integration/api/helpers/test_icalexporter.py b/tests/all/integration/api/helpers/test_icalexporter.py new file mode 100644 index 0000000000..5d0c28a004 --- /dev/null +++ b/tests/all/integration/api/helpers/test_icalexporter.py @@ -0,0 +1,27 @@ +import unittest +import icalendar +from app import current_app as app + +from tests.all.integration.setup_database import Setup +from 
tests.all.integration.utils import OpenEventTestCase +from app.factories.session import SessionFactory +from app.api.helpers.ICalExporter import ICalExporter + + +class TestICalExporter(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_export(self): + """Test to export ical format event""" + with app.test_request_context(): + test_session = SessionFactory() + icalexport_object = ICalExporter() + test_cal_str = icalexport_object.export(test_session.event_id) + test_cal = icalendar.Calendar.from_ical(test_cal_str) + self.assertEqual(test_cal['x-wr-calname'], 'example') + self.assertEqual(test_cal['x-wr-caldesc'], 'Schedule for sessions at example') + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/helpers/test_jwt.py b/tests/all/integration/api/helpers/test_jwt.py index f2bf8460b4..a0fbb1f56b 100644 --- a/tests/all/integration/api/helpers/test_jwt.py +++ b/tests/all/integration/api/helpers/test_jwt.py @@ -7,6 +7,7 @@ from app.factories.event import EventFactoryBasic from app.factories.user import UserFactory from app.models import db +from app.api.helpers.db import save_to_db from tests.all.integration.setup_database import Setup from tests.all.integration.utils import OpenEventTestCase @@ -20,8 +21,7 @@ def test_jwt_authenticate(self): with app.test_request_context(): user = UserFactory() - db.session.add(user) - db.session.commit() + save_to_db(user) # Valid Authentication authenticated_user = jwt_authenticate(user.email, 'password') @@ -36,13 +36,11 @@ def test_get_identity(self): with app.test_request_context(): user = UserFactory() - db.session.add(user) - db.session.commit() + save_to_db(user) event = EventFactoryBasic() event.user_id = user.id - db.session.add(event) - db.session.commit() + save_to_db(event) # Authenticate User self.auth = {'Authorization': "JWT " + str(_default_jwt_encode_handler(user), 'utf-8')} diff --git a/tests/all/integration/api/helpers/test_log.py 
b/tests/all/integration/api/helpers/test_log.py new file mode 100644 index 0000000000..dc10f5f358 --- /dev/null +++ b/tests/all/integration/api/helpers/test_log.py @@ -0,0 +1,38 @@ +from tests.all.integration.setup_database import Setup +from app import current_app as app +from tests.all.integration.utils import OpenEventTestCase +from app.api.helpers.log import record_activity +from app.models.activity import Activity +from tests.all.integration.auth_helper import create_user +from app.models import db + +import unittest + + +class TestLogging(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_record_activity_valid_template(self): + """Test to record activity for valid template""" + with app.test_request_context(): + test_user = create_user(email="logging@test.com", password="logpass") + record_activity('create_user', login_user=test_user, user=test_user) + user_id_format = ' (' + str(test_user.id) + ')' + test_actor = test_user.email + user_id_format + self.assertTrue('User logging@test.com' + user_id_format + ' created', + db.session.query(Activity).filter_by(actor=test_actor).first().action) + + def test_record_activity_invalid_template(self): + """Test to record activity for invalid template""" + with app.test_request_context(): + test_user = create_user(email="logging@test.com", password="logpass") + record_activity('invalid_template', login_user=test_user, user=test_user) + user_id_format = ' (' + str(test_user.id) + ')' + test_actor = test_user.email + user_id_format + self.assertTrue('[ERROR LOGGING] invalid_template', + db.session.query(Activity).filter_by(actor=test_actor).first().action) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/helpers/test_order.py b/tests/all/integration/api/helpers/test_order.py index 153236b1ba..77d35fa371 100644 --- a/tests/all/integration/api/helpers/test_order.py +++ b/tests/all/integration/api/helpers/test_order.py @@ -2,11 +2,13 @@ from datetime 
import timedelta, datetime, timezone from app import current_app as app, db +from app.api.helpers.db import save_to_db from app.api.helpers.order import set_expiry_for_order, delete_related_attendees_for_order from app.factories.attendee import AttendeeFactory from app.factories.event import EventFactoryBasic from app.factories.order import OrderFactory from app.models.order import Order +from app.api.helpers.db import save_to_db from tests.all.integration.setup_database import Setup from tests.all.integration.utils import OpenEventTestCase @@ -42,13 +44,11 @@ def test_should_delete_related_attendees(self): with app.test_request_context(): attendee = AttendeeFactory() - db.session.add(attendee) - db.session.commit() + save_to_db(attendee) obj = OrderFactory() obj.ticket_holders = [attendee, ] - db.session.add(obj) - db.session.commit() + save_to_db(obj) delete_related_attendees_for_order(obj) order = db.session.query(Order).filter(Order.id == obj.id).first() diff --git a/tests/all/integration/api/helpers/test_pentabarfxml.py b/tests/all/integration/api/helpers/test_pentabarfxml.py new file mode 100644 index 0000000000..222213b229 --- /dev/null +++ b/tests/all/integration/api/helpers/test_pentabarfxml.py @@ -0,0 +1,29 @@ +import unittest + +from tests.all.integration.setup_database import Setup +from tests.all.integration.utils import OpenEventTestCase +from app.api.helpers.pentabarfxml import PentabarfExporter +from xml.etree.ElementTree import fromstring, tostring +from app import current_app as app +from app.api.helpers.db import save_to_db + +from app.factories.event import EventFactoryBasic + +class TestPentabarfXML(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_export(self): + """Test to check event contents in pentabarfxml format""" + with app.test_request_context(): + test_event = EventFactoryBasic() + save_to_db(test_event) + pentabarf_export = PentabarfExporter() + pentabarf_string = 
pentabarf_export.export(test_event.id) + pentabarf_original = fromstring(pentabarf_string) + self.assertEqual(fromstring(tostring(pentabarf_original))[0][0].text, "example") + self.assertEqual(fromstring(tostring(pentabarf_original))[0][1].text, "2099-12-13") + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/helpers/test_permission_manager.py b/tests/all/integration/api/helpers/test_permission_manager.py index 042d01423c..f2fdb76672 100644 --- a/tests/all/integration/api/helpers/test_permission_manager.py +++ b/tests/all/integration/api/helpers/test_permission_manager.py @@ -19,13 +19,11 @@ def setUp(self): self.app = Setup.create_app() with app.test_request_context(): user = UserFactory() - db.session.add(user) - db.session.commit() + save_to_db(user) event = EventFactoryBasic() event.user_id = user.id - db.session.add(event) - db.session.commit() + save_to_db(event) # Authenticate User self.auth = {'Authorization': "JWT " + str(_default_jwt_encode_handler(user), 'utf-8')} diff --git a/tests/all/integration/api/helpers/test_storage.py b/tests/all/integration/api/helpers/test_storage.py index ea2ca81020..29d594b5ae 100644 --- a/tests/all/integration/api/helpers/test_storage.py +++ b/tests/all/integration/api/helpers/test_storage.py @@ -1,34 +1,32 @@ -"""Test file for storage functions.""" -from unittest import TestCase +import unittest +from unittest.mock import patch -from app.api.helpers.storage import create_url +from app import current_app as app +from tests.all.integration.utils import OpenEventTestCase +from tests.all.integration.setup_database import Setup +from app.api.helpers.storage import upload_local -class TestStorageHelperValidation(TestCase): - """Test class for testing storage helper functions.""" +class TestStorage(OpenEventTestCase): + """Contains test for Storage Helpers""" - def test_arbitrary_url(self): - """Method to test a url with arbitrary port.""" + def setUp(self): + self.app = Setup.create_app() - 
request_url = 'https://localhost:5000' - expected_file_url = 'https://localhost:5000/some/path/image.png' + """Test local file upload.""" + @patch('app.api.helpers.storage.upload_local') + @patch('app.api.helpers.storage.generate_hash', return_value='hash') + @patch('app.api.helpers.storage.get_settings', return_value={'static_domain': 'https://next.eventyay.com'}) + @patch('app.api.helpers.storage.UploadedFile') + def test_upload_local(self, uploadedfile_object, settings, generated_hash, uploadlocal): + expected_response = 'https://next.eventyay.com/media/upload_key/hash/test.pdf' + uploadedfile_object.filename = 'test.pdf' - self.assertEqual( - expected_file_url, create_url(request_url, '/some/path/image.png') - ) + with app.test_request_context(): + app.config['BASE_DIR'] = 'testdir' + actual_response = upload_local(uploadedfile_object, 'upload_key') + self.assertEqual(expected_response, actual_response) - def test_http_url(self): - """Method to test a url with port 80.""" - request_url = 'http://localhost:80' - expected_file_url = 'http://localhost/some/path/image.png' - self.assertEqual( - expected_file_url, create_url(request_url, '/some/path/image.png') - ) - def test_https_url(self): - """Method to test a url with port 443.""" - request_url = 'https://localhost:443' - expected_file_url = 'https://localhost/some/path/image.png' - self.assertEqual( - expected_file_url, create_url(request_url, '/some/path/image.png') - ) +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/helpers/test_systemnotifications.py b/tests/all/integration/api/helpers/test_systemnotifications.py new file mode 100644 index 0000000000..c26314aa4e --- /dev/null +++ b/tests/all/integration/api/helpers/test_systemnotifications.py @@ -0,0 +1,315 @@ +import unittest + +from app import current_app as app +from tests.all.integration.utils import OpenEventTestCase +from app.api.helpers.system_notifications import ( + get_event_exported_actions, + 
get_event_imported_actions, + get_monthly_payment_notification_actions, + get_monthly_payment_follow_up_notification_actions, + get_ticket_purchased_notification_actions, + get_ticket_purchased_attendee_notification_actions, + get_ticket_purchased_organizer_notification_actions, + get_event_published_notification_actions, + get_event_role_notification_actions, + get_new_session_notification_actions, + get_session_schedule_notification_actions, + get_next_event_notification_actions, + get_session_accept_reject_notification_actions, + get_invite_papers_notification_actions +) +from tests.all.integration.setup_database import Setup +from app.models.notification import NotificationAction + + +class TestSystemNotificationHelperValidation(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_event_exported(self): + """Method to test the actions associated with a notification about an event being successfully exported.""" + + with app.test_request_context(): + request_url = 'https://localhost/some/path/image.png' + response = get_event_exported_actions(request_url) + expected_action = NotificationAction( + subject='event-export', + link=request_url, + action_type='download' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_event_imported(self): + """Method to test the actions associated with a notification about an event being successfully imported.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525' + request_event_id = 1 + response = get_event_imported_actions(request_event_id, request_url) + expected_action = NotificationAction( + # subject is still 'event' since the action will be to view the imported event. 
+ subject='event', + link=request_url, + subject_id=request_event_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_monthly_payment_notification(self): + """Method to test the actions associated with a notification of monthly payments""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525/payment' + request_event_id = 1 + response = get_monthly_payment_notification_actions(request_event_id, request_url) + expected_action = NotificationAction( + subject='event', + link=request_url, + subject_id=request_event_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_monthly_pay_followup_notification(self): + """Method to test the actions associated with a follow up notification of monthly payments.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525/payment' + request_event_id = 1 + response = get_monthly_payment_follow_up_notification_actions(request_event_id, request_url) + expected_action = NotificationAction( + subject='invoice', + link=request_url, + subject_id=request_event_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_ticket_purchased_notification(self): + """Method to test the actions associated with a notification of 
tickets purchased.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525/order' + request_order_id = 1 + response = get_ticket_purchased_notification_actions(request_order_id, request_url) + expected_action = NotificationAction( + subject='order', + link=request_url, + subject_id=request_order_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_ticket_purchased_attendee(self): + """Method to test the actions associated with a notification of tickets purchased for an attendee that is + not the buyer.""" + + with app.test_request_context(): + request_pdfurl = 'https://localhost/pdf/e/24324/' + response = get_ticket_purchased_attendee_notification_actions(request_pdfurl) + expected_action = NotificationAction( + subject='tickets-pdf', + link=request_pdfurl, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_ticket_purchase_organizer(self): + """Method to test the actions associated with a notification of tickets purchased for the event organizer.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525/order' + request_order_id = 1 + response = get_ticket_purchased_organizer_notification_actions(request_order_id, request_url) + expected_action = NotificationAction( + subject='order', + subject_id=request_order_id, + link=request_url, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + 
self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_event_published_notification(self): + """Method to test the actions associated with a notification of an event getting published.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525' + request_event_id = 1 + response = get_event_published_notification_actions(request_event_id, request_url) + expected_action = NotificationAction( + subject='event', + subject_id=request_event_id, + link=request_url, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_event_role_notification(self): + """Method to test the actions associated with a notification of an event role.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525/invitation' + request_event_id = 1 + response = get_event_role_notification_actions(request_event_id, request_url) + expected_action = NotificationAction( + subject='event-role', + subject_id=request_event_id, + link=request_url, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_new_session_notification(self): + """Method to test the actions associated with a notification of an event getting a new session proposal.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/session/345525' + request_session_id = 1 + response = get_new_session_notification_actions(request_session_id, request_url) + 
expected_action = NotificationAction( + subject='session', + link=request_url, + subject_id=request_session_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_session_schedule_notification(self): + """Method to test the actions associated with a notification of change in schedule of a session.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/session/345525' + request_session_id = 1 + response = get_session_schedule_notification_actions(request_session_id, request_url) + expected_action = NotificationAction( + subject='session', + link=request_url, + subject_id=request_session_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_next_event_notification(self): + """Method to test the actions associated with a notification of next event.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/345525' + request_session_id = 1 + response = get_next_event_notification_actions(request_session_id, request_url) + expected_action = NotificationAction( + subject='event', + link=request_url, + subject_id=request_session_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_session_accept_reject_notif(self): + """Method to test the actions associated with 
a notification of a session getting accepted/rejected.""" + + with app.test_request_context(): + request_url = 'https://localhost/e/session/345525' + request_session_id = 1 + response = get_session_accept_reject_notification_actions(request_session_id, request_url) + expected_action = NotificationAction( + subject='session', + link=request_url, + subject_id=request_session_id, + action_type='view' + ) + expected_action = [expected_action] + expected_length = len(expected_action) + response_length = len(response) + self.assertIsInstance(response, list) + self.assertEqual(expected_action[0].subject, response[0].subject) + self.assertEqual(expected_length, response_length) + + def test_invite_papers_notification(self): + """Method to test the actions associated with an invite to submit papers.""" + + with app.test_request_context(): + request_cfs_url = 'https://localhost/e/cfs/345525' + request_submit_url = 'https://localhost/e/cfs/345525/submit' + response = get_invite_papers_notification_actions(request_cfs_url, request_submit_url) + expected_cfs_action = NotificationAction( + subject='call-for-speakers', + link=request_cfs_url, + action_type='view' + ) + expected_submit_action = NotificationAction( + subject='call-for-speakers', + link=request_submit_url, + action_type='submit' + ) + expected_response = [expected_cfs_action, expected_submit_action] + expected_response_length = len(expected_response) + response_length = len(response) + + self.assertIsInstance(response, list) + self.assertEqual(expected_cfs_action.subject, response[0].subject) + self.assertEqual(expected_submit_action.subject, response[1].subject) + self.assertEqual(expected_response_length, response_length) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/helpers/test_utilities.py b/tests/all/integration/api/helpers/test_utilities.py index 90227b3a21..073ea44f85 100644 --- a/tests/all/integration/api/helpers/test_utilities.py +++ 
b/tests/all/integration/api/helpers/test_utilities.py @@ -1,10 +1,11 @@ import unittest import string +import datetime from app import current_app as app from app.api.helpers.exceptions import UnprocessableEntity from tests.all.integration.utils import OpenEventTestCase -from app.api.helpers.utilities import dasherize, require_relationship, string_empty, str_generator +from app.api.helpers.utilities import dasherize, require_relationship, string_empty, str_generator, monthdelta, represents_int from tests.all.integration.setup_database import Setup @@ -40,6 +41,21 @@ def test_string_empty(self): self.assertFalse(string_empty(int)) self.assertFalse(string_empty(None)) + def test_monthdelta(self): + """Method to test difference in months result""" + + with app.test_request_context(): + test_date = datetime.datetime(2000, 6, 18) + test_future_date = monthdelta(test_date, 3) + self.assertEqual(test_future_date, datetime.datetime(2000, 9, 18)) + + def test_represents_int(self): + """Method to test representation of int""" + + with app.test_request_context(): + self.assertTrue(represents_int(4)) + self.assertFalse(represents_int('test')) + def test_str_generator(self): """Method to test str_generator.""" diff --git a/tests/all/integration/api/helpers/test_xcal.py b/tests/all/integration/api/helpers/test_xcal.py new file mode 100644 index 0000000000..e6da9cb236 --- /dev/null +++ b/tests/all/integration/api/helpers/test_xcal.py @@ -0,0 +1,30 @@ +import unittest + +from tests.all.integration.setup_database import Setup +from tests.all.integration.utils import OpenEventTestCase +from app import current_app as app +from app.api.helpers.xcal import XCalExporter +from xml.etree.ElementTree import fromstring, tostring + +from app.factories.event import EventFactoryBasic +from app.api.helpers.db import save_to_db + + +class TestXCalExport(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_export(self): + """Test to check event contents in xCal 
format""" + with app.test_request_context(): + test_event = EventFactoryBasic() + save_to_db(test_event) + xcal = XCalExporter() + xcal_string = xcal.export(test_event.id) + xcal_original = fromstring(xcal_string) + self.assertEqual(fromstring(tostring(xcal_original))[0][3].text, "example") + self.assertEqual(fromstring(tostring(xcal_original))[0][2].text, "Schedule for sessions at example") + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/all/integration/api/validation/test_access_codes.py b/tests/all/integration/api/validation/test_access_codes.py new file mode 100644 index 0000000000..41771f4df0 --- /dev/null +++ b/tests/all/integration/api/validation/test_access_codes.py @@ -0,0 +1,117 @@ +import unittest + +from app import current_app as app +from tests.all.integration.utils import OpenEventTestCase +from app.api.helpers.exceptions import UnprocessableEntity +from app.api.schema.access_codes import AccessCodeSchema +from app.factories.access_code import AccessCodeFactory +from app.api.helpers.db import save_to_db +from tests.all.integration.setup_database import Setup +import datetime + +class TestAccessCodeValidation(OpenEventTestCase): + def setUp(self): + self.app = Setup.create_app() + + def test_quantity_pass(self): + """ + Access Code Validate Quantity - Tests if the function runs without an exception + :return: + """ + schema = AccessCodeSchema() + original_data = { + 'data': {} + } + data = { + 'min_quantity': 5, + 'max_quantity': 10, + 'tickets_number': 30 + } + AccessCodeSchema.validate_order_quantity(schema, data, original_data) + + def test_quantity_min_gt_max(self): + """ + Access Code Validate Quantity - Tests if the exception is raised when min tickets > max tickets + :return: + """ + schema = AccessCodeSchema() + original_data = { + 'data': {} + } + data = { + 'min_quantity': 10, + 'max_quantity': 5, + 'tickets_number': 30 + } + with self.assertRaises(UnprocessableEntity): + AccessCodeSchema.validate_order_quantity(schema, 
data, original_data) + + def test_quantity_max_gt_ticket(self): + """ + Access Code Validate Quantity - Tests if the exception is raised when max_quantity greater than ticket_number + :return: + """ + schema = AccessCodeSchema() + original_data = { + 'data': {} + } + data = { + 'min_quantity': 10, + 'max_quantity': 20, + 'tickets_number': 15 + } + with self.assertRaises(UnprocessableEntity): + AccessCodeSchema.validate_order_quantity(schema, data, original_data) + + def test_date_valid_from_gt_valid_till(self): + """ + Access Code Validate Date - Tests if the exception is raised when valid_from is greater than valid_till + :return: + """ + schema = AccessCodeSchema() + original_data = { + 'data': {} + } + data = { + 'valid_from': datetime.datetime(2019, 1, 1), + 'valid_till': datetime.datetime(2018, 1, 1) + } + with self.assertRaises(UnprocessableEntity): + AccessCodeSchema.validate_date(schema, data, original_data) + + def test_date_pass(self): + """ + Access Code Validate Date - Tests if the date function runs without exception + :return: + """ + schema = AccessCodeSchema() + original_data = { + 'data': {} + } + data = { + 'valid_from': datetime.datetime(2018, 1, 1), + 'valid_till': datetime.datetime(2019, 1, 1) + } + AccessCodeSchema.validate_date(schema, data, original_data) + + def test_quantity_db_populate(self): + """ + Access Code Validate Quantity - Tests if validation works on values stored in db and not given in 'data' + :return: + """ + with app.test_request_context(): + schema = AccessCodeSchema() + obj = AccessCodeFactory() + save_to_db(obj) + + original_data = { + 'data': { + 'id': 1 + } + } + data = {} + AccessCodeSchema.validate_order_quantity(schema, data, original_data) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/all/integration/api/validation/test_discount_codes.py b/tests/all/integration/api/validation/test_discount_codes.py index ea80d09174..bba04b0be9 100644 --- 
a/tests/all/integration/api/validation/test_discount_codes.py +++ b/tests/all/integration/api/validation/test_discount_codes.py @@ -6,6 +6,7 @@ from app.api.schema.discount_codes import DiscountCodeSchemaTicket from app.factories.discount_code import DiscountCodeFactory from app.models import db +from app.api.helpers.db import save_to_db from tests.all.integration.setup_database import Setup @@ -48,7 +49,7 @@ def test_quantity_min_gt_max(self): def test_quantity_max_gt_tickets_number(self): """ - Discount Code Validate Quantity - Tests if exception is raised when min_quantity greater than max + Discount Code Validate Quantity - Tests if exception is raised when max_quantity greater than ticket_number :return: """ schema = DiscountCodeSchemaTicket() @@ -71,8 +72,7 @@ def test_quantity_db_populate(self): with app.test_request_context(): schema = DiscountCodeSchemaTicket() obj = DiscountCodeFactory() - db.session.add(obj) - db.session.commit() + save_to_db(obj) original_data = { 'data': { diff --git a/tests/all/integration/api/validation/test_events.py b/tests/all/integration/api/validation/test_events.py index cc2542638f..fa7b374c65 100644 --- a/tests/all/integration/api/validation/test_events.py +++ b/tests/all/integration/api/validation/test_events.py @@ -8,6 +8,7 @@ from app.api.schema.events import EventSchema from app.factories.event import EventFactoryBasic from app.models import db +from app.api.helpers.db import save_to_db from tests.all.integration.setup_database import Setup @@ -25,8 +26,8 @@ def test_date_pass(self): 'data': {} } data = { - 'starts_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), - 'ends_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'starts_at': datetime(2099, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'ends_at': datetime(2099, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) } EventSchema.validate_date(schema, data, original_data) @@ -40,8 +41,8 @@ def 
test_date_start_gt_end(self): 'data': {} } data = { - 'starts_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), - 'ends_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'starts_at': datetime(2099, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'ends_at': datetime(2099, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) } with self.assertRaises(UnprocessableEntity): EventSchema.validate_date(schema, data, original_data) @@ -54,8 +55,7 @@ def test_date_db_populate(self): with app.test_request_context(): schema = EventSchema() obj = EventFactoryBasic() - db.session.add(obj) - db.session.commit() + save_to_db(obj) original_data = { 'data': { diff --git a/tests/all/integration/api/validation/test_sessions.py b/tests/all/integration/api/validation/test_sessions.py index b705978d4e..e9daacb5fa 100644 --- a/tests/all/integration/api/validation/test_sessions.py +++ b/tests/all/integration/api/validation/test_sessions.py @@ -8,6 +8,7 @@ from app.api.schema.sessions import SessionSchema from app.factories.session import SessionFactory from app.models import db +from app.api.helpers.db import save_to_db from tests.all.integration.setup_database import Setup @@ -25,8 +26,8 @@ def test_date_pass(self): 'data': {} } data = { - 'starts_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), - 'ends_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'starts_at': datetime(2099, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'ends_at': datetime(2099, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) } SessionSchema.validate_date(schema, data, original_data) @@ -40,8 +41,8 @@ def test_date_start_gt_end(self): 'data': {} } data = { - 'starts_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), - 'ends_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'starts_at': datetime(2099, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'ends_at': datetime(2099, 
8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) } with self.assertRaises(UnprocessableEntity): SessionSchema.validate_date(schema, data, original_data) @@ -54,8 +55,7 @@ def test_date_db_populate(self): with app.test_request_context(): schema = SessionSchema() obj = SessionFactory() - db.session.add(obj) - db.session.commit() + save_to_db(obj) original_data = { 'data': { diff --git a/tests/all/integration/api/validation/test_speakers_call.py b/tests/all/integration/api/validation/test_speakers_call.py index c03dcde75e..ce5377b1ed 100644 --- a/tests/all/integration/api/validation/test_speakers_call.py +++ b/tests/all/integration/api/validation/test_speakers_call.py @@ -8,6 +8,7 @@ from app.api.schema.speakers_calls import SpeakersCallSchema from app.factories.speakers_call import SpeakersCallFactory from app.models import db +from app.api.helpers.db import save_to_db from tests.all.integration.setup_database import Setup @@ -26,7 +27,8 @@ def test_date_pass(self): } data = { 'starts_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), - 'ends_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'ends_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_starts_at': datetime(2003, 9, 10, 12, 30, 45).replace(tzinfo=timezone('UTC')) } SpeakersCallSchema.validate_date(schema, data, original_data) @@ -41,7 +43,42 @@ def test_date_start_gt_end(self): } data = { 'starts_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), - 'ends_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'ends_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_starts_at': datetime(2003, 9, 10, 12, 30, 45).replace(tzinfo=timezone('UTC')) + } + with self.assertRaises(UnprocessableEntity): + SpeakersCallSchema.validate_date(schema, data, original_data) + + def test_date_start_gt_event_end(self): + """ + Speakers Call Validate Date - Tests if exception is raised when 
speakers_call starts_at is after event starts_at + :return: + """ + schema = SpeakersCallSchema() + original_data = { + 'data': {} + } + data = { + 'starts_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'ends_at': datetime(2003, 9, 10, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_starts_at': datetime(2003, 9, 2, 12, 30, 45).replace(tzinfo=timezone('UTC')) + } + with self.assertRaises(UnprocessableEntity): + SpeakersCallSchema.validate_date(schema, data, original_data) + + def test_date_end_gt_event_end(self): + """ + Speakers Call Validate Date - Tests if exception is raised when speakers_call ends_at is after event starts_at + :return: + """ + schema = SpeakersCallSchema() + original_data = { + 'data': {} + } + data = { + 'starts_at': datetime(2003, 9, 2, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'ends_at': datetime(2003, 9, 10, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_starts_at': datetime(2003, 9, 5, 12, 30, 45).replace(tzinfo=timezone('UTC')) } with self.assertRaises(UnprocessableEntity): SpeakersCallSchema.validate_date(schema, data, original_data) @@ -54,8 +91,7 @@ def test_date_db_populate(self): with app.test_request_context(): schema = SpeakersCallSchema() obj = SpeakersCallFactory() - db.session.add(obj) - db.session.commit() + save_to_db(obj) original_data = { 'data': { diff --git a/tests/all/integration/api/validation/test_tickets.py b/tests/all/integration/api/validation/test_tickets.py index b4b1967b47..ff65f65c67 100644 --- a/tests/all/integration/api/validation/test_tickets.py +++ b/tests/all/integration/api/validation/test_tickets.py @@ -8,6 +8,7 @@ from app.api.schema.tickets import TicketSchema from app.factories.ticket import TicketFactory from app.models import db +from app.api.helpers.db import save_to_db from tests.all.integration.setup_database import Setup @@ -26,7 +27,8 @@ def test_date_pass(self): } data = { 'sales_starts_at': datetime(2003, 8, 4, 12, 30, 
45).replace(tzinfo=timezone('UTC')), - 'sales_ends_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'sales_ends_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_ends_at': datetime(2003, 9, 10, 12, 30, 45).replace(tzinfo=timezone('UTC')) } TicketSchema.validate_date(schema, data, original_data) @@ -41,7 +43,42 @@ def test_date_start_gt_end(self): } data = { 'sales_starts_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), - 'sales_ends_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')) + 'sales_ends_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_ends_at': datetime(2003, 8, 10, 12, 30, 45).replace(tzinfo=timezone('UTC')) + } + with self.assertRaises(UnprocessableEntity): + TicketSchema.validate_date(schema, data, original_data) + + def test_date_start_gt_event_end(self): + """ + Tickets Validate Date - Tests if exception is raised when sales_starts_at is after event ends_at + :return: + """ + schema = TicketSchema() + original_data = { + 'data': {} + } + data = { + 'sales_starts_at': datetime(2003, 8, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'sales_ends_at': datetime(2003, 9, 4, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_ends_at': datetime(2003, 8, 2, 12, 30, 45).replace(tzinfo=timezone('UTC')) + } + with self.assertRaises(UnprocessableEntity): + TicketSchema.validate_date(schema, data, original_data) + + def test_date_end_gt_event_end(self): + """ + Tickets Validate Date - Tests if exception is raised when sales_ends_at is after event ends_at + :return: + """ + schema = TicketSchema() + original_data = { + 'data': {} + } + data = { + 'sales_starts_at': datetime(2003, 8, 1, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'sales_ends_at': datetime(2003, 8, 10, 12, 30, 45).replace(tzinfo=timezone('UTC')), + 'event_ends_at': datetime(2003, 8, 2, 12, 30, 45).replace(tzinfo=timezone('UTC')) } with 
self.assertRaises(UnprocessableEntity): TicketSchema.validate_date(schema, data, original_data) @@ -54,8 +91,7 @@ def test_date_db_populate(self): with app.test_request_context(): schema = TicketSchema() obj = TicketFactory() - db.session.add(obj) - db.session.commit() + save_to_db(obj) original_data = { 'data': { diff --git a/tests/all/unit/api/helpers/test_errors.py b/tests/all/unit/api/helpers/test_errors.py new file mode 100644 index 0000000000..464fb09d5b --- /dev/null +++ b/tests/all/unit/api/helpers/test_errors.py @@ -0,0 +1,16 @@ +from unittest import TestCase +from app.api.helpers.errors import ErrorResponse + + +class TestErrorDetails(TestCase): + """Test for error responses""" + + def test_error_response_dict_details(self): + """To test details in the form of dict""" + + error_response = ErrorResponse(source="test source", detail="test detail") + expected_dict = {'status': error_response.status, + 'source': error_response.source, + 'title': error_response.title, + 'detail': error_response.detail} + self.assertEqual(error_response.to_dict(), expected_dict) diff --git a/tests/all/unit/api/helpers/test_export_helpers.py b/tests/all/unit/api/helpers/test_export_helpers.py new file mode 100644 index 0000000000..ff6ea12ef9 --- /dev/null +++ b/tests/all/unit/api/helpers/test_export_helpers.py @@ -0,0 +1,106 @@ +import unittest + +from collections import OrderedDict +from app.api.helpers.export_helpers import sorted_dict, make_filename, handle_unserializable_data +from datetime import datetime + + +class TestExportHelperValidation(unittest.TestCase): + + def test_sorted_dict(self): + """Method to test sorting of a json (dict/list->dict) returns OrderedDict""" + + request_dictdata = {"twokey": 1, "keyone": 3} + request_ordereddict_data = OrderedDict([('twokey', 1), ('keyone', 3)]) + request_list_data = [{"twokey": 1, "keyone": 3}, + {"threekey": 0, "keytwo": 2}] + + expected_dictdata = OrderedDict( + [('keyone', 3), ('twokey', 1)]) + expected_ordereddict_data = 
OrderedDict([('keyone', 3), ('twokey', 1)]) + expected_list_data = [OrderedDict([('keyone', 3), ('twokey', 1)]), OrderedDict([ + ('keytwo', 2), ('threekey', 0)])] + + response_dictdata = sorted_dict(request_dictdata) + response_ordereddict_data = sorted_dict(request_ordereddict_data) + response_list_data = sorted_dict(request_list_data) + + self.assertEqual(expected_dictdata, response_dictdata) + self.assertEqual(expected_ordereddict_data, response_ordereddict_data) + self.assertEqual(expected_list_data, response_list_data) + + def test_make_filename(self): + """Method to test speaker image filename for export""" + + correct_data = 'correctfilename.png' + correct_response = 'Correctfilename.Png' + actual_response = make_filename(correct_data) + self.assertEqual(correct_response, actual_response) + + data_with_lt = 'datawith Event Statistics Details > Show Event Statistics") +def event_statistics_get(transaction): + """ + GET /admin/statistics/events + :param transaction: + :return: + """ + with stash['app'].app_context(): + event = EventFactoryBasic() + db.session.add(event) + db.session.commit() + + +@hooks.before("Admin Statistics > Event Types Statistics Details > Show Event Types Statistics") +def event_type_statistics_get(transaction): + """ + GET /admin/statistics/event-types + :param transaction: + :return: + """ + with stash['app'].app_context(): + event_type = EventTypeFactory() + db.session.add(event_type) + db.session.commit() + + +@hooks.before("Admin Statistics > Event Topics Statistics Details > Show Event Topics Statistics") +def event_topic_statistics_get(transaction): + """ + GET /admin/statistics/event-topics + :param transaction: + :return: + """ + with stash['app'].app_context(): + event_topic = EventTopicFactory() + db.session.add(event_topic) + db.session.commit() + + +@hooks.before("Admin Statistics > User Statistics Details > Show User Statistics") +def user_statistics_get(transaction): + """ + GET /admin/statistics/users + :param 
transaction: + :return: + """ + with stash['app'].app_context(): + user = UserFactory() + db.session.add(user) + db.session.commit() + + +@hooks.before("Admin Statistics > Session Statistics Details > Show Session Statistics") +def session_statistics_get(transaction): + """ + GET /admin/statistics/sessions + :param transaction: + :return: + """ + with stash['app'].app_context(): + session = SessionFactory() + db.session.add(session) + db.session.commit() + + +@hooks.before("Admin Statistics > Mail Statistics Details > Show Mail Statistics") +def mail_statistics_get(transaction): + """ + GET /admin/statistics/mails + :param transaction: + :return: + """ + with stash['app'].app_context(): + mail = MailFactory() + db.session.add(mail) + db.session.commit()