diff --git a/torquedata/.gitignore b/torquedata/.gitignore
index 36ca8c9c..e69de29b 100644
--- a/torquedata/.gitignore
+++ b/torquedata/.gitignore
@@ -1,2 +0,0 @@
-djangosettings.py
-flasksettings.py
diff --git a/torquedata/Pipfile b/torquedata/Pipfile
index 59ce0839..967df831 100644
--- a/torquedata/Pipfile
+++ b/torquedata/Pipfile
@@ -1,14 +1,12 @@
 [[source]]
-url = "https://pypi.python.org/simple"
-verify_ssl = true
 name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
 
 [packages]
-flask = "*"
-whoosh = "*"
 django = "*"
 
-[dev-packages]
-
 [requires]
 python_version = "3.7"
diff --git a/torquedata/Pipfile.lock b/torquedata/Pipfile.lock
index 070c6f43..0d0423b6 100644
--- a/torquedata/Pipfile.lock
+++ b/torquedata/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "96dfc90682af8ae49b1d866d0f55672c4218b4521a7338f9f862f29c49f4316a"
+            "sha256": "627ef89f247ecee27e9ef0dabe116108d09c47abf171c900a8817befa64f9dd2"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -10,7 +10,7 @@
         "sources": [
             {
                 "name": "pypi",
-                "url": "https://pypi.python.org/simple",
+                "url": "https://pypi.org/simple",
                 "verify_ssl": true
             }
         ]
@@ -24,14 +24,6 @@
             "markers": "python_version >= '3.5'",
             "version": "==3.2.10"
         },
-        "click": {
-            "hashes": [
-                "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
-                "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
-            ],
-            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
-            "version": "==7.1.2"
-        },
         "django": {
             "hashes": [
                 "sha256:59c8125ca873ed3bdae9c12b146fbbd6ed8d0f743e4cf5f5817af50c51f1fc2f",
@@ -40,69 +32,6 @@
             "index": "pypi",
             "version": "==3.1.1"
         },
-        "flask": {
-            "hashes": [
-                "sha256:4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060",
-                "sha256:8a4fdd8936eba2512e9c85df320a37e694c93945b33ef33c89946a340a238557"
-            ],
-            "index": "pypi",
-            "version": "==1.1.2"
-        },
-        "itsdangerous": {
-            "hashes": [
-                "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
-                "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
-            ],
-            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
-            "version": "==1.1.0"
-        },
-        "jinja2": {
-            "hashes": [
-                "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",
-                "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"
-            ],
-            "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
-            "version": "==2.11.2"
-        },
-        "markupsafe": {
-            "hashes": [
-                "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
-                "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
-                "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
-                "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
-                "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
-                "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
-                "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
-                "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
-                "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
-                "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
-                "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
-                "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", - "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.1.1" - }, "pytz": { "hashes": [ "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed", @@ -117,23 +46,6 @@ ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==0.3.1" - }, - "werkzeug": { - "hashes": [ - "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43", - "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.0.1" - }, - "whoosh": { - "hashes": [ - "sha256:7ca5633dbfa9e0e0fa400d3151a8a0c4bec53bd2ecedc0a67705b17565c31a83", - "sha256:aa39c3c3426e3fd107dcb4bde64ca1e276a65a889d9085a6e4b54ba82420a852", - "sha256:e0857375f63e9041e03fedd5b7541f97cf78917ac1b6b06c1fcc9b45375dda69" - ], - "index": "pypi", - "version": "==2.7.4" } }, "develop": {} diff --git a/torquedata/torquedata/db/__init__.py b/torquedata/core/__init__.py similarity index 100% rename from torquedata/torquedata/db/__init__.py rename to torquedata/core/__init__.py diff --git a/torquedata/core/apps.py b/torquedata/core/apps.py new file mode 100644 index 00000000..26f78a8e --- /dev/null +++ b/torquedata/core/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class CoreConfig(AppConfig): + name = 'core' diff --git a/torquedata/torquedata/db/management/commands/upload_attachment.py b/torquedata/core/management/commands/upload_attachment.py similarity index 100% rename from torquedata/torquedata/db/management/commands/upload_attachment.py rename to torquedata/core/management/commands/upload_attachment.py diff --git a/torquedata/torquedata/db/management/commands/upload_spreadsheet.py b/torquedata/core/management/commands/upload_spreadsheet.py similarity index 100% rename from 
rename to torquedata/core/management/commands/upload_spreadsheet.py
diff --git a/torquedata/torquedata/db/management/commands/upload_toc.py b/torquedata/core/management/commands/upload_toc.py
similarity index 100%
rename from torquedata/torquedata/db/management/commands/upload_toc.py
rename to torquedata/core/management/commands/upload_toc.py
diff --git a/torquedata/torquedata/db/migrations/__init__.py b/torquedata/core/migrations/__init__.py
similarity index 100%
rename from torquedata/torquedata/db/migrations/__init__.py
rename to torquedata/core/migrations/__init__.py
diff --git a/torquedata/torquedata/db/models.py b/torquedata/core/models.py
similarity index 100%
rename from torquedata/torquedata/db/models.py
rename to torquedata/core/models.py
diff --git a/torquedata/core/views.py b/torquedata/core/views.py
new file mode 100644
index 00000000..91ea44a2
--- /dev/null
+++ b/torquedata/core/views.py
@@ -0,0 +1,3 @@
+from django.shortcuts import render
+
+# Create your views here.
diff --git a/torquedata/djangosettings.py.tmpl b/torquedata/djangosettings.py.tmpl
deleted file mode 100644
index 4fb14a50..00000000
--- a/torquedata/djangosettings.py.tmpl
+++ /dev/null
@@ -1,13 +0,0 @@
-# Using sqlite3 for debugging purposes at the moment. To be changed to postgres in a later commit.
-DATABASES = {
-    'default': {
-        'ENGINE': 'django.db.backends.sqlite3',
-        'NAME': 'db.sqlite3',
-    }
-}
-
-INSTALLED_APPS = (
-    'torquedata.db',
-)
-
-SECRET_KEY = 'REPLACE_ME'
diff --git a/torquedata/flasksettings.py.tmpl b/torquedata/flasksettings.py.tmpl
deleted file mode 100644
index c65db95a..00000000
--- a/torquedata/flasksettings.py.tmpl
+++ /dev/null
@@ -1,2 +0,0 @@
-# Where to find the data spreadsheets
-SPREADSHEET_FOLDER = "__SPREADSHEET_LOCATION__"
diff --git a/torquedata/manage.py b/torquedata/manage.py
index b1555b7f..ccd2e8b5 100644
--- a/torquedata/manage.py
+++ b/torquedata/manage.py
@@ -1,8 +1,22 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
+"""Django's command-line utility for administrative tasks."""
 import os
 import sys
-
-if __name__ == '__main__':
-    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangosettings')
-    from django.core.management import execute_from_command_line
+
+
+def main():
+    """Run administrative tasks."""
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'torquedata.settings')
+    try:
+        from django.core.management import execute_from_command_line
+    except ImportError as exc:
+        raise ImportError(
+            "Couldn't import Django. Are you sure it's installed and "
+            "available on your PYTHONPATH environment variable? Did you "
+            "forget to activate a virtual environment?"
+        ) from exc
     execute_from_command_line(sys.argv)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/torquedata/torquedata/__init__.py b/torquedata/torquedata/__init__.py
index eea2aa5d..e69de29b 100644
--- a/torquedata/torquedata/__init__.py
+++ b/torquedata/torquedata/__init__.py
@@ -1,151 +0,0 @@
-from flask import Flask
-
-import csv
-import configparser
-import os
-import pickle
-import json
-from whoosh import index
-from whoosh.index import create_in
-from whoosh.fields import *
-
-app = Flask(__name__)
-
-try:
-    app.config.from_object('config')
-except:
-    pass
-
-sheet_config = configparser.ConfigParser()
-sheet_config.read(os.path.join(app.config['SPREADSHEET_FOLDER'], "sheets"))
-
-try:
-    with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "permissions"), 'rb') as f:
-        permissions = pickle.load(f)
-except Exception:
-    permissions = {}
-
-try:
-    with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "templates"), 'rb') as f:
-        templates = pickle.load(f)
-except Exception:
-    templates = {}
-
-try:
-    with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "attachment_config"), 'rb') as f:
-        attachment_config = pickle.load(f)
-except Exception:
-    attachment_config = {}
-
-try:
-    with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "users"), 'rb') as f:
-        users = pickle.load(f)
-except Exception:
-    users = {}
-
-data = {}
-indices = {}
-def cull_invalid_columns(o, valid_fields):
-    return {k:v for (k,v) in o.items() if (k in valid_fields)}
-
-def cull_invalid_objects(group, sheet_name, wiki_key):
-    if sheet_name not in permissions.keys():
-        return []
-    if wiki_key not in permissions[sheet_name].keys():
-        return []
-    if group not in permissions[sheet_name][wiki_key].keys():
-        return []
-    elif "valid_ids" in permissions[sheet_name][wiki_key][group].keys():
-        valid_ids = permissions[sheet_name][wiki_key][group]["valid_ids"];
-        return [ o for o in data[sheet_name].values() if (o[sheet_config[sheet_name]["key_column"]] in valid_ids) ]
-    else:
-        return list(data[sheet_name].values())
-
-def permissions_sha(sheet_name, wiki_key, group):
-    import hashlib
-
-    return hashlib.sha224(
-        sheet_name.encode('utf-8') +
-        str(permissions[sheet_name][wiki_key][group]["valid_ids"]).encode('utf-8') +
-        str(permissions[sheet_name][wiki_key][group]["columns"]).encode('utf-8')
-    ).hexdigest()
-
-def index_search(group, sheet_name, wiki_key):
-    sha = permissions_sha(sheet_name, wiki_key, group)
-    dir = os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "indices", sha)
-
-    if(index.exists_in(dir)):
-        print("Index already exists for " + sheet_name + " / " + wiki_key + " / " + group + " (or comparable)")
-        ix = index.open_dir(dir)
-        indices[sha] = ix
-        return
-
-    try:
-        os.mkdir(dir)
-    except FileExistsError:
-        pass
-
-    print("Reindexing for " + sheet_name + " / " + group)
-    schema = Schema(key=ID(stored=True, unique=True), content=TEXT)
-    ix = create_in(dir, schema)
-    writer = ix.writer()
-    for o in cull_invalid_objects(group, sheet_name, wiki_key):
-        writer.add_document(
-            key=o[sheet_config[sheet_name]["key_column"]],
-            content=" ".join([str(c) for c in cull_invalid_columns(o, permissions[sheet_name][wiki_key][group]["columns"]).values()])
-        )
-    writer.commit()
-
-    indices[sha] = ix
-
-    return ""
-
-
-def load_sheet(sheet_name):
-    data[sheet_name] = {}
-    reader = csv.reader(
-        open(os.path.join(app.config.get("SPREADSHEET_FOLDER"), sheet_name, sheet_name + ".csv"), encoding='utf-8'),
-        delimiter=',',
-        quotechar='"'
-    )
-
-    if sheet_name not in templates:
-        templates[sheet_name] = {}
-
-    if sheet_name not in permissions:
-        permissions[sheet_name] = {}
-
-    header = next(reader)
-    column_types = next(reader)
-    for row in reader:
-        o = {}
-        for (field, column_type, cell) in zip(header, column_types, row):
-            if column_type == 'list':
-                # This may be reversed as a decision at some point, but the empty cell
-                # from the csv comes through as the empty string, meaning that the user
-                # probably wants the list to be empty as well.
-                if cell == '':
-                    cell = []
-                else:
-                    cell = cell.strip().split("\n")
-            elif column_type == 'json':
-                if cell == '':
-                    cell = {}
-                else:
-                    cell = json.loads(cell)
-            o[field] = cell
-        data[sheet_name][o[sheet_config[sheet_name]["key_column"]]] = o
-
-    for wiki_key in permissions[sheet_name].keys():
-        for group in permissions[sheet_name][wiki_key].keys():
-            sha = permissions_sha(sheet_name, wiki_key, group)
-            dir = os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, wiki_key, "indices", sha)
-            index_search(group, sheet_name, wiki_key)
-
-for sheet_name in sheet_config.sections():
-    if sheet_name is "DEFAULT":
-        continue
-
-    load_sheet(sheet_name)
-
-from torquedata import routes
diff --git a/torquedata/torquedata/asgi.py b/torquedata/torquedata/asgi.py
new file mode 100644
index 00000000..7a8703f5
--- /dev/null
+++ b/torquedata/torquedata/asgi.py
@@ -0,0 +1,16 @@
+"""
+ASGI config for torquedata project.
+
+It exposes the ASGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
+"""
+
+import os
+
+from django.core.asgi import get_asgi_application
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'torquedata.settings')
+
+application = get_asgi_application()
diff --git a/torquedata/torquedata/db/migrations/0001_initial.py b/torquedata/torquedata/db/migrations/0001_initial.py
deleted file mode 100644
index 136a7615..00000000
--- a/torquedata/torquedata/db/migrations/0001_initial.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Generated by Django 3.1.1 on 2020-09-07 12:38
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
-    initial = True
-
-    dependencies = [
-    ]
-
-    operations = [
-        migrations.CreateModel(
-            name='Spreadsheet',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('name', models.CharField(max_length=255)),
-                ('object_name', models.CharField(max_length=255)),
-                ('_columns', models.TextField()),
-                ('key_column', models.TextField()),
-            ],
-        ),
-        migrations.CreateModel(
-            name='User',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('username', models.CharField(max_length=255)),
-            ],
-        ),
-        migrations.CreateModel(
-            name='TableOfContents',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('json_file', models.FileField(upload_to='')),
-                ('template_file', models.FileField(upload_to='')),
-                ('sheet', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='templates', to='db.spreadsheet')),
-            ],
-        ),
-        migrations.CreateModel(
-            name='Row',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('key', models.TextField()),
-                ('row_number', models.PositiveIntegerField()),
-                ('_raw', models.TextField()),
-                ('sheet', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rows', to='db.spreadsheet')),
-            ],
-        ),
-        migrations.CreateModel(
-            name='Attachment',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('file', models.FileField(upload_to='')),
-                ('sheet', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attachments', to='db.spreadsheet')),
-            ],
-        ),
-        migrations.AddConstraint(
-            model_name='row',
-            constraint=models.UniqueConstraint(fields=('sheet', 'row_number'), name='unique_row_number'),
-        ),
-    ]
diff --git a/torquedata/torquedata/routes.py b/torquedata/torquedata/routes.py
deleted file mode 100644
index f05da96a..00000000
--- a/torquedata/torquedata/routes.py
+++ /dev/null
@@ -1,404 +0,0 @@
-from torquedata import *
-from jinja2 import Template
-from flask import request, send_file, abort
-from werkzeug.utils import secure_filename
-
-import threading
-import json
-import os
-import re
-import io
-import pickle
-
-import shutil
-import whoosh
-from whoosh.qparser import QueryParser
-
-@app.route("/search/<sheet_name>")
-def search(sheet_name):
-    q = request.args.get("q")
-    group = request.args.get("group")
-    wiki_key = request.args.get("wiki_key")
-    sha = permissions_sha(sheet_name, wiki_key, group)
-
-    if sha in indices:
-        ix = indices[sha]
-        with ix.searcher() as searcher:
-            parser = QueryParser("content", ix.schema)
-            query = parser.parse(q)
-            # 2000 is arbitrarily large enough, and we must have a limit declared
-            results = searcher.search(query, limit=2000)
-
-            search_templates = templates[sheet_name][wiki_key]['Search']
-            search_template = search_templates['templates'][search_templates['default']]
-            template = Template(search_template)
-
-            config = sheet_config[sheet_name]
-
-            resp = ""
-
-            if results.scored_length() == 0:
-                return "There were no results matching the query."
-            else:
-                resp = "== %d results for '%s' ==\n\n" % (results.scored_length(), q)
-                for r in results:
-                    o = data[sheet_name][r["key"]]
-                    culled_o = cull_invalid_columns(data[sheet_name][r["key"]], permissions[sheet_name][wiki_key][group]["columns"])
-                    resp += template.render({config["object_name"]: culled_o})
-                    resp += "\n\n"
-
-                return resp
-    else:
-        return ""
-
-@app.route('/api/<sheet_name>.<fmt>')
-def sheet(sheet_name, fmt):
-    group = request.args.get("group")
-    wiki_key = request.args.get("wiki_key")
-
-    if fmt == "json":
-        valid_objects = cull_invalid_objects(group, sheet_name, wiki_key)
-        return json.dumps({sheet_name: [cull_invalid_columns(o, permissions[sheet_name][wiki_key][group]["columns"]) for o in valid_objects ]})
-    else:
-        raise Exception("Only json format valid for full list")
-
-@app.route('/api/<sheet_name>/toc/<toc_name>.<fmt>')
-def sheet_toc(sheet_name, toc_name, fmt):
-    group = request.args.get("group")
-    wiki_key = request.args.get("wiki_key")
-
-    if not group:
-        abort(403, "Group " + group + " invalid");
-
-    if not 'TOC' in templates[sheet_name][wiki_key]:
-        abort(403, "No TOC defined for " + sheet_name + " for wiki " + wiki_key);
-
-    valid_objects = cull_invalid_objects(group, sheet_name, wiki_key)
-
-    if fmt == "mwiki":
-        toc_str = ""
-        config = sheet_config[sheet_name]
-        with open(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "tocs", toc_name + ".j2")) as f:
-            template_str = f.read()
-        with open(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "tocs", toc_name + ".json")) as f:
-            template_data = json.loads(f.read())
-        template_data[sheet_name] = { o[config["key_column"]]:o for o in valid_objects }
-
-        toc_templates = templates[sheet_name][wiki_key]['TOC']
-
-        toc_template = toc_templates['templates'][toc_templates['default']]
-
-        # Because TorqueDataConnect will sometimes override the global view and send it along,
-        # and because that view may not be a valid TOC view, only use the view argument to override
-        # the default if it's valid.
-        if request.args.get("view") and request.args.get("view") in toc_templates['templates']:
-            toc_template = toc_templates['templates'][request.args.get("view")]
-
-        template = Template(toc_template)
-        template_data['toc_lines'] = {
-            o[config['key_column']]:
-            template.render({
-                config['object_name']:
-                cull_invalid_columns(o, permissions[sheet_name][wiki_key][group]["columns"])
-            })
-            for o
-            in valid_objects
-        }
-
-        return Template(template_str).render(template_data)
-    else:
-        raise Exception("Only mwiki format valid for toc")
-
-@app.route('/api/<sheet_name>/id/<key>.<fmt>')
-def row(sheet_name, key, fmt):
-    group = request.args.get("group")
-    wiki_key = request.args.get("wiki_key")
-
-    valid_id = False
-
-    if wiki_key in permissions[sheet_name].keys() and group in permissions[sheet_name][wiki_key].keys():
-        if "valid_ids" not in permissions[sheet_name][wiki_key][group].keys():
-            valid_id = True
-        else:
-            valid_id = (key in permissions[sheet_name][wiki_key][group]["valid_ids"])
-
-    if not valid_id:
-        if fmt == "json":
-            return json.dumps({"error": "Invalid " + sheet_config[sheet_name]["key_column"]})
-        else:
-            abort(403, "Invalid " + sheet_config[sheet_name]["key_column"]);
-
-    row = data[sheet_name][key]
-
-    if "columns" in permissions[sheet_name][wiki_key][group]:
-        row = cull_invalid_columns(row, permissions[sheet_name][wiki_key][group]["columns"])
-
-    if fmt == "json":
-        return json.dumps(row)
-    elif fmt == "mwiki":
-        mwiki_templates = templates[sheet_name][wiki_key]['View']
-        chosen_view = request.args.get("view", mwiki_templates['default'])
-        config = sheet_config[sheet_name]
-        mwiki_template = mwiki_templates['templates'][chosen_view]
-        template = Template(mwiki_template)
-
-        return template.render({config["object_name"]: row})
-    else:
-        raise Exception("Invalid format: " + fmt)
-
-@app.route('/api/<sheet_name>/attachment/<key>/<attachment>')
-def attachment(sheet_name, key, attachment):
-    group = request.args.get("group")
-    wiki_key = request.args.get("wiki_key")
-
-    import urllib.parse
-    attachment = urllib.parse.unquote_plus(attachment)
-
-    valid_id = False
-
-    if wiki_key in permissions[sheet_name].keys() and group in permissions[sheet_name][wiki_key].keys():
-        if "valid_ids" not in permissions[sheet_name][wiki_key][group].keys():
-            valid_id = True
-        else:
-            valid_id = (key in permissions[sheet_name][wiki_key][group]["valid_ids"])
-
-    if not valid_id:
-        return "Invalid " + sheet_config[sheet_name]["key_column"]
-
-    attachment_name = secure_filename(attachment)
-    if not attachment_name in attachment_config.keys():
-        return "Invalid attachment: " + attachment_name
-
-    if attachment_config[attachment_name]["object_id"] != key:
-        return "Invalid " + sheet_config[sheet_name]["key_column"]
-
-    attachment_permissions_column = attachment_config[attachment_name]["permissions_column"]
-    if attachment_permissions_column not in permissions[sheet_name][wiki_key][group]["columns"]:
-        return "Invalid attachment: " + attachment_name
-
-    with open(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "attachments", attachment_name), "rb") as file:
-        return send_file(io.BytesIO(file.read()), attachment_filename = attachment_name)
-
-@app.route('/upload/sheet', methods=['POST'])
-def upload_sheet():
-    if 'data_file' not in request.files:
-        raise Exception("Must have file in data upload")
-    if 'object_name' not in request.form:
-        raise Exception("Must have the object_name")
-    if 'sheet_name' not in request.form:
-        raise Exception("Must have the sheet_name")
-    if 'key_column' not in request.form:
-        raise Exception("Must have the key_column name")
name") - - file = request.files['data_file'] - if file.filename == '': - raise Exception("File must have a filename associated with it") - if file: - sheet_name = secure_filename(request.form['sheet_name']) - - try: - os.mkdir(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name)) - except FileExistsError: - pass - - try: - os.mkdir(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "tocs")) - except FileExistsError: - pass - - try: - dir = os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "indices") - shutil.rmtree(dir, True) - os.mkdir(dir) - except FileExistsError: - pass - - try: - os.mkdir(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "attachments")) - except FileExistsError: - pass - - file.save(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, sheet_name + ".csv")) - - sheet_config[sheet_name] = {} - sheet_config[sheet_name]["object_name"] = request.form['object_name'] - sheet_config[sheet_name]["sheet_name"] = request.form['sheet_name'] - sheet_config[sheet_name]["key_column"] = request.form['key_column'] - with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "sheets"), 'w') as f: - sheet_config.write(f) - - load_sheet(sheet_name) - - return "" - -@app.route('/config///reset') -def reset_config(sheet_name, wiki_key): - global templates, permissions - permissions[sheet_name][wiki_key] = {} - templates[sheet_name][wiki_key] = {} - with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "permissions"), 'wb') as f: - pickle.dump(permissions, f) - - with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "templates"), 'wb') as f: - pickle.dump(templates, f) - - return '' - -@app.route('/config///group', methods=['POST']) -def set_group_config(sheet_name, wiki_key): - global permissions - new_config = request.json - - if sheet_name not in permissions.keys(): - permissions[sheet_name] = {} - - if wiki_key not in permissions[sheet_name].keys(): - permissions[sheet_name][wiki_key] = {} - - if 'group' not in new_config: - raise Exception("Must have a group name") - - group_name = new_config['group'] - if group_name not in permissions[sheet_name][wiki_key].keys(): - permissions[sheet_name][wiki_key][group_name] = {} - - if 'valid_ids' in new_config: - permissions[sheet_name][wiki_key][group_name]['valid_ids'] = new_config['valid_ids'] - - if 'columns' in new_config: - permissions[sheet_name][wiki_key][group_name]['columns'] = new_config['columns'] - - with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "permissions"), 'wb') as f: - pickle.dump(permissions, f) - - index_search(group_name, sheet_name, wiki_key) - - return '' - -@app.route('/config///template', methods=['POST']) -def set_template_config(sheet_name, wiki_key): - new_config = request.json - - if 'name' not in new_config: - raise Exception("Must have a template name") - - if 'type' not in new_config: - raise Exception("Must have a template type") - - if sheet_name not in templates.keys(): - templates[sheet_name] = {} - - if wiki_key not in templates[sheet_name].keys(): - templates[sheet_name][wiki_key] = {} - - template_type = new_config['type'] - if template_type not in templates[sheet_name][wiki_key]: - templates[sheet_name][wiki_key][template_type] = { - 'default': None, - 'templates': {} - } - - name = new_config['name'] - if name not in templates[sheet_name][wiki_key][template_type]['templates']: - # We just make the first one we get the default for the type - if not templates[sheet_name][wiki_key][template_type]['default']: - 
-            templates[sheet_name][wiki_key][template_type]['default'] = name
-    templates[sheet_name][wiki_key][template_type]['templates'][name] = new_config['template']
-
-    with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "templates"), 'wb') as f:
-        pickle.dump(templates, f)
-
-    return ''
-
-@app.route('/upload/toc', methods=['POST'])
-def upload_toc():
-    from werkzeug.utils import secure_filename
-
-    if 'json' not in request.files:
-        raise Exception("Must have json file")
-    if 'template' not in request.files:
-        raise Exception("Must have template file")
-    if 'sheet_name' not in request.form:
-        raise Exception("Must have the sheet_name")
-    if 'toc_name' not in request.form:
-        raise Exception("Must have the toc_name")
-
-    json_file = request.files['json']
-    template_file = request.files['template']
-    sheet_name = secure_filename(request.form['sheet_name'])
-    toc_name = secure_filename(request.form['toc_name'])
-
-    if json_file.filename == '':
-        raise Exception("json_file must have a filename associated with it")
-    if template_file.filename == '':
-        raise Exception("template_file must have a filename associated with it")
-    if sheet_name not in sheet_config.keys():
-        raise Exception(sheet_name + " is not an existing sheet")
-    if json_file and template_file:
-        secure_sheet_name = secure_filename(request.form['sheet_name'])
-
-        json_file.save(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "tocs", toc_name + ".json"))
-        template_file.save(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "tocs", toc_name + ".j2"))
-
-    return ""
-
-@app.route('/upload/attachment', methods=['POST'])
-def upload_attachment():
-    from werkzeug.utils import secure_filename
-
-    if 'attachment' not in request.files:
-        raise Exception("Must have file in data upload")
-    if 'attachment_name' not in request.form:
-        raise Exception("Must have the attachment_name")
-    if 'sheet_name' not in request.form:
-        raise Exception("Must have the sheet_name")
-    if 'permissions_column' not in request.form:
-        raise Exception("Must have the permissions_column")
-    if 'object_id' not in request.form:
-        raise Exception("Must have the object_id name")
-
-    attachment = request.files['attachment']
-    if attachment.filename == '':
-        raise Exception("attachment must have a filename associated with it")
-
-    sheet_name = secure_filename(request.form['sheet_name'])
-    if sheet_name not in sheet_config.keys():
-        raise Exception(sheet_name + " is not an existing sheet")
-
-    attachment_name = secure_filename(request.form['attachment_name'])
-    permissions_column = request.form['permissions_column']
-    object_id = request.form['object_id']
-
-    attachment_config[attachment_name] = {
-        'permissions_column': permissions_column,
-        'object_id': object_id
-    }
-
-    with open(os.path.join(app.config['SPREADSHEET_FOLDER'], "attachment_config"), 'wb') as f:
-        pickle.dump(attachment_config, f)
-
-    attachment.save(os.path.join(app.config['SPREADSHEET_FOLDER'], sheet_name, "attachments", attachment_name))
-
-    return ""
-
-# We need a lock because we have incrementing ids.
-# This should really be in a database....
-userlock = threading.Lock()
-
-@app.route('/users/username/<username>')
-def find_user_by_username(username):
-    if username not in users.keys():
-        with userlock:
-            # We start on 14 because then no one is user 13
-            users[username] = {"username": username, "id": len(users.items()) + 14}
-
-            # With help from https://stackoverflow.com/questions/2333872/atomic-writing-to-file-with-python
-            filepath = os.path.join(app.config['SPREADSHEET_FOLDER'], "users")
-            tmppath = filepath + '~'
-            with open(tmppath, 'wb') as f:
-                pickle.dump(users, f)
-                f.flush()
-                os.fsync(f.fileno())
-            os.rename(tmppath, filepath)
-
-    return json.dumps(users[username])
diff --git a/torquedata/torquedata/settings.py b/torquedata/torquedata/settings.py
new file mode 100644
index 00000000..8e221924
--- /dev/null
+++ b/torquedata/torquedata/settings.py
@@ -0,0 +1,72 @@
+"""
+Django settings for torquedata project.
+
+Generated by 'django-admin startproject' using Django 3.1.1.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/3.1/topics/settings/
+
+For the full list of settings and their values, see
+https://docs.djangoproject.com/en/3.1/ref/settings/
+"""
+
+from pathlib import Path
+
+# Build paths inside the project like this: BASE_DIR / 'subdir'.
+BASE_DIR = Path(__file__).resolve().parent.parent
+
+
+# Quick-start development settings - unsuitable for production
+# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = 'jl*u4x5xbp3nsy#%#jeik_0_e&^_iq@nolxpkkg_#d4_f4a#t-'
+
+# SECURITY WARNING: don't run with debug turned on in production!
+DEBUG = True
+
+ALLOWED_HOSTS = ['*']
+
+
+# Application definition
+
+INSTALLED_APPS = [
+    'django.contrib.contenttypes',
+    'core'
+]
+
+MIDDLEWARE = [
+    'django.middleware.security.SecurityMiddleware',
+    'django.middleware.common.CommonMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+
+ROOT_URLCONF = 'torquedata.urls'
+
+WSGI_APPLICATION = 'torquedata.wsgi.application'
+
+
+# Database
+# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
+
+DATABASES = {
+    'default': {
+        'ENGINE': 'django.db.backends.sqlite3',
+        'NAME': BASE_DIR / 'db.sqlite3',
+    }
+}
+
+
+# Internationalization
+# https://docs.djangoproject.com/en/3.1/topics/i18n/
+
+LANGUAGE_CODE = 'en-us'
+
+TIME_ZONE = 'UTC'
+
+USE_I18N = True
+
+USE_L10N = True
+
+USE_TZ = True
diff --git a/torquedata/torquedata/urls.py b/torquedata/torquedata/urls.py
new file mode 100644
index 00000000..25df1071
--- /dev/null
+++ b/torquedata/torquedata/urls.py
@@ -0,0 +1,19 @@
+"""torquedata URL Configuration
+
+The `urlpatterns` list routes URLs to views. For more information please see:
+    https://docs.djangoproject.com/en/3.1/topics/http/urls/
+Examples:
+Function views
+    1. Add an import: from my_app import views
+    2. Add a URL to urlpatterns: path('', views.home, name='home')
+Class-based views
+    1. Add an import: from other_app.views import Home
+    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
+Including another URLconf
+    1. Import the include() function: from django.urls import include, path
+    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
+"""
+from django.urls import path
+
+urlpatterns = [
+]
diff --git a/torquedata/torquedata/wsgi.py b/torquedata/torquedata/wsgi.py
new file mode 100644
index 00000000..7677a29c
--- /dev/null
+++ b/torquedata/torquedata/wsgi.py
@@ -0,0 +1,16 @@
+"""
+WSGI config for torquedata project.
+
+It exposes the WSGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
+"""
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'torquedata.settings')
+
+application = get_wsgi_application()