SQLite
parent dc801464ed · commit 0401acd31b
.gitignore (vendored) | 5

@@ -98,4 +98,7 @@ ENV/
 /site
 
 # mypy
 .mypy_cache/
+
+# custom
+wayback.db
.vscode/settings.json (vendored) | 2

@@ -1,4 +1,4 @@
 {
-    "python.pythonPath": "/usr/bin/python3.6",
+    "python.pythonPath": "C:\\Users\\Emily\\AppData\\Local\\Programs\\Python\\Python36\\python.exe",
     "python.linting.pylintEnabled": true
 }
@@ -1,12 +1,57 @@
+import json
+
 import tornado.gen
 import tornado.web
 
+from web import asyncTornado
+from constants import argumentTypes
 from objects import glob
 
-allowed_args = ["file_hash", "file_version", "timestamp"]
-
-def handle(requestsManager.asyncRequestHandler):
-    return {}
-
-def callback(method, data):
-    return None
+ARGS = {
+    ("file_hash", "file_version", "timestamp"): argumentTypes.one_required
+}
+
+SQL_STRUCT = {
+    "main": "SELECT * FROM updates WHERE %s LIMIT 1",
+    "file_hash": "%s = '%s'",
+    "file_version": "%s = %s",
+    "timestamp": "timestamp <= '%s' ORDER BY timestamp DESC"
+}
+
+class handler(asyncTornado.asyncRequestHandler):
+    @tornado.web.asynchronous
+    @tornado.gen.engine
+    def asyncGet(self):
+        status_code = 400
+        data = {}
+        try:
+            args_filter = asyncTornado.check_arguments(self.request.arguments, ARGS)
+            if False in args_filter:
+                raise Exception("Missing required arguments")
+
+            method = args_filter[0]
+            method_value = self.request.arguments[method]
+
+            cur = glob.sql.cursor()
+
+            sql = SQL_STRUCT["main"] % SQL_STRUCT["method"]
+            if method == "timestamp":
+                sql = sql % method_value
+            else:
+                sql = sql % (method, method_value)
+
+            cur.execute(sql)
+            data = cur.fetchone()
+
+            status_code = 200
+        except Exception as e:
+            status_code = 400
+            data["status"] = status_code
+            data["message"] = e
+        finally:
+            cur.close()
+
+        self.write( json.dumps(data) )
+        self.set_header("Content-Type", "application/json")
+        self.set_status(status_code)
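Hedged note, not part of the commit: in the handler above, SQL_STRUCT["method"] looks like it was meant to be SQL_STRUCT[method], and the query is built by %-interpolating request values into the SQL string. A sketch of the same lookup using sqlite3 placeholders instead (names here are illustrative, not from the repository):

import sqlite3

QUERIES = {
    "file_hash":    "SELECT * FROM updates WHERE file_hash = ? LIMIT 1",
    "file_version": "SELECT * FROM updates WHERE file_version = ? LIMIT 1",
    "timestamp":    "SELECT * FROM updates WHERE timestamp <= ? ORDER BY timestamp DESC LIMIT 1",
}

def lookup_update(conn, method, value):
    # method must be one of the whitelisted keys above; value comes from the request
    cur = conn.cursor()
    try:
        cur.execute(QUERIES[method], (value,))  # placeholder binding, no string formatting
        return cur.fetchone()
    finally:
        cur.close()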
config.json | 10

@@ -1,9 +1,6 @@
 {
-    "web": {
-        "debug": true,
-        "use_reloader": true,
-        "threaded": true,
-        "host": "127.0.0.1",
+    "server": {
+        "host": "0.0.0.0",
         "port": 3003
     },
     "sql": {
@@ -23,5 +20,6 @@
     "zipper": {
         "temp_folder": "/home/wayback/tmp",
         "output_folder": "/home/wayback/archive"
-    }
+    },
+    "threads": 4
 }
constants/__init__.py (new empty file) | 0
constants/argumentTypes.py (new file) | 4

@@ -0,0 +1,4 @@
+optional = 0
+required = 1
+one_required = 2
+only_one = 3
database_structs.sql (new file) | 17

@@ -0,0 +1,17 @@
+CREATE TABLE `updates` (
+    `file_version` int(11) NOT NULL,
+    `filename` varchar(32) NOT NULL,
+    `file_hash` varchar(32) NOT NULL,
+    `filesize` int(11) NOT NULL,
+    `timestamp` int(11) NOT NULL,
+    `patch_id` int(11) DEFAULT NULL,
+    `url_full` varchar(128) NOT NULL,
+    `url_patch` varchar(128) DEFAULT NULL,
+    PRIMARY KEY (`file_version`)
+);
+
+CREATE TABLE `osu_builds` (
+    `id` int(11) NOT NULL,
+    `version` varchar(128) NOT NULL,
+    PRIMARY KEY (`id`)
+);
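Hedged note, not part of the commit: the DDL above uses MySQL-style backticks and int(11)/varchar(n) column types; SQLite accepts both (backtick identifier quoting and type affinity), so the script loads unchanged through executescript(). A small illustrative round-trip with made-up sample values:

import sqlite3

conn = sqlite3.connect(":memory:")
with open("database_structs.sql") as f:
    conn.executescript(f.read())  # runs both CREATE TABLE statements

conn.execute(
    "INSERT INTO updates (file_version, filename, file_hash, filesize, timestamp, url_full) "
    "VALUES (?, ?, ?, ?, ?, ?)",
    (20180101, "example.exe", "d41d8cd98f00b204e9800998ecf8427e", 123456, 1514764800, "https://example.com/full"),
)
print(conn.execute("SELECT filename FROM updates WHERE file_version = ?", (20180101,)).fetchone())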
main.py | 36

@@ -1,4 +1,6 @@
-from os import listdir
+from os import listdir, path
 
+import sqlite3
+
 import tornado.gen
 import tornado.httpserver
@@ -6,6 +8,9 @@ import tornado.ioloop
 import tornado.web
 import tornado.netutil
 
+
+from objects import glob
+
 def make_app():
     """
     Make tornado application instance
@@ -35,7 +40,7 @@ def make_app():
             api = api.rstrip(".py")
             routes.append(
                 (
-                    r"/%s/%s" % (dir, api), __import__("%s.%s" % (dir.replace("/", "."), api), fromlist=[""]).handle
+                    r"/%s/%s" % (dir, api), __import__("%s.%s" % (dir.replace("/", "."), api), fromlist=[""])
             ))
         else:
             routes += map_routes("%s/%s" % (dir, api))
@@ -43,3 +48,30 @@ def make_app():
     routes = map_routes("api")
     return tornado.web.Application(routes)
 
+
+def build_database():
+    f = open("wayback.db", "w")
+    f.close()
+
+    glob.sql = sqlite3.connect("wayback.db")
+    cur = glob.sql.cursor()
+
+    with open("database_structs.sql", "r") as f:
+        cur.executescript( f.read() )
+
+    cur.close()
+    print("[!] New sqlite database created")
+
+if __name__ == "__main__":
+    glob.app = make_app()
+
+    if not path.isfile("wayback.db"):
+        build_database()
+
+    if glob.sql == None:
+        glob.sql = sqlite3.connect("wayback.db")
+
+    print("Serving at %s:%s" % (glob.config["server"]["host"], glob.config["server"]["port"]))
+    print("To stop server press CTRL + C")
+    glob.app.listen(glob.config["server"]["port"], address=glob.config["server"]["host"])
+    tornado.ioloop.IOLoop.instance().start()
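Hedged note, not part of the commit: build_database() first truncates wayback.db with open(..., "w"), although sqlite3.connect() creates the file on its own. A variant that folds the existence check and the schema load into one helper (names are illustrative):

import sqlite3
from os import path

def open_database(db_path="wayback.db", schema="database_structs.sql"):
    fresh = not path.isfile(db_path)   # remember whether the file already existed
    conn = sqlite3.connect(db_path)    # creates the file if it is missing
    if fresh:
        with open(schema, "r") as f:
            conn.executescript(f.read())  # Connection objects expose executescript() directly
        print("[!] New sqlite database created")
    return conn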
objects/__init__.py (new empty file) | 0
@@ -1 +1,9 @@
-sql = None
+import json
+from multiprocessing.pool import ThreadPool
+
+with open("config.json", "r") as f:
+    config = json.load(f)
+
+app = None
+sql = None
+pool = ThreadPool(config["threads"])
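Hedged note, not part of the commit: glob.pool runs handlers on worker threads, while a default sqlite3 connection may only be used from the thread that created it. One assumed way to reconcile the two is a shared connection opened with check_same_thread=False and guarded by a lock:

import sqlite3
import threading

sql = sqlite3.connect("wayback.db", check_same_thread=False)
sql_lock = threading.Lock()

def query_one(statement, params=()):
    with sql_lock:  # serialise access across pool threads
        cur = sql.cursor()
        try:
            cur.execute(statement, params)
            return cur.fetchone()
        finally:
            cur.close()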
web/__init__.py (new empty file) | 0
@@ -1,6 +1,110 @@
 import tornado
 import tornado.web
 import tornado.gen
+from tornado.ioloop import IOLoop
 
+from constants import argumentTypes
+from objects import glob
+
+# Copied from https://zxq.co/ripple/ripple-python-common/src/branch/master/web/requestsManager.py
 class asyncRequestHandler(tornado.web.RequestHandler):
+    """
+    Tornado asynchronous request handler
+    create a class that extends this one (requestHelper.asyncRequestHandler)
+    use asyncGet() and asyncPost() instead of get() and post().
+    Done. I'm not kidding.
+    """
+    @tornado.web.asynchronous
+    @tornado.gen.engine
+    def get(self, *args, **kwargs):
+        try:
+            yield tornado.gen.Task(runBackground, (self.asyncGet, tuple(args), dict(kwargs)))
+        finally:
+            if not self._finished:
+                self.finish()
+
+    @tornado.web.asynchronous
+    @tornado.gen.engine
+    def post(self, *args, **kwargs):
+        try:
+            yield tornado.gen.Task(runBackground, (self.asyncPost, tuple(args), dict(kwargs)))
+        finally:
+            if not self._finished:
+                self.finish()
+
+    def asyncGet(self, *args, **kwargs):
+        self.send_error(405)
+
+    def asyncPost(self, *args, **kwargs):
+        self.send_error(405)
+
+    def getRequestIP(self):
+        """
+        Return CF-Connecting-IP (request IP when under cloudflare, you have to configure nginx to enable that)
+        If that fails, return X-Forwarded-For (request IP when not under Cloudflare)
+        if everything else fails, return remote IP
+
+        :return: Client IP address
+        """
+        if "CF-Connecting-IP" in self.request.headers:
+            return self.request.headers.get("CF-Connecting-IP")
+        elif "X-Forwarded-For" in self.request.headers:
+            return self.request.headers.get("X-Forwarded-For")
+        else:
+            return self.request.remote_ip
+
+
+def runBackground(data, callback):
+    """
+    Run a function in the background.
+    Used to handle multiple requests at the same time
+
+    :param data: (func, args, kwargs)
+    :param callback: function to call when `func` (data[0]) returns
+    :return:
+    """
+    func, args, kwargs = data
+    def _callback(result):
+        IOLoop.instance().add_callback(lambda: callback(result))
+    glob.pool.apply_async(func, args, kwargs, _callback)
+
+
+def check_arguments(arguments, arguments_filter):
+    filter_pass = []
+    for k, v in arguments_filter.items():
+        if v == argumentTypes.optional:
+            filter_pass.append( arg_filter_and(arguments, k) )
+        elif v == argumentTypes.required:
+            filter_pass.append( arg_filter_require_all(arguments, k) )
+        elif v == argumentTypes.one_required:
+            filter_pass.append( arg_filter_first(arguments, k, False) )
+        elif v == argumentTypes.only_one:
+            filter_pass.append( arg_filter_only_one(arguments, k) )
+    return filter_pass
+
+
+def arg_filter_and(arguments, filter, can_false = False):
+    arg_filter = []
+    for i in filter:
+        if i in filter:
+            arg_filter.append(i)
+    if can_false:
+        return arg_filter if len(arg_filter) else False
+    return arg_filter
+
+
+def arg_filter_require_all(arguments, required):
+    for i in required:
+        if i not in arguments:
+            return False
+    return required
+
+
+def arg_filter_only_one(arguments, required):
+    arg_filter = []
+    for i in required:
+        if i in arguments:
+            arg_filter.append(i)
+    return True if len(arg_filter) == 1 else False
+
+
+def arg_filter_first(arguments, filter, optional = True):
+    for i in filter:
+        if i in filter:
+            return i
+    return optional
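Hedged note, not part of the commit: arg_filter_and() and arg_filter_first() iterate over filter but then test `if i in filter`, which is always true, so the arguments parameter is never consulted. They presumably meant membership in the request arguments; a corrected sketch (function names here are illustrative):

def arg_filter_and_fixed(arguments, keys, can_false=False):
    present = [k for k in keys if k in arguments]  # keep only keys the client actually sent
    if can_false and not present:
        return False
    return present

def arg_filter_first_fixed(arguments, keys, default=True):
    for k in keys:
        if k in arguments:  # first whitelisted key present in the request
            return k
    return default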