forked from Gardener/ShareX_Storage
finally bringing this into a repo
This commit is contained in:
commit
dee5744f89
28
config.yaml
Normal file
28
config.yaml
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
# if this is false, everyone can upload stuff.
|
||||||
|
auth: True
|
||||||
|
|
||||||
|
# if auth is true, only clients that send one of these tokens in the http headers are allowed to upload.
|
||||||
|
tokens:
|
||||||
|
- 'example token'
|
||||||
|
|
||||||
|
# everyone who has it can delete files based on the download link.
|
||||||
|
del_crypt_key: 'secret delete link key'
|
||||||
|
|
||||||
|
# in bytes
|
||||||
|
max_filesize: 1024 ** 2 * 100
|
||||||
|
|
||||||
|
# uploaded files will be stored here
|
||||||
|
data_path: 'data'
|
||||||
|
|
||||||
|
# this can only be a multiple of 2
|
||||||
|
url_hash_len: 6
|
||||||
|
|
||||||
|
# just affects the printed links (e.g. for reverse proxies)
|
||||||
|
protocol: 'https'
|
||||||
|
|
||||||
|
# uri that routes to this
|
||||||
|
prefix: '/f'
|
||||||
|
|
||||||
|
# whether or not extensions are appended in the generated links.
|
||||||
|
# links without file extensions still work. (just like push)
|
||||||
|
show_ext: True
|
15
nginx_example.conf
Normal file
15
nginx_example.conf
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
upstream filehoster {
|
||||||
|
server file_hoster:80;
|
||||||
|
}
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 443 ssl http2;
|
||||||
|
|
||||||
|
server_name your-domain.net;
|
||||||
|
|
||||||
|
# ShareX_Storage prefix is /f
|
||||||
|
location /f/ {
|
||||||
|
client_max_body_size 100M;
|
||||||
|
proxy_pass http://filehoster$uri/$server_name;
|
||||||
|
}
|
||||||
|
}
|
169
sharex_server.py
Normal file
169
sharex_server.py
Normal file
@ -0,0 +1,169 @@
|
|||||||
|
import os
|
||||||
|
import io
|
||||||
|
import sys
|
||||||
|
import yaml
|
||||||
|
import string
|
||||||
|
import hashlib
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from Cryptodome.Cipher import AES
|
||||||
|
from Cryptodome.Util import Padding
|
||||||
|
from aiohttp import web
|
||||||
|
|
||||||
|
|
||||||
|
class AttrDict(dict):
    """A dict whose keys double as attributes.

    Attribute reads on missing keys materialize an empty AttrDict under
    that key, which makes chained config lookups like ``conf.a.b`` safe.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # make attribute access and item access share the same storage
        self.__dict__ = self

    def update(self, *d, **kwargs):
        """Update from a single positional mapping, or from keyword args."""
        source = d[0] if d else kwargs
        for key, value in source.items():
            setattr(self, key, value)

    def __getattr__(self, item):
        # only invoked when normal attribute lookup fails, i.e. key absent
        return self.setdefault(item, AttrDict())
|
||||||
|
|
||||||
|
|
||||||
|
def cast_to_ad(d):
    """Recursively convert a plain dict into an AttrDict.

    Keys containing spaces are renamed with underscores so they are usable
    as attribute names; nested dicts are converted in place.
    """
    if not isinstance(d, AttrDict):
        d = AttrDict(d)
    # iterate over a snapshot since we mutate d while walking it
    for k, v in dict(d.items()).items():
        if " " in k:
            del d[k]
            # fix: rebind k so the recursion below stores the converted
            # value under the NEW key instead of resurrecting the old one
            k = k.replace(" ", "_")
            d[k] = v
        if isinstance(v, dict):
            d[k] = cast_to_ad(v)
    return d
|
||||||
|
|
||||||
|
|
||||||
|
def sizeof_fmt(num, suffix='B'):
    """Render *num* as a human-readable size using binary (1024) prefixes."""
    prefixes = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi')
    idx = 0
    while idx < len(prefixes):
        if abs(num) < 1024.0:
            return f"{num:3.1f}{prefixes[idx]}{suffix}"
        num /= 1024.0
        idx += 1
    # anything past zebi falls through to yobi
    return f"{num:.1f}Yi{suffix}"
|
||||||
|
|
||||||
|
|
||||||
|
async def prepare(_, handler):
    """aiohttp middleware factory: resolve the account from the route and
    hand the handler its per-account file db."""
    async def prepare_handler(req):
        # every registered route carries an {acc} segment; missing means
        # a route was wired up wrong
        if 'acc' not in req.match_info:
            return web.Response(text='internal server error', status=500)
        acc = req.match_info["acc"]
        return await handler(req, acc, file_db[acc])

    return prepare_handler
|
||||||
|
|
||||||
|
|
||||||
|
async def handle_upload(req, acc, acc_db):
    """Receive a multipart upload, store it under a fresh random url hash
    and return the download link plus an encrypted delete link."""
    if conf.auth and req.headers.get('auth') not in conf.auth_tokens:
        return web.Response(text='access denied', status=403)
    reader = await req.multipart()
    file = await reader.next()

    # never trust a client-supplied path; keep only the basename
    filename = os.path.basename(file.filename)

    if not os.path.isdir(f'{conf.data_path}/{acc}'):
        os.mkdir(f'{conf.data_path}/{acc}')

    # draw a random, unused url hash; give up after 100 collisions
    for _ in range(100):
        hb = os.urandom(conf.url_hash_len//2)
        h = hb.hex()
        if h not in acc_db:
            break
    else:
        return web.Response(text='server full', status=500)

    acc_db[h] = filename
    # fix: store as '<hash>_<original name>' so delete/download (which
    # rebuild the path from acc_db[hash]) can actually find the file
    local_fname = f'{conf.data_path}/{acc}/{h}_{filename}'
    ext = os.path.splitext(filename)[1] if conf.show_ext else ''
    # pre-create the file with owner-only permissions before writing;
    # fix: close the raw fd directly instead of fdopen()ing a write-only
    # fd in the default read mode
    os.close(os.open(local_fname, os.O_WRONLY | os.O_CREAT, 0o600))
    try:
        valid_file = await recv_file(file, local_fname)
    except IOError:
        return web.Response(text='internal io error', status=500)
    if valid_file:
        # the delete token is hex(AES-CBC(padded hash bytes) + iv)
        c = AES.new(conf.del_crypt_key, AES.MODE_CBC)
        hb = Padding.pad(hb, AES.block_size)
        del_h = (c.encrypt(hb) + c.iv).hex()
        return web.Response(text=f'{conf.protocol}://{acc}{conf.prefix}/{h[:conf.url_hash_len]}{ext}\n'
                                 f'{conf.protocol}://{acc}{conf.prefix}/del/{del_h}')
    # upload exceeded max_filesize: roll back file and db entry
    os.unlink(local_fname)
    del acc_db[h]
    return web.Response(text=f'file is bigger than {sizeof_fmt(conf.max_filesize)}', status=413)
|
||||||
|
|
||||||
|
|
||||||
|
async def recv_file(file, local_fname):
    """Stream a multipart body part to *local_fname*.

    Returns True when the part was fully written, False when it exceeds
    conf.max_filesize (the partially written file is left for the caller
    to remove).
    """
    size = 0
    # open(..., 'wb') already returns a buffered writer; the original
    # io.BufferedWriter wrapper added a redundant second buffer layer
    with open(local_fname, 'wb') as f:
        while True:
            chunk = await file.read_chunk()
            if not chunk:
                return True
            size += len(chunk)
            if size > conf.max_filesize:
                return False
            f.write(chunk)
|
||||||
|
|
||||||
|
|
||||||
|
async def handle_delete(req, acc, acc_db):
    """Delete a stored file identified by an encrypted delete token.

    The token is hex(AES-CBC(padded url hash) + iv): 32 ciphertext hex
    chars followed by 32 iv hex chars, 64 in total.
    """
    # default 'x' can never pass the hex/length validation below
    chashiv = req.match_info.get('hash', 'x')
    if not set(chashiv).issubset(valid_hash_chars) or len(chashiv) != 64:
        return web.Response(text='invalid delete link', status=400)
    chashiv = bytes.fromhex(chashiv)

    # first AES.block_size bytes are the ciphertext, the remainder the iv
    c = AES.new(conf.del_crypt_key, AES.MODE_CBC, iv=chashiv[AES.block_size:])
    fhash = c.decrypt(chashiv[:AES.block_size])
    try:
        fhash = Padding.unpad(fhash, AES.block_size).hex()
    except ValueError:
        # bad padding (forged/garbled token): fhash stays raw bytes and
        # will fail the db membership test below
        pass
    # NOTE(review): the len(fhash) == 32 guard looks intended to reject
    # tokens whose padding survived as a full block; a successfully
    # unpadded hex hash is always shorter than 32 chars -- confirm intent
    if fhash not in acc_db or len(fhash) == 32:
        return web.Response(text='this file doesn\'t exist on the server', status=404)
    os.unlink(f"{conf.data_path}/{acc}/{fhash}_{acc_db[fhash]}")
    del acc_db[fhash]
    return web.Response(text='file deleted')
|
||||||
|
|
||||||
|
|
||||||
|
async def handle_download(req, acc, acc_db):
    """Serve a stored file inline, looked up by its url hash."""
    raw = req.match_info.get('hash', '')
    # a trailing file extension on the link is optional; drop it
    fhash = raw.split('.', 1)[0]
    if fhash not in acc_db:
        return web.Response(text='file not found', status=404)
    headers = {'CONTENT-DISPOSITION': f'inline;filename={acc_db[fhash]}'}
    return web.FileResponse(f"{conf.data_path}/{acc}/{fhash}_{acc_db[fhash]}", headers=headers)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Rebuild the in-memory file db from disk, then run the web server."""
    if not os.path.isdir(conf.data_path):
        os.mkdir(conf.data_path)
    for acc in os.listdir(conf.data_path):
        if not os.path.isdir(f'{conf.data_path}/{acc}'):
            continue
        for file in os.listdir(f"{conf.data_path}/{acc}"):
            # files are stored as '<urlhash>_<original name>'; skip any
            # stray file instead of crashing on the tuple unpack
            if '_' not in file:
                continue
            fhash, fname = file.split('_', 1)
            file_db[acc][fhash] = fname

    app = web.Application(middlewares=[prepare])
    app.router.add_post(conf.prefix + '/post/{acc}', handle_upload)
    app.router.add_get(conf.prefix + '/del/{hash}/{acc}', handle_delete)
    app.router.add_get(conf.prefix + '/{hash}/{acc}', handle_download)

    web.run_app(app, port=80)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    valid_hash_chars = set(string.hexdigits)
    # per-account map of url-hash -> original filename
    file_db = defaultdict(dict)
    # optional config path as first cli argument; fix: the original passed
    # the *list* sys.argv[1:] to os.path.isfile, which raises TypeError
    confname = sys.argv[1] if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]) else 'config.yaml'
    with open(confname) as cf:
        # safe_load: config files never need arbitrary-object construction
        conf = cast_to_ad(yaml.safe_load(cf))
    if conf.url_hash_len > 31:
        raise ValueError('url_hash_len can\'t be bigger than 31')
    # fix: validate against digits only -- hexdigits let letters a-f
    # through, which would make the eval below blow up (or resolve names)
    if not set(str(conf.max_filesize).replace(' ', ''))\
            .issubset(set(string.digits) | {'*'}):
        raise ValueError('max_filesize only can contain numbers and *')
    # eval is acceptable here: input was just restricted to digits and '*'
    conf.max_filesize = eval(str(conf.max_filesize))
    conf.auth_tokens = set(conf.tokens)
    conf.prefix = conf.prefix.strip("/")
    if conf.prefix:
        conf.prefix = f'/{conf.prefix}'
    # derive a fixed 16-byte AES key from the configured passphrase
    conf.del_crypt_key = hashlib.md5(conf.del_crypt_key.encode()).digest()[:16]
    main()
|
Loading…
Reference in New Issue
Block a user