Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions config.env.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
'sqlite:///{}'.format(os.path.join(os.getcwd(), 'data.db')))
SQLALCHEMY_TRACK_MODIFICATIONS = False

S3_URI = os.environ.get('GALLERY_S3_URI', '')
S3_ACCESS_ID = os.environ.get('GALLERY_S3_ACCESS_ID','')
S3_SECRET_KEY = os.environ.get('GALLERY_S3_SECRET_KEY','')
S3_BUCKET_ID = os.environ.get('GALLERY_S3_BUCKET_ID','')
154 changes: 79 additions & 75 deletions gallery/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@
ClientMetadata,
)
from gallery._version import __version__, BUILD_REFERENCE, COMMIT_HASH
from gallery.s3 import S3
from gallery.file_store import (S3Storage, LocalStorage, FileStorage)
from gallery.ldap import LDAPWrapper
import flask_migrate
import requests
from werkzeug import secure_filename
Expand All @@ -54,6 +55,7 @@
app_config.from_pyfile(os.path.join(os.getcwd(), "config.py"))
else:
app_config.from_pyfile(os.path.join(os.getcwd(), "config.env.py"))
app.config.update(app_config)

db: SQLAlchemy = SQLAlchemy(app)
migrate = flask_migrate.Migrate(app, db)
Expand All @@ -63,21 +65,35 @@

auth = OIDCAuthentication({
'default': ProviderConfiguration(
issuer=app.config['OIDC_ISSUER'],
issuer=app_config['OIDC_ISSUER'],
client_metadata=ClientMetadata(
client_id=app.config['OIDC_CLIENT_ID'],
client_secret=app.config['OIDC_CLIENT_SECRET']
client_id=app_config['OIDC_CLIENT_ID'],
client_secret=app_config['OIDC_CLIENT_SECRET']
)
)
}, app)

ldap = CSHLDAP(app.config['LDAP_BIND_DN'],
app.config['LDAP_BIND_PW'])
if "LDAP_BIND_DN" in app.config:
ldap = LDAPWrapper(CSHLDAP(
app.config['LDAP_BIND_DN'],
app.config['LDAP_BIND_PW'],
))
else:
ldap = LDAPWrapper(
None,
app.config.get("EBOARD_UIDS", "").split(","),
app.config.get("RTP_UIDS", "").split(","),
)

s3 = S3('s3.csh.rit.edu',
access_key=app.config['S3_ACCESS_ID'],
secret_key=app.config['S3_SECRET_KEY'],
secure=True)
app.add_template_global(ldap, name="ldap")

storage_interface: FileStorage
if "LOCAL_STORAGE_PATH" in app.config:
storage_interface = LocalStorage(app)
elif "S3_URI" in app.config:
storage_interface = S3Storage(app)
else:
raise Exception("Please configure a storage provider")

# pylint: disable=C0413
from gallery.models import Directory
Expand All @@ -97,43 +113,6 @@
from gallery.file_modules import supported_mimetypes
from gallery.file_modules import FileModule

import gallery.ldap as gallery_ldap
from gallery.ldap import ldap_convert_uuid_to_displayname
from gallery.ldap import ldap_get_members

for func in inspect.getmembers(gallery_ldap):
if func[0].startswith("ldap_"):
unwrapped = inspect.unwrap(func[1])
if inspect.isfunction(unwrapped):
app.add_template_global(inspect.unwrap(unwrapped), name=func[0])

# Ensure that we have a root directory
# XXX there's definitely a better way to do this, I don't have access to the
# docs right now since I'm on a plane, but I'd wager the SQLAlchemy has a way to
# get the number of rows in a table without retrieving them (especially since
# Postgres definitely support this)
if len([d for d in Directory.query.all()]) == 0:
root_dir = Directory(None, "Gallery!",
"A Multimedia Gallery Written in Python with Flask!",
"root", DEFAULT_THUMBNAIL_NAME, "{\"g\":[]}")
db.session.add(root_dir)
db.session.flush()
db.session.commit()

# Upload the default thumbnail photo to S3 if it's not already up there
# XXX it's probably a good idea to move this outside of the root directory
# creation check. That way if a deployment is given incorrect S3 credentials
# when the root directory is created we can still recover from the case
# where there is not default thumbnail
default_thumbnail_path = "thumbnails/" + DEFAULT_THUMBNAIL_NAME + ".jpg"
file_stat = os.stat(default_thumbnail_path)

with open(default_thumbnail_path, "rb") as f_hnd:
s3.put_object(app.config['S3_BUCKET_ID'],
"files/" + DEFAULT_THUMBNAIL_NAME,
f_hnd,
file_stat.st_size)


@app.route("/")
@auth.oidc_auth('default')
Expand Down Expand Up @@ -203,20 +182,20 @@ def upload_file(auth_dict: Optional[Dict[str, Any]] = None):
# Upload File
file_stat = os.stat(filepath)
with open(filepath, "rb") as f_hnd:
s3.put_object(app.config['S3_BUCKET_ID'],
"files/" + file_model.s3_id,
f_hnd,
file_stat.st_size)
storage_interface.put(
"files/{}".format(file_model.s3_id),
f_hnd
)
os.remove(filepath)

# Upload Thumbnail
filepath = os.path.join(dir_path, file_model.thumbnail_uuid)
file_stat = os.stat(filepath)
with open(filepath, "rb") as f_hnd:
s3.put_object(app.config['S3_BUCKET_ID'],
"thumbnails/" + file_model.s3_id,
f_hnd,
file_stat.st_size)
storage_interface.put(
"thumbnails/" + file_model.s3_id,
f_hnd,
)
os.remove(filepath)
os.rmdir(dir_path)
if file_model is None:
Expand Down Expand Up @@ -335,6 +314,36 @@ def refresh_thumbnails():
refresh_thumbnail()


@app.cli.command()
def init_root():
    """Flask CLI command: seed the root gallery directory and its thumbnail.

    Only performs work when the Directory table is empty, so it is safe to
    invoke on every deploy.
    """
    click.echo("Initializing root directory")
    # Ensure that we have a root directory
    # XXX there's definitely a better way to do this, I don't have access to the
    # docs right now since I'm on a plane, but I'd wager the SQLAlchemy has a way to
    # get the number of rows in a table without retrieving them (especially since
    # Postgres definitely supports this)
    if len([d for d in Directory.query.all()]) == 0:
        # Root directory has no parent (None) and uses the default thumbnail.
        root_dir = Directory(None, "Gallery!",
                             "A Multimedia Gallery Written in Python with Flask!",
                             "root", DEFAULT_THUMBNAIL_NAME, "{\"g\":[]}")
        db.session.add(root_dir)
        db.session.flush()
        db.session.commit()

        # Upload the default thumbnail photo to the storage backend if it's
        # not already up there
        # XXX it's probably a good idea to move this outside of the root directory
        # creation check. That way if a deployment is given incorrect storage
        # credentials when the root directory is created we can still recover
        # from the case where there is no default thumbnail
        default_thumbnail_path = "thumbnails/" + DEFAULT_THUMBNAIL_NAME + ".jpg"

        with open(default_thumbnail_path, "rb") as f_hnd:
            storage_interface.put(
                "files/{}".format(DEFAULT_THUMBNAIL_NAME),
                f_hnd,
            )


def add_directory(parent_id: str, name: str, description: str, owner: str):
dir_siblings = Directory.query.filter(Directory.parent == parent_id).all()
for sibling in dir_siblings:
Expand Down Expand Up @@ -433,10 +442,13 @@ def delete_file(file_id: int, auth_dict: Optional[Dict[str, Any]] = None):
).filter(
File.s3_id == file_model.s3_id
).first() is None:
s3.remove_object(app.config['S3_BUCKET_ID'],
"files/" + file_model.s3_id)
s3.remove_object(app.config['S3_BUCKET_ID'],
"thumbnails/" + file_model.s3_id)

storage_interface.remove(
"files/" + file_model.s3_id,
)
storage_interface.remove(
"thumbnails/" + file_model.s3_id,
)

current_tags = Tag.query.filter(Tag.file_id == file_id).all()
for tag in current_tags:
Expand Down Expand Up @@ -646,27 +658,22 @@ def tag_file(file_id: int):
@app.route("/api/file/get/<int:file_id>")
@auth.oidc_auth('default')
def display_file(file_id: int):
file_id = int(file_id)
file_model = File.query.filter(File.id == file_id).first()

if file_model is None:
return "file not found", 404

presigned_url = s3.presigned_get_object(app.config['S3_BUCKET_ID'],
"files/" + file_model.s3_id,
expires=timedelta(minutes=5))
return redirect(presigned_url)
link = storage_interface.get_link("files/{}".format(file_model.s3_id))
return redirect(link)


@app.route("/api/thumbnail/get/<int:file_id>")
@auth.oidc_auth('default')
def display_thumbnail(file_id: int):
file_model = File.query.filter(File.id == file_id).first()

presigned_url = s3.presigned_get_object(app.config['S3_BUCKET_ID'],
"thumbnails/" + file_model.s3_id,
expires=timedelta(minutes=5))
return redirect(presigned_url)
link = storage_interface.get_link("thumbnails/{}".format(file_model.s3_id))
return redirect(link)


@app.route("/api/thumbnail/get/dir/<int:dir_id>")
Expand All @@ -679,11 +686,8 @@ def display_dir_thumbnail(dir_id: int):
if len(thumbnail_uuid.split('.')) > 1:
thumbnail_uuid = thumbnail_uuid.split('.')[0]

presigned_url = s3.presigned_get_object(app.config['S3_BUCKET_ID'],
"thumbnails/" +
thumbnail_uuid,
expires=timedelta(minutes=5))
return redirect(presigned_url)
link = storage_interface.get_link("thumbnails/{}".format(thumbnail_uuid))
return redirect(link)


@app.route("/api/file/next/<int:file_id>")
Expand Down Expand Up @@ -746,7 +750,7 @@ def get_dir_children(dir_id: int) -> Any:
children.sort(key=lambda x: x['name'])
return children

root = Directory.query.filter(Directory.parent is None).first()
root = Directory.query.filter(Directory.parent == None).first()

tree = {}

Expand Down Expand Up @@ -904,7 +908,7 @@ def view_filtered(auth_dict: Optional[Dict[str, Any]] = None):
@app.route("/api/memberlist")
@auth.oidc_auth('default')
def get_member_list():
return jsonify(ldap_get_members())
return jsonify(ldap.get_members())


@app.errorhandler(404)
Expand Down
120 changes: 120 additions & 0 deletions gallery/file_store.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import os
import shutil
from datetime import timedelta
from typing import IO

import boto3
from botocore.client import Config
import flask
from flask import abort, send_from_directory, url_for
from itsdangerous import (
URLSafeTimedSerializer
)


class FileStorage(object):
    """
    FileStorage represents the interface for interacting with some kind of
    backing storage system. This system is used to store the actual image and
    thumbnail data persistently.

    Concrete implementations (local-disk, S3, ...) must override every
    method. The base class raises NotImplementedError instead of silently
    returning None (the previous ``pass`` bodies violated ``get_link``'s
    declared ``-> str`` contract), so a missing override fails loudly.
    """

    def put(self, key: str, handle: IO[bytes]):
        """
        Stream data from a local file descriptor into the backing storage
        implementation under the given key.

        :param key: storage key, e.g. "files/<uuid>" or "thumbnails/<uuid>"
        :param handle: readable binary file object positioned at the start
        """
        raise NotImplementedError

    def remove(self, key: str):
        """
        Remove the object stored under the given key from the backing
        storage implementation.
        """
        raise NotImplementedError

    def get_link(self, key: str) -> str:
        """
        Return a publicly facing link to the object stored under the given
        key in the backing storage system.
        """
        raise NotImplementedError


class LocalStorage(FileStorage):
    """
    LocalStorage uses a local filesystem path as the basis for storing and
    serving photos and thumbnails. We generate temp links using the
    itsdangerous library: the file key is signed into a URL-safe token and
    served back through a ``/public/<token>`` route registered on the app.
    """
    def __init__(self, app: flask.Flask):
        # Signs/verifies link tokens; reuses the app's SECRET_KEY so tokens
        # survive process restarts but not key rotation.
        self._serializer = URLSafeTimedSerializer(
            secret_key=app.config["SECRET_KEY"],
        )
        # Root directory all keys are stored under.
        self._base_dir = app.config["LOCAL_STORAGE_PATH"]
        # Maximum age accepted for a signed link token.
        self._link_expiration = timedelta(days=7)
        # Register the serving route; Flask names the endpoint after the
        # function (``_temp_link_handler``), which ``get_link`` relies on.
        app.route("/public/<token>")(self._temp_link_handler)

    def _temp_link_handler(self, token: str):
        """Serve the file referenced by a signed token, or 404."""
        try:
            payload = self._serializer.loads(token, max_age=int(self._link_expiration.total_seconds()))
        except Exception:
            # NOTE(rossdylan): We are being broad here because in any case that
            # this fails we just want to abort. This is because the token has
            # expired, been tampered with, or is just invalid. Maybe we can
            # log each case differently later...
            abort(404)

        # Defensive: a valid signature over an unexpected payload shape.
        if "key" not in payload:
            abort(404)

        # NOTE(rossdylan): We are relying on flask's protections to avoid
        # traversals or any other weirdness here. /should/ be fine since we
        # sign and verify the file path anyway
        return send_from_directory(self._base_dir, payload["key"])

    def put(self, key: str, handle: IO[bytes]):
        """Copy the open binary file ``handle`` to ``<base_dir>/<key>``."""
        local_path = os.path.join(self._base_dir, key)
        # Keys contain a directory component ("files/...", "thumbnails/...").
        os.makedirs(os.path.dirname(local_path), exist_ok=True)
        with open(local_path, 'wb+') as f:
            shutil.copyfileobj(handle, f)

    def remove(self, key: str):
        """Delete ``<base_dir>/<key>``; raises OSError if it does not exist."""
        local_path = os.path.join(self._base_dir, key)
        os.remove(local_path)

    def get_link(self, key: str) -> str:
        """Return a signed, time-limited URL for ``key`` on this app."""
        return url_for("_temp_link_handler", token=self._serializer.dumps({"key": key}))


class S3Storage(FileStorage):
    """
    S3Storage is the main storage implementation that uses an s3-like system
    to store thumbnails and photos. Links are generated using the presigned
    url function of S3.
    """
    def __init__(self, app: flask.Flask):
        """
        Build an S3 client from app config.

        Requires S3_ACCESS_ID, S3_SECRET_KEY, S3_URI, and S3_BUCKET_ID in
        ``app.config`` (see config.env.py).
        """
        self._client = boto3.client(
            's3',
            aws_access_key_id=app.config['S3_ACCESS_ID'],
            aws_secret_access_key=app.config['S3_SECRET_KEY'],
            endpoint_url=app.config['S3_URI'],
            # sigv4 is required for presigned URLs on many non-AWS
            # S3-compatible backends — TODO confirm against the deployment.
            config=Config(signature_version='s3v4'),
        )
        self._bucket = app.config['S3_BUCKET_ID']
        # Lifetime of presigned GET links handed to clients.
        self._link_expiration = timedelta(minutes=5)

    def put(self, key: str, handle: IO[bytes]):
        """Stream the open binary file ``handle`` into the bucket at ``key``."""
        self._client.upload_fileobj(handle, self._bucket, key)

    def remove(self, key: str):
        """Delete the object stored at ``key`` from the bucket."""
        self._client.delete_object(Bucket=self._bucket, Key=key)

    def get_link(self, key: str) -> str:
        """Return a time-limited presigned GET URL for ``key``."""
        return self._client.generate_presigned_url(
            "get_object",
            Params={
                "Bucket": self._bucket,
                "Key": key,
            },
            # generate_presigned_url documents ExpiresIn as an integer number
            # of seconds; total_seconds() returns a float, so cast explicitly.
            ExpiresIn=int(self._link_expiration.total_seconds()),
        )
Loading