adjusting boto to store media in a bucket instead of locally on the server

Oliver Marks 2018-09-14 07:25:42 +01:00
parent 77a38f1da8
commit 9c23136214
9 changed files with 69 additions and 28 deletions
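The change swaps Django's filesystem-backed defaults for S3-backed storage classes. Django resolves the dotted paths in DEFAULT_FILE_STORAGE and STATICFILES_STORAGE lazily, so every FileField save and every collectstatic run goes through the configured backend. A minimal sketch of the effect, run from a configured Django shell (the file name and payload are illustrative):

from django.core.files.base import ContentFile
from django.core.files.storage import default_storage

# With DEFAULT_FILE_STORAGE pointing at MediaStorage (defined later in this
# commit), default_storage writes to the bucket instead of MEDIA_ROOT on disk.
path = default_storage.save('uploads/example.txt', ContentFile(b'hello'))
print(default_storage.url(path))  # URL rooted at the bucket endpoint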

View File

@@ -414,7 +414,11 @@ INSTALLED_APPS += ('django_extensions', )
INSTALLED_APPS += ('storages', )
INSTALLED_APPS += ('gunicorn', )
STATICFILES_FINDERS += ("compressor.finders.CompressorFinder", )
- STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
+ #STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
+ DEFAULT_FILE_STORAGE = 'mhackspace.core.storage.MediaStorage'
+ STATICFILES_STORAGE = 'mhackspace.core.storage.StaticStorage'
+ #COMPRESS_STORAGE = STATICFILES_STORAGE
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = '^trustee/'
@@ -516,8 +520,10 @@ TWITTER_CONSUMER_SECRET=env('TWITTER_CONSUMER_SECRET')
TWITTER_ACCESS_TOKEN=env('TWITTER_ACCESS_TOKEN')
TWITTER_ACCESS_SECRET=env('TWITTER_ACCESS_SECRET')
+ AWS_DEFAULT_ACL = None
+ LOCATION_PREFIX = env('BUCKET_PREFIX_PATH', default='')
+ MEDIAFILE_LOCATION = LOCATION_PREFIX + 'media'
+ STATICFILE_LOCATION = LOCATION_PREFIX + 'static'
- AWS_DEFAULT_ACL = 'public-read'
AWS_S3_OBJECT_PARAMETERS = {
    'CacheControl': 'max-age=86400',
}
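Two of these settings do per-object work on every upload: django-storages passes AWS_S3_OBJECT_PARAMETERS through on each put, and AWS_DEFAULT_ACL = None stops it sending an explicit ACL, deferring to bucket policy (the removed 'public-read' made every object individually public). Roughly the equivalent boto3 call for a single upload, where the bucket and key are made-up examples:

import boto3

s3 = boto3.client('s3')
s3.put_object(
    Bucket='example-bucket',       # hypothetical
    Key='media/example.txt',       # hypothetical
    Body=b'hello',
    CacheControl='max-age=86400',  # from AWS_S3_OBJECT_PARAMETERS
    # no ACL argument, matching AWS_DEFAULT_ACL = None
)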

View File

@@ -108,7 +108,7 @@ TEMPLATE_DEBUG = True
AWS_S3_SECURE_URLS = False
AWS_ACCESS_KEY_ID = env('MINIO_ACCESS_KEY')
AWS_SECRET_ACCESS_KEY = env('MINIO_SECRET_KEY')
- AWS_STORAGE_BUCKET_NAME = 'static'
+ AWS_STORAGE_BUCKET_NAME = 'mhackspace-local'
AWS_S3_ENDPOINT_URL = 'http://%s:9000' % socket.gethostbyname('bucket')
AWS_S3_OBJECT_PARAMETERS = {
    'CacheControl': 'max-age=86400',
@@ -118,12 +118,11 @@ AWS_S3_SECURE_URLS = True
STATIC_URL = '%s/%s/' % (AWS_S3_ENDPOINT_URL, AWS_STORAGE_BUCKET_NAME)
#STATICFILES_STORAGE = 'mhackspace.core.storage.SassStorageFix'
# COMPRESSOR
# ------------------------------------------------------------------------------
- COMPRESS_ENABLED = env.bool('COMPRESS_ENABLED', default=True)
- COMPRESS_STORAGE = STATICFILES_STORAGE
+ #COMPRESS_ENABLED = env.bool('COMPRESS_ENABLED', default=True)
+ #COMPRESS_STORAGE = STATICFILES_STORAGE
DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': False,
}
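socket.gethostbyname('bucket') resolves the MinIO container by its docker-compose service name, so the same settings work from any container on the compose network. A quick sanity check that the endpoint and credentials line up, assuming the MINIO_* variables from .env are exported:

import os
import socket

import boto3

endpoint = 'http://%s:9000' % socket.gethostbyname('bucket')
s3 = boto3.client(
    's3',
    endpoint_url=endpoint,
    aws_access_key_id=os.environ['MINIO_ACCESS_KEY'],
    aws_secret_access_key=os.environ['MINIO_SECRET_KEY'],
)
print([b['Name'] for b in s3.list_buckets()['Buckets']])  # expect 'mhackspace-local'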

View File

@@ -194,7 +194,6 @@ AWS_S3_OBJECT_PARAMETERS = {
AWS_LOCATION = 'static'
STATIC_URL = '%s/%s/%s/' % (AWS_S3_ENDPOINT_URL, AWS_STORAGE_BUCKET_NAME, AWS_LOCATION)
- STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
# COMPRESSOR
# ------------------------------------------------------------------------------

View File

@@ -169,7 +169,6 @@ AWS_S3_OBJECT_PARAMETERS = {
AWS_LOCATION = 'stage'
STATIC_URL = '%s/%s/%s/' % (AWS_S3_ENDPOINT_URL, AWS_STORAGE_BUCKET_NAME, AWS_LOCATION)
- STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
# COMPRESSOR
# ------------------------------------------------------------------------------
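Production and stage now build STATIC_URL identically, differing only in AWS_LOCATION. With hypothetical values the expansion looks like:

AWS_S3_ENDPOINT_URL = 'https://s3.example.com'   # hypothetical
AWS_STORAGE_BUCKET_NAME = 'mhackspace'           # hypothetical
AWS_LOCATION = 'stage'
STATIC_URL = '%s/%s/%s/' % (AWS_S3_ENDPOINT_URL, AWS_STORAGE_BUCKET_NAME, AWS_LOCATION)
# -> 'https://s3.example.com/mhackspace/stage/'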

View File

@@ -103,7 +103,7 @@ services:
    image: minio/minio
    env_file: .env
    volumes:
-       - ./mhackspace:/data
+       - ./bucket:/data
    command: server --config-dir /tmp/minio /data
  directory:
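The volume remap keeps MinIO's data under ./bucket, but MinIO does not create buckets by itself; the 'mhackspace-local' bucket from the local settings has to exist before the first upload. A one-time setup sketch (localhost:9000 assumes the compose file publishes MinIO's port):

import os

import boto3

s3 = boto3.client(
    's3',
    endpoint_url='http://localhost:9000',  # assumption: MinIO port published locally
    aws_access_key_id=os.environ['MINIO_ACCESS_KEY'],
    aws_secret_access_key=os.environ['MINIO_SECRET_KEY'],
)
s3.create_bucket(Bucket='mhackspace-local')  # bucket name from the local settings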

mhackspace/blog/reader.py Normal file (16 lines added)
View File

@@ -0,0 +1,16 @@
+ import feedparser
+
+ urls = [
+     'https://feeds.feedburner.com/projects-jl',
+     'https://hackaday.com/tag/emf-camp-2018/feed/',
+     'https://maidstone-hackspace.org.uk/blog/rss/',
+     'http://webboggles.com/feed/',
+     'https://blog.digitaloctave.com/rss.xml',
+ ]
+
+ for url in urls:
+     print(url)
+     parsed = feedparser.parse(url)
+     for post in parsed.entries:
+         print(post.title)
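reader.py reads as a standalone smoke test rather than application code: run directly (python mhackspace/blog/reader.py) it prints each feed URL followed by the titles feedparser extracts, a quick way to confirm the sources parse before they are wired into the importer changed below.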

View File

@@ -4,3 +4,13 @@ from django.conf import settings
class SassStorageFix(S3Boto3Storage):
    base_url = settings.AWS_S3_ENDPOINT_URL

+ class MediaStorage(S3Boto3Storage):
+     base_url = settings.AWS_S3_ENDPOINT_URL
+     location = settings.MEDIAFILE_LOCATION

+ class StaticStorage(S3Boto3Storage):
+     base_url = settings.AWS_S3_ENDPOINT_URL
+     location = settings.STATICFILE_LOCATION
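The location attribute is what splits a single bucket into media and static key prefixes (optionally under BUCKET_PREFIX_PATH). Used directly from a Django shell, as a sketch with an illustrative file:

from django.core.files.base import ContentFile
from mhackspace.core.storage import MediaStorage

storage = MediaStorage()
name = storage.save('badge.png', ContentFile(b'...'))  # hypothetical upload
# with BUCKET_PREFIX_PATH unset, the object key becomes 'media/badge.png'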

View File

@@ -1,33 +1,43 @@
# -*- coding: utf-8 -*-
import os
import logging
+ import feedparser
from urllib.request import urlretrieve
from django.core.files import File
from django.utils.timezone import make_aware
from django.core.management import call_command
from stdimage.utils import render_variations
- from scaffold.readers.rss_reader import feed_reader
+ # from scaffold.readers.rss_reader import feed_reader
from mhackspace.feeds.models import Feed, Article, image_variations
logger = logging.getLogger(__name__)
+ def feed_reader(feeds):
+     for feed in feeds:
+         print(feed)
+         yield feedparser.parse(feed['url'])
def import_feeds(feed=False):
    remove_old_articles()
    rss_articles = feed_reader(get_active_feeds(feed))
    articles = []
    for article in rss_articles:
-         articles.append(Article(
-             url=article['url'].decode(),
-             feed=Feed.objects.get(pk=article['id']),
-             title=article['title'].decode(),
-             original_image=article['image'],
-             description=article['description'].decode(),
-             date=make_aware(article['date'])
-         ))
+         print(article)
+         articles.append(
+             Article(
+                 url=article["url"].decode(),
+                 feed=Feed.objects.get(pk=article["id"]),
+                 title=article["title"].decode(),
+                 original_image=article["image"],
+                 description=article["description"].decode(),
+                 date=make_aware(article["date"]),
+             )
+         )
    articles = Article.objects.bulk_create(articles)
    download_remote_images()
@@ -48,14 +58,17 @@ def download_remote_images():
            result = urlretrieve(article.original_image.__str__())
            article.image.save(
                os.path.basename(article.original_image.__str__()),
-                 File(open(result[0], 'rb'))
+                 File(open(result[0], "rb")),
            )
            render_variations(result[0], image_variations, replace=True)
            article.save()
-         except:
+         except Exception as e:
-             logger.exception(result)
+             logger.exception(result[0])
-             logger.exception('Unable to download remote image for %s' % article.original_image)
+             logger.exception(
+                 "Unable to download remote image for %s"
+                 % article.original_image
+             )
def get_active_feeds(feed=False):
@@ -68,9 +81,7 @@ def get_active_feeds(feed=False):
    for feed in feeds:
        if feed.enabled is False:
            continue
-         rss_feeds.append({
-             'id': feed.id,
-             'author': feed.author,
-             'url': feed.feed_url
-         })
+         rss_feeds.append(
+             {"id": feed.id, "author": feed.author, "url": feed.feed_url}
+         )
    return rss_feeds
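One caveat: the inline feed_reader yields whole feedparser results, while import_feeds still indexes article['url'], article['title'], and so on and calls .decode() on them, the per-post dict shape the commented-out scaffold reader presumably produced (also, the e bound by except Exception as e is unused; logger.exception already records the traceback). If the scaffold dependency is going away for good, a generator that flattens entries into that shape would keep import_feeds working. A hedged sketch: the field mapping is inferred from the keys used above, and the date and image fields would still need adapting.

import feedparser

def feed_reader(feeds):
    # Yield one dict per post, shaped the way import_feeds expects.
    for feed in feeds:
        parsed = feedparser.parse(feed['url'])
        for entry in parsed.entries:
            yield {
                'id': feed['id'],
                'url': entry.link.encode(),                # downstream calls .decode()
                'title': entry.title.encode(),
                'description': entry.description.encode(),
                'image': '',                               # image extraction not shown in this diff
                'date': entry.published_parsed,            # struct_time; make_aware() wants a datetime
            }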

View File

@@ -104,3 +104,4 @@ python-magic==0.4.15
ldap3==2.5.1
bcrypt==3.1.4
python-twitter==3.4.2
+ feedparser
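Unlike every other entry in this file, feedparser is left unpinned; pinning it (for example feedparser==5.2.1, the current release at the time, as an assumption) would keep builds reproducible.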