Merge branch 'merge-python3-port'

Conflicts:
	setup.py

commit a6252cbf21
.gitignore (5 changes)
@@ -24,6 +24,7 @@
 /celery.db
 /kombu.db
 /server-log.txt
+*.egg/
 
 # pyconfigure/automake generated files
 /Makefile
@@ -44,3 +45,7 @@
 
 # The legacy of buildout
 .installed.cfg
+
+# Virtualenv, tox
+venv*
+.tox/
alembic.ini (new file, 59 lines)
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = %(here)s/mediagoblin/db/migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

sqlalchemy.url = sqlite:///mediagoblin.db


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -20,7 +20,7 @@ selfname=$(basename "$0")
 local_bin="./bin"
 case "$selfname" in
     lazyserver.sh)
-        starter_cmd=paster
+        starter_cmd=gunicorn
         ini_prefix=paste
         ;;
     lazycelery.sh)
@@ -36,9 +36,8 @@ esac
 if [ "$1" = "-h" ]; then
     echo "$0 [-h] [-c filename.ini] [ARGS_to_${starter_cmd} ...]"
     echo ""
-    echo "   For example:"
-    echo "   $0 -c fcgi.ini port_number=23371"
-    echo "   or: $0 --server-name=fcgi --log-file=paste.log"
+    echo "   For Gunicorn settings, see at:"
+    echo "   http://docs.gunicorn.org/en/19.0/settings.html"
    echo ""
    echo "   The configfile defaults to ${ini_prefix}_local.ini,"
    echo "   if that is readable, otherwise ${ini_prefix}.ini."
@@ -71,7 +70,7 @@ set -x
 export CELERY_ALWAYS_EAGER=true
 case "$selfname" in
     lazyserver.sh)
-        $starter serve "$ini_file" "$@" --reload
+        $starter --paste "$ini_file" $@
         ;;
     lazycelery.sh)
         MEDIAGOBLIN_CONFIG="${ini_file}" \
mediagoblin/_compat.py (new file, 32 lines)
import functools
import warnings

import six

if six.PY3:
    from email.mime.text import MIMEText
else:
    from email.MIMEText import MIMEText


def encode_to_utf8(method):
    def wrapper(self):
        if six.PY2 and isinstance(method(self), six.text_type):
            return method(self).encode('utf-8')
        return method(self)
    functools.update_wrapper(wrapper, method, ['__name__', '__doc__'])
    return wrapper


# based on django.utils.encoding.python_2_unicode_compatible
def py2_unicode(klass):
    if six.PY2:
        if '__str__' not in klass.__dict__:
            warnings.warn("@py2_unicode cannot be applied "
                          "to %s because it doesn't define __str__()." %
                          klass.__name__)
        klass.__unicode__ = klass.__str__
        klass.__str__ = encode_to_utf8(klass.__unicode__)
        if '__repr__' in klass.__dict__:
            klass.__repr__ = encode_to_utf8(klass.__repr__)
    return klass
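The py2_unicode decorator lets classes define a single Python 3 style __str__ and still behave correctly on Python 2. A minimal usage sketch, assuming a hypothetical Comment class that is not part of this diff:

import six
from mediagoblin._compat import py2_unicode

@py2_unicode
class Comment(object):          # hypothetical example class
    def __init__(self, text):
        self.text = text

    def __str__(self):          # written once, Python 3 style
        return self.text

c = Comment(u'h\u00e9llo')
# On Python 2 the decorator copies __str__ to __unicode__ and makes __str__
# return UTF-8 bytes; on Python 3 the class is left untouched.
print(six.text_type(c))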
@@ -23,6 +23,7 @@ from mediagoblin.tools.routing import endpoint_to_controller
 from werkzeug.wrappers import Request
 from werkzeug.exceptions import HTTPException
 from werkzeug.routing import RequestRedirect
+from werkzeug.wsgi import SharedDataMiddleware
 
 from mediagoblin import meddleware, __version__
 from mediagoblin.db.util import check_db_up_to_date
@@ -281,8 +282,11 @@ def paste_app_factory(global_config, **app_config):
 
     if not mediagoblin_config:
         raise IOError("Usable mediagoblin config not found.")
+    del app_config['config']
 
     mgoblin_app = MediaGoblinApp(mediagoblin_config)
+    mgoblin_app.call_backend = SharedDataMiddleware(mgoblin_app.call_backend,
+                                                    exports=app_config)
     mgoblin_app = hook_transform('wrap_wsgi', mgoblin_app)
 
     return mgoblin_app
@@ -16,6 +16,8 @@
 
 
 import logging
 
+import six
+
 import wtforms
 from sqlalchemy import or_
@@ -136,7 +138,7 @@ def register_user(request, register_form):
     user.save()
 
     # log the user in
-    request.session['user_id'] = unicode(user.id)
+    request.session['user_id'] = six.text_type(user.id)
     request.session.save()
 
     # send verification email
@@ -14,6 +14,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import six
+
 from itsdangerous import BadSignature
 
 from mediagoblin import messages, mg_globals
@@ -93,7 +95,7 @@ def login(request):
 # set up login in session
 if login_form.stay_logged_in.data:
     request.session['stay_logged_in'] = True
-request.session['user_id'] = unicode(user.id)
+request.session['user_id'] = six.text_type(user.id)
 request.session.save()
 
 if request.form.get('next'):
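The recurring unicode(...) to six.text_type(...) substitution in these hunks is the core of the port: six.text_type is unicode on Python 2 and str on Python 3, so session values stay text on both. A standalone sketch of the behaviour, not taken from the diff:

import six

user_id = 42
session_value = six.text_type(user_id)

# Python 2: type(session_value) is unicode -> u'42'
# Python 3: type(session_value) is str     -> '42'
assert session_value == u'42'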
@@ -14,14 +14,66 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import unicode_literals
+
+import logging
+import os
+
+from alembic import command
+from alembic.config import Config
+from alembic.migration import MigrationContext
+
+from mediagoblin.db.base import Base
 from mediagoblin.tools.common import simple_printer
 from sqlalchemy import Table
 from sqlalchemy.sql import select
 
+log = logging.getLogger(__name__)
+
+
 class TableAlreadyExists(Exception):
     pass
 
 
+class AlembicMigrationManager(object):
+
+    def __init__(self, session):
+        root_dir = os.path.abspath(os.path.dirname(os.path.dirname(
+            os.path.dirname(__file__))))
+        alembic_cfg_path = os.path.join(root_dir, 'alembic.ini')
+        self.alembic_cfg = Config(alembic_cfg_path)
+        self.session = session
+
+    def get_current_revision(self):
+        context = MigrationContext.configure(self.session.bind)
+        return context.get_current_revision()
+
+    def upgrade(self, version):
+        return command.upgrade(self.alembic_cfg, version or 'head')
+
+    def downgrade(self, version):
+        if isinstance(version, int) or version is None or version.isdigit():
+            version = 'base'
+        return command.downgrade(self.alembic_cfg, version)
+
+    def stamp(self, revision):
+        return command.stamp(self.alembic_cfg, revision=revision)
+
+    def init_tables(self):
+        Base.metadata.create_all(self.session.bind)
+        # load the Alembic configuration and generate the
+        # version table, "stamping" it with the most recent rev:
+        command.stamp(self.alembic_cfg, 'head')
+
+    def init_or_migrate(self, version=None):
+        if self.get_current_revision() is None:
+            log.info('Initializing tables and stamping it with '
+                     'the most recent migration...')
+            self.init_tables()
+        else:
+            self.upgrade(version)
+
+
 class MigrationManager(object):
     """
     Migration handling tool.
@@ -39,7 +91,7 @@ class MigrationManager(object):
           - migration_registry: where we should find all migrations to
             run
         """
-        self.name = unicode(name)
+        self.name = name
         self.models = models
         self.foundations = foundations
         self.session = session
@@ -230,7 +282,7 @@ class MigrationManager(object):
         for migration_number, migration_func in migrations_to_run:
             self.printer(
                 u'   + Running migration %s, "%s"... ' % (
-                    migration_number, migration_func.func_name))
+                    migration_number, migration_func.__name__))
             migration_func(self.session)
             self.set_current_migration(migration_number)
             self.printer('done.\n')
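The new AlembicMigrationManager wraps the Alembic command API around the alembic.ini added above: a fresh database is created and stamped as 'head', an existing one is upgraded. A rough sketch of how it is meant to be driven; the SQLite URL here is only an illustration, dbupdate builds the engine from the app config instead:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from mediagoblin.db.migration_tools import AlembicMigrationManager

engine = create_engine('sqlite:///mediagoblin.db')   # illustrative engine
Session = sessionmaker(bind=engine)

manager = AlembicMigrationManager(Session())
# Fresh database: create all tables and stamp them as 'head'.
# Existing database: run any pending Alembic migrations.
manager.init_or_migrate()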
@@ -17,13 +17,15 @@
 import datetime
 import uuid
 
+import six
+
 from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
                         Integer, Unicode, UnicodeText, DateTime,
                         ForeignKey, Date, Index)
 from sqlalchemy.exc import ProgrammingError
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.sql import and_
-from migrate.changeset.constraint import UniqueConstraint
+from sqlalchemy.schema import UniqueConstraint
 
 from mediagoblin.db.extratypes import JSONEncoded, MutationDict
 from mediagoblin.db.migration_tools import (
@@ -249,7 +251,7 @@ def mediaentry_new_slug_era(db):
     for row in db.execute(media_table.select()):
         # no slug, try setting to an id
         if not row.slug:
-            append_garbage_till_unique(row, unicode(row.id))
+            append_garbage_till_unique(row, six.text_type(row.id))
         # has "=" or ":" in it... we're getting rid of those
         elif u"=" in row.slug or u":" in row.slug:
             append_garbage_till_unique(
@@ -278,7 +280,7 @@ def unique_collections_slug(db):
             existing_slugs[row.creator].append(row.slug)
 
     for row_id in slugs_to_change:
-        new_slug = unicode(uuid.uuid4())
+        new_slug = six.text_type(uuid.uuid4())
         db.execute(collection_table.update().
                    where(collection_table.c.id == row_id).
                    values(slug=new_slug))
mediagoblin/db/migrations/README (new file, 1 line)
Generic single-database configuration.
mediagoblin/db/migrations/env.py (new file, 71 lines)
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata
        )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
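As generated, env.py leaves target_metadata set to None, so alembic's autogenerate feature cannot diff the models yet; only hand-written migration scripts will run. A sketch of how it could be wired to MediaGoblin's declarative Base, offered purely as an assumption and not something this commit does:

# Hypothetical replacement for the `target_metadata = None` stanza above,
# assuming the app's declarative Base should drive autogenerate.
from mediagoblin.db.base import Base

target_metadata = Base.metadata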
mediagoblin/db/migrations/script.py.mako (new file, 22 lines)
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}

"""

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
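Alembic renders this Mako template every time a new revision is created, filling in the revision ids and the message. A revision module generated from it would look roughly like the following; the ids, message, table and column names are all made up for illustration:

"""add a nonce column

Revision ID: 1e32a7c9b0d4
Revises: None
Create Date: 2014-09-01 12:00:00.000000

"""

# revision identifiers, used by Alembic.
revision = '1e32a7c9b0d4'
down_revision = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    # hypothetical change, only to show the shape of a generated script
    op.add_column('example_table', sa.Column('nonce', sa.Unicode(), nullable=True))


def downgrade():
    op.drop_column('example_table', 'nonce')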
mediagoblin/db/migrations/versions/.gitkeep (new empty file)
@@ -18,6 +18,8 @@
 TODO: indexes on foreignkeys, where useful.
 """
 
+from __future__ import print_function
+
 import logging
 import datetime
 
@@ -38,17 +40,11 @@ from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
 from mediagoblin.tools.files import delete_media_files
 from mediagoblin.tools.common import import_component
 
-# It's actually kind of annoying how sqlalchemy-migrate does this, if
-# I understand it right, but whatever. Anyway, don't remove this :P
-#
-# We could do migration calls more manually instead of relying on
-# this import-based meddling...
-from migrate import changeset
+import six
 
 _log = logging.getLogger(__name__)
 
 
 class User(Base, UserMixin):
     """
     TODO: We should consider moving some rarely used fields
@@ -344,7 +340,7 @@ class MediaEntry(Base, MediaEntryMixin):
         return the value of the key.
         """
         media_file = MediaFile.query.filter_by(media_entry=self.id,
-                                               name=unicode(file_key)).first()
+                                               name=six.text_type(file_key)).first()
 
         if media_file:
             if metadata_key:
@@ -357,11 +353,11 @@ class MediaEntry(Base, MediaEntryMixin):
         Update the file_metadata of a MediaFile.
         """
         media_file = MediaFile.query.filter_by(media_entry=self.id,
-                                               name=unicode(file_key)).first()
+                                               name=six.text_type(file_key)).first()
 
         file_metadata = media_file.file_metadata or {}
 
-        for key, value in kwargs.iteritems():
+        for key, value in six.iteritems(kwargs):
             file_metadata[key] = value
 
         media_file.file_metadata = file_metadata
@@ -386,7 +382,7 @@ class MediaEntry(Base, MediaEntryMixin):
             media_data.get_media_entry = self
         else:
             # Update old media data
-            for field, value in kwargs.iteritems():
+            for field, value in six.iteritems(kwargs):
                 setattr(media_data, field, value)
 
     @memoized_property
@@ -415,7 +411,7 @@ class MediaEntry(Base, MediaEntryMixin):
         # Delete all related files/attachments
         try:
             delete_media_files(self)
-        except OSError, error:
+        except OSError as error:
             # Returns list of files we failed to delete
             _log.error('No such files from the user "{1}" to delete: '
                        '{0}'.format(str(error), self.get_uploader))
@@ -1125,7 +1121,7 @@ def show_table_init(engine_uri):
 
 if __name__ == '__main__':
     from sys import argv
-    print repr(argv)
+    print(repr(argv))
     if len(argv) == 2:
         uri = argv[1]
     else:
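dict.iteritems() only exists on Python 2; six.iteritems() picks iteritems() or items() for you, which is why the metadata loops above change. A standalone sketch of the idiom used throughout these hunks:

import six

file_metadata = {}
kwargs = {'width': 640, 'height': 480}

# six.iteritems(d) calls d.iteritems() on Python 2 and d.items() on Python 3,
# so the loop below works unchanged on both.
for key, value in six.iteritems(kwargs):
    file_metadata[key] = value

assert file_metadata == kwargs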
@@ -18,6 +18,8 @@
 from sqlalchemy import create_engine, event
 import logging
 
+import six
+
 from mediagoblin.db.base import Base, Session
 from mediagoblin import mg_globals
 
@@ -28,7 +30,7 @@ class DatabaseMaster(object):
     def __init__(self, engine):
         self.engine = engine
 
-        for k, v in Base._decl_class_registry.iteritems():
+        for k, v in six.iteritems(Base._decl_class_registry):
             setattr(self, k, v)
 
     def commit(self):
@@ -16,10 +16,11 @@
 
 from functools import wraps
 
-from urlparse import urljoin
 from werkzeug.exceptions import Forbidden, NotFound
 from oauthlib.oauth1 import ResourceEndpoint
 
+from six.moves.urllib.parse import urljoin
+
 from mediagoblin import mg_globals as mgg
 from mediagoblin import messages
 from mediagoblin.db.models import MediaEntry, User, MediaComment, AccessToken
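urlparse was renamed to urllib.parse in Python 3; six.moves papers over the rename so a single import line works everywhere. A quick standalone check:

from six.moves.urllib.parse import urljoin

# Resolves to urlparse.urljoin on Python 2 and urllib.parse.urljoin on Python 3.
assert urljoin('http://example.org/u/alice/', 'media/42') == \
    'http://example.org/u/alice/media/42'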
@@ -14,6 +14,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import six
+
 from datetime import datetime
 
 from itsdangerous import BadSignature
@@ -82,7 +84,7 @@ def edit_media(request, media):
             media.tags = convert_to_tag_list_of_dicts(
                 form.tags.data)
 
-            media.license = unicode(form.license.data) or None
+            media.license = six.text_type(form.license.data) or None
             media.slug = slug
             media.save()
 
@@ -140,7 +142,7 @@ def edit_attachments(request, media):
 
             attachment_public_filepath \
                 = mg_globals.public_store.get_unique_filepath(
-                ['media_entries', unicode(media.id), 'attachment',
+                ['media_entries', six.text_type(media.id), 'attachment',
                  public_filename])
 
             attachment_public_file = mg_globals.public_store.get_file(
@@ -205,8 +207,8 @@ def edit_profile(request, url_user=None):
         bio=user.bio)
 
     if request.method == 'POST' and form.validate():
-        user.url = unicode(form.url.data)
-        user.bio = unicode(form.bio.data)
+        user.url = six.text_type(form.url.data)
+        user.bio = six.text_type(form.bio.data)
 
         user.save()
 
@@ -321,9 +323,9 @@ def edit_collection(request, collection):
             form.slug.errors.append(
                 _(u'A collection with that slug already exists for this user.'))
         else:
-            collection.title = unicode(form.title.data)
-            collection.description = unicode(form.description.data)
-            collection.slug = unicode(form.slug.data)
+            collection.title = six.text_type(form.title.data)
+            collection.description = six.text_type(form.description.data)
+            collection.slug = six.text_type(form.slug.data)
 
             collection.save()
 
@@ -453,7 +455,7 @@ def edit_metadata(request, media):
         return redirect_obj(request, media)
 
     if len(form.media_metadata) == 0:
-        for identifier, value in media.media_metadata.iteritems():
+        for identifier, value in six.iteritems(media.media_metadata):
             if identifier == "@context": continue
             form.media_metadata.append_entry({
                 'identifier':identifier,
@@ -140,7 +140,7 @@ def feed_endpoint(request):
         return json_error("No such 'user' with id '{0}'".format(username), 404)
 
     if request.data:
-        data = json.loads(request.data)
+        data = json.loads(request.data.decode())
     else:
         data = {"verb": None, "object": {}}
 
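On Python 3, Werkzeug's request.data is bytes, and the Python 3 releases this port targeted only accepted str in json.loads(), hence the explicit .decode(). A standalone illustration of the same pattern:

import json

raw = b'{"verb": "post", "object": {}}'   # request.data arrives as bytes on Python 3

# Decoding first keeps json.loads() happy everywhere; Python 3 releases
# before 3.6 raise TypeError when handed bytes directly.
data = json.loads(raw.decode())
assert data['verb'] == 'post'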
@@ -17,6 +17,8 @@
 import argparse
 import os
 
+import six
+
 from mediagoblin.tools.common import import_component
 
 
@@ -61,6 +63,10 @@ SUBCOMMAND_MAP = {
         'setup': 'mediagoblin.gmg_commands.deletemedia:parser_setup',
         'func': 'mediagoblin.gmg_commands.deletemedia:deletemedia',
         'help': 'Delete media entries'},
+    'serve': {
+        'setup': 'mediagoblin.gmg_commands.serve:parser_setup',
+        'func': 'mediagoblin.gmg_commands.serve:serve',
+        'help': 'PasteScript replacement'},
     'batchaddmedia': {
         'setup': 'mediagoblin.gmg_commands.batchaddmedia:parser_setup',
         'func': 'mediagoblin.gmg_commands.batchaddmedia:batchaddmedia',
@@ -98,7 +104,7 @@ def main_cli():
             "otherwise mediagoblin.ini"))
 
     subparsers = parser.add_subparsers(help='sub-command help')
-    for command_name, command_struct in SUBCOMMAND_MAP.iteritems():
+    for command_name, command_struct in six.iteritems(SUBCOMMAND_MAP):
         if 'help' in command_struct:
             subparser = subparsers.add_parser(
                 command_name, help=command_struct['help'])
@@ -14,8 +14,12 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import print_function
+
 import os
 
+import six
+
 from mediagoblin.gmg_commands import util as commands_util
 from mediagoblin.submit.lib import (
     submit_media, get_upload_file_limits,
@@ -68,14 +72,14 @@ def addmedia(args):
     # get the user
     user = app.db.User.query.filter_by(username=args.username.lower()).first()
     if user is None:
-        print "Sorry, no user by username '%s'" % args.username
+        print("Sorry, no user by username '%s'" % args.username)
         return
 
     # check for the file, if it exists...
     filename = os.path.split(args.filename)[-1]
     abs_filename = os.path.abspath(args.filename)
     if not os.path.exists(abs_filename):
-        print "Can't find a file with filename '%s'" % args.filename
+        print("Can't find a file with filename '%s'" % args.filename)
         return
 
     upload_limit, max_file_size = get_upload_file_limits(user)
@@ -85,21 +89,21 @@ def addmedia(args):
        if some_string is None:
            return None
        else:
-           return unicode(some_string)
+           return six.text_type(some_string)
 
    try:
        submit_media(
            mg_app=app,
            user=user,
-           submitted_file=file(abs_filename, 'r'), filename=filename,
+           submitted_file=open(abs_filename, 'r'), filename=filename,
            title=maybe_unicodeify(args.title),
            description=maybe_unicodeify(args.description),
            license=maybe_unicodeify(args.license),
            tags_string=maybe_unicodeify(args.tags) or u"",
            upload_limit=upload_limit, max_file_size=max_file_size)
    except FileUploadLimit:
-       print "This file is larger than the upload limits for this site."
+       print("This file is larger than the upload limits for this site.")
    except UserUploadLimit:
-       print "This file will put this user past their upload limits."
+       print("This file will put this user past their upload limits.")
    except UserPastUploadLimit:
-       print "This user is already past their upload limits."
+       print("This user is already past their upload limits.")
@@ -19,7 +19,7 @@ import logging
 from sqlalchemy.orm import sessionmaker
 
 from mediagoblin.db.open import setup_connection_and_db_from_config
-from mediagoblin.db.migration_tools import MigrationManager
+from mediagoblin.db.migration_tools import MigrationManager, AlembicMigrationManager
 from mediagoblin.init import setup_global_and_app_config
 from mediagoblin.tools.common import import_component
 
@@ -106,6 +106,13 @@ forgotten to add it? ({1})'.format(plugin, exc))
     return managed_dbdata
 
 
+def run_alembic_migrations(db, app_config, global_config):
+    """Initializes a database and runs all Alembic migrations."""
+    Session = sessionmaker(bind=db.engine)
+    manager = AlembicMigrationManager(Session())
+    manager.init_or_migrate()
+
+
 def run_dbupdate(app_config, global_config):
     """
     Initialize or migrate the database as specified by the config file.
@@ -116,8 +123,9 @@ def run_dbupdate(app_config, global_config):
 
     # Set up the database
     db = setup_connection_and_db_from_config(app_config, migrations=True)
-    #Run the migrations
+    # Run the migrations
     run_all_migrations(db, app_config, global_config)
+    run_alembic_migrations(db, app_config, global_config)
 
 
 def run_all_migrations(db, app_config, global_config):
@@ -131,7 +139,7 @@ def run_all_migrations(db, app_config, global_config):
     """
     # Gather information from all media managers / projects
     dbdatas = gather_database_data(
-        global_config.get('plugins', {}).keys())
+        list(global_config.get('plugins', {}).keys()))
 
     Session = sessionmaker(bind=db.engine)
 
@@ -14,6 +14,7 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import print_function
 import sys
 
 from mediagoblin.gmg_commands import util as commands_util
@@ -37,8 +38,8 @@ def deletemedia(args):
     for media in medias:
         found_medias.add(media.id)
         media.delete()
-        print 'Media ID %d has been deleted.' % media.id
+        print('Media ID %d has been deleted.' % media.id)
     for media in media_ids - found_medias:
-        print 'Can\'t find a media with ID %d.' % media
-    print 'Done.'
+        print('Can\'t find a media with ID %d.' % media)
+    print('Done.')
     sys.exit(0)
@@ -13,6 +13,9 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import print_function
+
 import argparse
 import os
 
@@ -143,7 +146,7 @@ def available(args):
         manager = get_processing_manager_for_type(media_type)
     except ProcessingManagerDoesNotExist:
         entry = MediaEntry.query.filter_by(id=args.id_or_type).first()
-        print 'No such processing manager for {0}'.format(entry.media_type)
+        print('No such processing manager for {0}'.format(entry.media_type))
 
     if args.state:
         processors = manager.list_all_processors_by_state(args.state)
@@ -152,25 +155,25 @@ def available(args):
     else:
         processors = manager.list_eligible_processors(media_entry)
 
-    print "Available processors:"
-    print "====================="
-    print ""
+    print("Available processors:")
+    print("=====================")
+    print("")
 
     if args.action_help:
         for processor in processors:
-            print processor.name
-            print "-" * len(processor.name)
+            print(processor.name)
+            print("-" * len(processor.name))
 
             parser = processor.generate_parser()
             parser.print_help()
-            print ""
+            print("")
 
     else:
         for processor in processors:
            if processor.description:
-                print " - %s: %s" % (processor.name, processor.description)
+                print(" - %s: %s" % (processor.name, processor.description))
            else:
-                print " - %s" % processor.name
+                print(" - %s" % processor.name)
 
 
 def run(args, media_id=None):
@@ -185,12 +188,12 @@ def run(args, media_id=None):
         processor_class = manager.get_processor(
             args.reprocess_command, media_entry)
     except ProcessorDoesNotExist:
-        print 'No such processor "%s" for media with id "%s"' % (
-            args.reprocess_command, media_entry.id)
+        print('No such processor "%s" for media with id "%s"' % (
+            args.reprocess_command, media_entry.id))
         return
     except ProcessorNotEligible:
-        print 'Processor "%s" exists but media "%s" is not eligible' % (
-            args.reprocess_command, media_entry.id)
+        print('Processor "%s" exists but media "%s" is not eligible' % (
+            args.reprocess_command, media_entry.id))
         return
 
     reprocess_parser = processor_class.generate_parser()
@@ -203,7 +206,7 @@ def run(args, media_id=None):
 
     except ProcessingManagerDoesNotExist:
         entry = MediaEntry.query.filter_by(id=media_id).first()
-        print 'No such processing manager for {0}'.format(entry.media_type)
+        print('No such processing manager for {0}'.format(entry.media_type))
 
 
 def bulk_run(args):
@@ -233,12 +236,12 @@ def thumbs(args):
         processor_class = manager.get_processor(
             'resize', media_entry)
     except ProcessorDoesNotExist:
-        print 'No such processor "%s" for media with id "%s"' % (
-            'resize', media_entry.id)
+        print('No such processor "%s" for media with id "%s"' % (
+            'resize', media_entry.id))
         return
     except ProcessorNotEligible:
-        print 'Processor "%s" exists but media "%s" is not eligible' % (
-            'resize', media_entry.id)
+        print('Processor "%s" exists but media "%s" is not eligible' % (
+            'resize', media_entry.id))
         return
 
     reprocess_parser = processor_class.generate_parser()
@@ -260,7 +263,7 @@ def thumbs(args):
             reprocess_info=reprocess_request)
 
     except ProcessingManagerDoesNotExist:
-        print 'No such processing manager for {0}'.format(entry.media_type)
+        print('No such processing manager for {0}'.format(entry.media_type))
 
 
 def initial(args):
@@ -276,7 +279,7 @@ def initial(args):
             media_entry,
             reprocess_action='initial')
     except ProcessingManagerDoesNotExist:
-        print 'No such processing manager for {0}'.format(entry.media_type)
+        print('No such processing manager for {0}'.format(entry.media_type))
 
 
 def reprocess(args):
mediagoblin/gmg_commands/serve.py (new file, 66 lines)
# GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors.  See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import print_function

from paste.deploy import loadapp, loadserver


class ServeCommand(object):

    def loadserver(self, server_spec, name, relative_to, **kwargs):
        return loadserver(server_spec, name=name, relative_to=relative_to,
                          **kwargs)

    def loadapp(self, app_spec, name, relative_to, **kwargs):
        return loadapp(app_spec, name=name, relative_to=relative_to, **kwargs)

    def daemonize(self):
        # TODO: pass to gunicorn if available
        pass

    def restart_with_reloader(self):
        pass

    def restart_with_monitor(self, reloader=False):
        pass

    def run(self):
        print('Running...')


def parser_setup(subparser):
    subparser.add_argument('config', metavar='CONFIG_FILE')
    subparser.add_argument('command',
                           choices=['start', 'stop', 'restart', 'status'],
                           nargs='?', default='start')
    subparser.add_argument('-n', '--app-name',
                           dest='app_name',
                           metavar='NAME',
                           help="Load the named application (default main)")
    subparser.add_argument('-s', '--server',
                           dest='server',
                           metavar='SERVER_TYPE',
                           help="Use the named server.")
    subparser.add_argument('--reload',
                           dest='reload',
                           action='store_true',
                           help="Use auto-restart file monitor")


def serve(args):
    serve_cmd = ServeCommand()  # TODO: pass args to it
    serve_cmd.run()
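ServeCommand is still a skeleton at this point: run() only prints, and daemonize/reload are TODOs. The PasteDeploy helpers it wraps would be used roughly as below once the command is finished; the ini filename is only an example, not something this commit defines:

import os

from paste.deploy import loadapp, loadserver

ini_file = os.path.abspath('paste_local.ini')   # example config name

# 'config:' tells PasteDeploy to read an .ini file; 'main' is the default
# [app:main] / [server:main] section name.
app = loadapp('config:' + ini_file, name='main', relative_to=os.getcwd())
server = loadserver('config:' + ini_file, name='main', relative_to=os.getcwd())

# A finished ServeCommand.run() would hand the WSGI app to the server runner:
server(app)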
@@ -14,6 +14,10 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+from __future__ import print_function
+
+import six
+
 from mediagoblin.gmg_commands import util as commands_util
 from mediagoblin import auth
 from mediagoblin import mg_globals
@@ -45,13 +49,13 @@ def adduser(args):
             ).count()
 
     if users_with_username:
-        print u'Sorry, a user with that name already exists.'
+        print(u'Sorry, a user with that name already exists.')
 
     else:
         # Create the user
         entry = db.User()
-        entry.username = args.username.lower()
-        entry.email = unicode(args.email)
+        entry.username = six.text_type(args.username.lower())
+        entry.email = six.text_type(args.email)
         entry.pw_hash = auth.gen_password_hash(args.password)
         default_privileges = [
             db.Privilege.query.filter(
@@ -66,7 +70,7 @@ def adduser(args):
         entry.all_privileges = default_privileges
         entry.save()
 
-        print "User created (and email marked as verified)"
+        print(u"User created (and email marked as verified)")
 
 
 def makeadmin_parser_setup(subparser):
@@ -81,16 +85,16 @@ def makeadmin(args):
     db = mg_globals.database
 
     user = db.User.query.filter_by(
-        username=unicode(args.username.lower())).one()
+        username=six.text_type(args.username.lower())).one()
     if user:
         user.all_privileges.append(
             db.Privilege.query.filter(
                 db.Privilege.privilege_name==u'admin').one()
         )
         user.save()
-        print 'The user is now Admin'
+        print(u'The user is now Admin')
     else:
-        print 'The user doesn\'t exist'
+        print(u'The user doesn\'t exist')
 
 
 def changepw_parser_setup(subparser):
@@ -108,13 +112,13 @@ def changepw(args):
     db = mg_globals.database
 
     user = db.User.query.filter_by(
-        username=unicode(args.username.lower())).one()
+        username=six.text_type(args.username.lower())).one()
     if user:
         user.pw_hash = auth.gen_password_hash(args.password)
         user.save()
-        print 'Password successfully changed'
+        print(u'Password successfully changed')
     else:
-        print 'The user doesn\'t exist'
+        print(u'The user doesn\'t exist')
 
 
 def deleteuser_parser_setup(subparser):
@@ -132,6 +136,6 @@ def deleteuser(args):
         username=unicode(args.username.lower())).first()
     if user:
         user.delete()
-        print 'The user %s has been deleted' % args.username
+        print('The user %s has been deleted' % args.username)
     else:
-        print 'The user %s doesn\'t exist' % args.username
+        print('The user %s doesn\'t exist' % args.username)
@@ -19,6 +19,8 @@ import sys
 import datetime
 import logging
 
+import six
+
 from celery import Celery
 from mediagoblin.tools.pluginapi import hook_runall
 
@@ -48,7 +50,7 @@ def get_celery_settings_dict(app_config, global_config,
     celery_settings = {}
 
     # Add all celery settings from config
-    for key, value in celery_conf.iteritems():
+    for key, value in six.iteritems(celery_conf):
         celery_settings[key] = value
 
     # TODO: use default result stuff here if it exists
@@ -113,7 +115,7 @@ def setup_celery_from_config(app_config, global_config,
     __import__(settings_module)
     this_module = sys.modules[settings_module]
 
-    for key, value in celery_settings.iteritems():
+    for key, value in six.iteritems(celery_settings):
         setattr(this_module, key, value)
 
     if set_environ:
@@ -22,6 +22,8 @@ except ImportError:
     import Image
 import logging
 
+import six
+
 from mediagoblin import mg_globals as mgg
 from mediagoblin.processing import (
     create_pub_filepath, FilenameBuilder,
@@ -93,7 +95,7 @@ class CommonAsciiProcessor(MediaProcessor):
             orig_file.seek(0)
 
     def store_unicode_file(self):
-        with file(self.process_filename, 'rb') as orig_file:
+        with open(self.process_filename, 'rb') as orig_file:
             self._detect_charset(orig_file)
             unicode_filepath = create_pub_filepath(self.entry,
                                                    'ascii-portable.txt')
@@ -104,7 +106,7 @@ class CommonAsciiProcessor(MediaProcessor):
             # Encode the unicode instance to ASCII and replace any
            # non-ASCII with an HTML entity (&#
            unicode_file.write(
-                unicode(orig_file.read().decode(
+                six.text_type(orig_file.read().decode(
                     self.charset)).encode(
                         'ascii',
                         'xmlcharrefreplace'))
@@ -112,7 +114,7 @@ class CommonAsciiProcessor(MediaProcessor):
         self.entry.media_files['unicode'] = unicode_filepath
 
     def generate_thumb(self, font=None, thumb_size=None):
-        with file(self.process_filename, 'rb') as orig_file:
+        with open(self.process_filename, 'rb') as orig_file:
             # If no font kwarg, check config
             if not font:
                 font = self.ascii_config.get('thumbnail_font', None)
@@ -141,7 +143,7 @@ class CommonAsciiProcessor(MediaProcessor):
             thumb = converter._create_image(
                 orig_file.read())
 
-            with file(tmp_thumb, 'w') as thumb_file:
+            with open(tmp_thumb, 'w') as thumb_file:
                 thumb.thumbnail(
                     thumb_size,
                     Image.ANTIALIAS)
@@ -19,6 +19,8 @@ _log = logging.getLogger(__name__)

from datetime import datetime

+ import six
+
from werkzeug.exceptions import Forbidden
from mediagoblin.tools import pluginapi


@@ -75,8 +77,8 @@ def blog_edit(request):
if request.method=='POST' and form.validate():
_log.info("Here")
blog = request.db.Blog()
- blog.title = unicode(form.title.data)
- blog.description = unicode(cleaned_markdown_conversion((form.description.data)))
+ blog.title = six.text_type(form.title.data)
+ blog.description = six.text_type(cleaned_markdown_conversion((form.description.data)))
blog.author = request.user.id
blog.generate_slug()


@@ -112,8 +114,8 @@ def blog_edit(request):
'app_config': mg_globals.app_config})
else:
if request.method == 'POST' and form.validate():
- blog.title = unicode(form.title.data)
- blog.description = unicode(cleaned_markdown_conversion((form.description.data)))
+ blog.title = six.text_type(form.title.data)
+ blog.description = six.text_type(cleaned_markdown_conversion((form.description.data)))
blog.author = request.user.id
blog.generate_slug()


@@ -137,10 +139,10 @@ def blogpost_create(request):

blogpost = request.db.MediaEntry()
blogpost.media_type = 'mediagoblin.media_types.blogpost'
- blogpost.title = unicode(form.title.data)
- blogpost.description = unicode(cleaned_markdown_conversion((form.description.data)))
+ blogpost.title = six.text_type(form.title.data)
+ blogpost.description = six.text_type(cleaned_markdown_conversion((form.description.data)))
blogpost.tags = convert_to_tag_list_of_dicts(form.tags.data)
- blogpost.license = unicode(form.license.data) or None
+ blogpost.license = six.text_type(form.license.data) or None
blogpost.uploader = request.user.id
blogpost.generate_slug()


@@ -187,10 +189,10 @@ def blogpost_edit(request):

form = blog_forms.BlogPostEditForm(request.form, **defaults)
if request.method == 'POST' and form.validate():
- blogpost.title = unicode(form.title.data)
- blogpost.description = unicode(cleaned_markdown_conversion((form.description.data)))
+ blogpost.title = six.text_type(form.title.data)
+ blogpost.description = six.text_type(cleaned_markdown_conversion((form.description.data)))
blogpost.tags = convert_to_tag_list_of_dicts(form.tags.data)
- blogpost.license = unicode(form.license.data)
+ blogpost.license = six.text_type(form.license.data)
set_blogpost_state(request, blogpost)
blogpost.generate_slug()
blogpost.save()
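Note: the blog hunks above replace the Python 2-only unicode() builtin with six.text_type, which aliases unicode on Python 2 and str on Python 3. A minimal, generic sketch of the pattern (the function and title are made up for illustration):

    import six

    def normalize_title(raw_title):
        # six.text_type is unicode on Python 2 and str on Python 3,
        # so the stored value is always a text string.
        return six.text_type(raw_title)

    title = normalize_title('Fireworks over the lake')
    assert isinstance(title, six.text_type)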
@@ -14,6 +14,8 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

+ from __future__ import print_function
+
try:
from PIL import Image
except ImportError:

@@ -22,6 +24,8 @@ import os
import logging
import argparse

+ import six
+
from mediagoblin import mg_globals as mgg
from mediagoblin.processing import (
BadMediaFail, FilenameBuilder,

@@ -65,14 +69,14 @@ def resize_image(entry, resized, keyname, target_name, new_size,
resize_filter = PIL_FILTERS[filter.upper()]
except KeyError:
raise Exception('Filter "{0}" not found, choose one of {1}'.format(
- unicode(filter),
+ six.text_type(filter),
u', '.join(PIL_FILTERS.keys())))

resized.thumbnail(new_size, resize_filter)

# Copy the new file to the conversion subdir, then remotely.
tmp_resized_filename = os.path.join(workdir, target_name)
- with file(tmp_resized_filename, 'w') as resized_file:
+ with open(tmp_resized_filename, 'wb') as resized_file:
resized.save(resized_file, quality=quality)
store_public(entry, keyname, tmp_resized_filename, target_name)

@@ -114,7 +118,7 @@ def resize_tool(entry,
or im.size[1] > new_size[1]\
or exif_image_needs_rotation(exif_tags):
resize_image(
- entry, im, unicode(keyname), target_name,
+ entry, im, six.text_type(keyname), target_name,
tuple(new_size),
exif_tags, conversions_subdir,
quality, filter)

@@ -381,5 +385,4 @@ if __name__ == '__main__':
clean = clean_exif(result)
useful = get_useful(clean)

- print pp.pprint(
- clean)
+ print(pp.pprint(clean))
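Note: the __main__ block above moves from the Python 2 print statement to the print() function, enabled by the from __future__ import print_function line added earlier in the file. A small sketch; since pprint.pprint() prints and returns None, it is usually called on its own:

    from __future__ import print_function  # gives Python 2 the print() function

    import pprint

    data = {'EXIF ColorSpace': 'sRGB', 'EXIF ExifVersion': '0221'}

    # pprint.pprint() writes to stdout itself and returns None,
    # so wrapping it in print() is not normally needed.
    pprint.pprint(data)
    print('done')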
@@ -138,10 +138,10 @@ def is_unoconv_working():
try:
proc = Popen([unoconv, '--show'], stderr=PIPE)
output = proc.stderr.read()
- except OSError, e:
+ except OSError:
_log.warn(_('unoconv failing to run, check log file'))
return False
- if 'ERROR' in output:
+ if b'ERROR' in output:
return False
return True


@@ -207,6 +207,7 @@ def pdf_info(original):
_log.debug('pdfinfo could not read the pdf file.')
raise BadMediaFail()

+ lines = [l.decode() for l in lines]
info_dict = dict([[part.strip() for part in l.strip().split(':', 1)]
for l in lines if ':' in l])

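Note: these pdf hunks deal with the Python 3 bytes/text split: data read from a subprocess pipe is bytes, so the sentinel is compared as b'ERROR' and the pdfinfo output lines are decoded before text parsing. A hedged sketch of the same idea (it assumes the external pdfinfo tool is installed; the parsing is illustrative, not the project's exact code):

    from subprocess import PIPE, Popen

    def pdf_title(path):
        # Pipe output is bytes on Python 3; compare bytes with bytes,
        # and decode before doing string processing.
        proc = Popen(['pdfinfo', path], stdout=PIPE, stderr=PIPE)
        out, err = proc.communicate()
        if b'Error' in err:
            return None
        lines = [line.decode('utf-8', 'replace') for line in out.splitlines()]
        info = dict(
            [part.strip() for part in line.split(':', 1)]
            for line in lines if ':' in line)
        return info.get('Title')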
@@ -22,7 +22,7 @@ class ThreeDeeParseError(Exception):
pass


- class ThreeDee():
+ class ThreeDee(object):
"""
3D model parser base class. Derrived classes are used for basic
analysis of 3D models, and are not intended to be used for 3D
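Note: class ThreeDee(): becomes class ThreeDee(object): above. On Python 2 a class with no explicit base is an old-style class; inheriting from object gives the same new-style semantics (descriptors, super(), a consistent MRO) on both interpreters and is a no-op on Python 3. A tiny illustration:

    class Base(object):
        def describe(self):
            return 'base'

    class Child(Base):
        def describe(self):
            # super() with explicit arguments works the same on Python 2 and 3.
            return 'child of ' + super(Child, self).describe()

    print(Child().describe())  # -> 'child of base'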
@@ -21,6 +21,7 @@ import gettext
import pkg_resources
import threading

+ import six

#############################
# General mediagoblin globals

@@ -64,7 +65,7 @@ def setup_globals(**kwargs):
"""
from mediagoblin import mg_globals

- for key, value in kwargs.iteritems():
+ for key, value in six.iteritems(kwargs):
if not hasattr(mg_globals, key):
raise AssertionError("Global %s not known" % key)
setattr(mg_globals, key, value)
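Note: setup_globals() above trades kwargs.iteritems(), which is gone in Python 3, for six.iteritems(kwargs), which dispatches to iteritems() on Python 2 and items() on Python 3. A self-contained sketch with made-up names:

    import six

    class Config(object):
        pass

    def apply_settings(target, **kwargs):
        # six.iteritems() picks dict.iteritems() on Python 2
        # and dict.items() on Python 3.
        for key, value in six.iteritems(kwargs):
            setattr(target, key, value)

    cfg = Config()
    apply_settings(cfg, debug=True, theme='dark')
    assert cfg.theme == 'dark'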
@@ -14,6 +14,8 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

+ import six
+
from mediagoblin import mg_globals
from mediagoblin.db.models import User, Privilege, UserBan
from mediagoblin.db.base import Session

@@ -22,8 +24,9 @@ from mediagoblin.tools.response import redirect
from datetime import datetime
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _

+
def take_punitive_actions(request, form, report, user):
- message_body =''
+ message_body = ''

# The bulk of this action is running through all of the different
# punitive actions that a moderator could take.

@@ -212,6 +215,6 @@ def parse_report_panel_settings(form):
filters['reporter_id'] = form.reporter.data

filters = dict((k, v)
- for k, v in filters.iteritems() if v)
+ for k, v in six.iteritems(filters) if v)

return filters
@@ -15,7 +15,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import datetime
- import string
+
+ import six

from oauthlib.oauth1.rfc5849.utils import UNICODE_ASCII_CHARACTER_SET
from oauthlib.oauth1 import (RequestTokenEndpoint, AuthorizationEndpoint,

@@ -138,7 +139,7 @@ def client_register(request):

contacts = data.get("contacts", None)
if contacts is not None:
- if type(contacts) is not unicode:
+ if not isinstance(contacts, six.text_type):
error = "Contacts must be a string of space-seporated email addresses."
return json_response({"error": error}, status=400)


@@ -154,7 +155,7 @@ def client_register(request):

redirect_uris = data.get("redirect_uris", None)
if redirect_uris is not None:
- if type(redirect_uris) is not unicode:
+ if not isinstance(redirect_uris, six.text_type):
error = "redirect_uris must be space-seporated URLs."
return json_response({"error": error}, status=400)

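Note: the client-registration hunks replace type(x) is not unicode with isinstance(x, six.text_type), which also accepts subclasses and works on both interpreters. An illustrative sketch (the error text is not the project's exact wording):

    import six

    def validate_contacts(contacts):
        # isinstance() is preferred over a type() identity check, and
        # six.text_type stands in for unicode (Py2) / str (Py3).
        if contacts is not None and not isinstance(contacts, six.text_type):
            return 'contacts must be a space-separated string of email addresses'
        return None

    assert validate_contacts(u'a@example.org b@example.org') is None
    assert validate_contacts(42) is not None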
@@ -18,9 +18,11 @@ import logging
import json

from functools import wraps
- from urlparse import urljoin
from werkzeug.exceptions import Forbidden
from werkzeug.wrappers import Response
+
+ from six.moves.urllib.parse import urljoin
+
from mediagoblin import mg_globals
from mediagoblin.tools.pluginapi import PluginManager
from mediagoblin.storage.filestorage import BasicFileStorage
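Note: the import hunk above drops from urlparse import urljoin in favour of six.moves.urllib.parse, which maps the Python 2 urlparse/urllib names onto the Python 3 urllib.parse layout. A quick usage sketch with placeholder URLs:

    from six.moves.urllib.parse import urljoin, urlparse

    base = 'http://example.org/api/'
    full = urljoin(base, 'whoami')       # 'http://example.org/api/whoami'
    parts = urlparse(full)
    assert parts.netloc == 'example.org'
    assert parts.path == '/api/whoami'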
@@ -17,6 +17,8 @@
import json
import logging

+ import six
+
from werkzeug.exceptions import BadRequest
from werkzeug.wrappers import Response


@@ -55,16 +57,16 @@ def post_entry(request):

callback_url = request.form.get('callback_url')
if callback_url:
- callback_url = unicode(callback_url)
+ callback_url = six.text_type(callback_url)
try:
entry = submit_media(
mg_app=request.app, user=request.user,
submitted_file=request.files['file'],
filename=request.files['file'].filename,
- title=unicode(request.form.get('title')),
- description=unicode(request.form.get('description')),
- license=unicode(request.form.get('license', '')),
- tags_string=unicode(request.form.get('tags', '')),
+ title=six.text_type(request.form.get('title')),
+ description=six.text_type(request.form.get('description')),
+ license=six.text_type(request.form.get('license', '')),
+ tags_string=six.text_type(request.form.get('tags', '')),
upload_limit=upload_limit, max_file_size=max_file_size,
callback_url=callback_url)


@@ -89,7 +91,7 @@ def post_entry(request):
'''
if isinstance(e, InvalidFileType) or \
isinstance(e, FileTypeNotSupported):
- raise BadRequest(unicode(e))
+ raise BadRequest(six.text_type(e))
else:
raise


@@ -103,7 +105,7 @@ def api_test(request):

# TODO: This is the *only* thing using Response() here, should that
# not simply use json_response()?
- return Response(json.dumps(user_data))
+ return Response(json.dumps(user_data, sort_keys=True))


def get_entries(request):
@@ -16,6 +16,8 @@
import bcrypt
import random

+ import six
+
from mediagoblin import mg_globals
from mediagoblin.tools.crypto import get_timed_signer_url
from mediagoblin.tools.mail import send_email

@@ -66,7 +68,7 @@ def bcrypt_gen_password_hash(raw_pass, extra_salt=None):
if extra_salt:
raw_pass = u"%s:%s" % (extra_salt, raw_pass)

- return unicode(
+ return six.text_type(
bcrypt.hashpw(raw_pass.encode('utf-8'), bcrypt.gensalt()))

@@ -16,6 +16,8 @@

import logging

+ import six
+
from werkzeug.exceptions import Unauthorized

from mediagoblin.auth.tools import check_login_simple

@@ -40,7 +42,7 @@ class HTTPAuth(Auth):
if not request.authorization:
return False

- user = check_login_simple(unicode(request.authorization['username']),
+ user = check_login_simple(six.text_type(request.authorization['username']),
request.authorization['password'])

if user:
@@ -16,6 +16,8 @@
import ldap
import logging

+ import six
+
from mediagoblin.tools import pluginapi

_log = logging.getLogger(__name__)

@@ -47,7 +49,7 @@ class LDAP(object):
return email

def login(self, username, password):
- for k, v in self.ldap_settings.iteritems():
+ for k, v in six.iteritems(self.ldap_settings):
try:
self._connect(v)
user_dn = v['LDAP_USER_DN_TEMPLATE'].format(username=username)
@@ -13,6 +13,9 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+ import six
+
from mediagoblin import mg_globals, messages
from mediagoblin.auth.tools import register_user
from mediagoblin.db.models import User

@@ -40,7 +43,7 @@ def login(request):

if user:
# set up login in session
- request.session['user_id'] = unicode(user.id)
+ request.session['user_id'] = six.text_type(user.id)
request.session.save()

if request.form.get('next'):
|
@ -16,7 +16,7 @@
|
|||||||
|
|
||||||
import wtforms
|
import wtforms
|
||||||
|
|
||||||
from urlparse import urlparse
|
from six.moves.urllib.parse import urlparse
|
||||||
|
|
||||||
from mediagoblin.tools.extlib.wtf_html5 import URLField
|
from mediagoblin.tools.extlib.wtf_html5 import URLField
|
||||||
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
|
from mediagoblin.tools.translate import lazy_pass_to_ugettext as _
|
||||||
|
@@ -26,10 +26,6 @@ from mediagoblin.db.models import User
from mediagoblin.plugins.oauth.tools import generate_identifier, \
generate_secret, generate_token, generate_code, generate_refresh_token

- # Don't remove this, I *think* it applies sqlalchemy-migrate functionality onto
- # the models.
- from migrate import changeset
-

class OAuthClient(Base):
__tablename__ = 'oauth__client'
@@ -23,6 +23,8 @@ from datetime import datetime

from functools import wraps

+ import six
+
from mediagoblin.tools.response import json_response


@@ -86,7 +88,7 @@ def create_token(client, user):

def generate_identifier():
''' Generates a ``uuid.uuid4()`` '''
- return unicode(uuid.uuid4())
+ return six.text_type(uuid.uuid4())


def generate_token():

@@ -110,5 +112,5 @@ def generate_secret():
'''
# XXX: We might not want it to use bcrypt, since bcrypt takes its time to
# generate the result.
- return unicode(getrandbits(192))
+ return six.text_type(getrandbits(192))

@@ -17,7 +17,9 @@

import logging

- from urllib import urlencode
+ from six.moves.urllib.parse import urlencode
+
+ import six

from werkzeug.exceptions import BadRequest


@@ -44,11 +46,11 @@ def register_client(request):

if request.method == 'POST' and form.validate():
client = OAuthClient()
- client.name = unicode(form.name.data)
- client.description = unicode(form.description.data)
- client.type = unicode(form.type.data)
+ client.name = six.text_type(form.name.data)
+ client.description = six.text_type(form.description.data)
+ client.type = six.text_type(form.type.data)
client.owner_id = request.user.id
- client.redirect_uri = unicode(form.redirect_uri.data)
+ client.redirect_uri = six.text_type(form.redirect_uri.data)

client.save()

@@ -16,6 +16,8 @@
import base64
import time

+ import six
+
from openid.association import Association as OIDAssociation
from openid.store.interface import OpenIDStore
from openid.store import nonce

@@ -34,12 +36,12 @@ class SQLAlchemyOpenIDStore(OpenIDStore):

if not assoc:
assoc = Association()
- assoc.server_url = unicode(server_url)
+ assoc.server_url = six.text_type(server_url)
assoc.handle = association.handle

# django uses base64 encoding, python-openid uses a blob field for
# secret
- assoc.secret = unicode(base64.encodestring(association.secret))
+ assoc.secret = six.text_type(base64.encodestring(association.secret))
assoc.issued = association.issued
assoc.lifetime = association.lifetime
assoc.assoc_type = association.assoc_type
@@ -13,6 +13,9 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+ import six
+
from openid.consumer import consumer
from openid.consumer.discover import DiscoveryFailure
from openid.extensions.sreg import SRegRequest, SRegResponse

@@ -186,7 +189,7 @@ def finish_login(request):

if user:
# Set up login in session
- request.session['user_id'] = unicode(user.id)
+ request.session['user_id'] = six.text_type(user.id)
request.session.save()

if request.session.get('next'):
@@ -17,6 +17,8 @@ import json
import logging
import requests

+ import six
+
from werkzeug.exceptions import BadRequest

from mediagoblin import messages, mg_globals

@@ -63,7 +65,7 @@ def login(request):
user = query.user if query else None

if user:
- request.session['user_id'] = unicode(user.id)
+ request.session['user_id'] = six.text_type(user.id)
request.session['persona_login_email'] = email
request.session.save()

@@ -47,7 +47,7 @@ class PwgNamedArray(list):


def _fill_element_dict(el, data, as_attr=()):
- for k, v in data.iteritems():
+ for k, v in six.iteritems(data):
if k in as_attr:
if not isinstance(v, six.string_types):
v = str(v)

@@ -17,6 +17,8 @@
import logging
import re

+ import six
+
from werkzeug.exceptions import MethodNotAllowed, BadRequest, NotImplemented
from werkzeug.wrappers import BaseResponse


@@ -133,8 +135,8 @@ def pwg_images_addSimple(request):
mg_app=request.app, user=request.user,
submitted_file=request.files['image'],
filename=request.files['image'].filename,
- title=unicode(form.name.data),
- description=unicode(form.comment.data),
+ title=six.text_type(form.name.data),
+ description=six.text_type(form.comment.data),
upload_limit=upload_limit, max_file_size=max_file_size)

collection_id = form.category.data
@@ -24,6 +24,8 @@ except:
import logging
import os

+ import six
+
from mediagoblin import mg_globals as mgg
from mediagoblin.db.util import atomic_update
from mediagoblin.db.models import MediaEntry

@@ -46,7 +48,7 @@ class ProgressCallback(object):
def create_pub_filepath(entry, filename):
return mgg.public_store.get_unique_filepath(
['media_entries',
- unicode(entry.id),
+ six.text_type(entry.id),
filename])


@@ -319,7 +321,7 @@ def mark_entry_failed(entry_id, exc):
atomic_update(mgg.database.MediaEntry,
{'id': entry_id},
{u'state': u'failed',
- u'fail_error': unicode(exc.exception_path),
+ u'fail_error': six.text_type(exc.exception_path),
u'fail_metadata': exc.metadata})
else:
_log.warn("No idea what happened here, but it failed: %r", exc)
@@ -15,8 +15,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import logging
- import urllib
- import urllib2
+
+ from six.moves.urllib import request, parse

import celery
from celery.registry import tasks

@@ -42,15 +42,15 @@ def handle_push_urls(feed_url):
hubparameters = {
'hub.mode': 'publish',
'hub.url': feed_url}
- hubdata = urllib.urlencode(hubparameters)
+ hubdata = parse.urlencode(hubparameters)
hubheaders = {
"Content-type": "application/x-www-form-urlencoded",
"Connection": "close"}
for huburl in mgg.app_config["push_urls"]:
- hubrequest = urllib2.Request(huburl, hubdata, hubheaders)
+ hubrequest = request.Request(huburl, hubdata, hubheaders)
try:
- hubresponse = urllib2.urlopen(hubrequest)
- except (urllib2.HTTPError, urllib2.URLError) as exc:
+ hubresponse = request.urlopen(hubrequest)
+ except (request.HTTPError, request.URLError) as exc:
# We retry by default 3 times before failing
_log.info("PuSH url %r gave error %r", huburl, exc)
try:
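Note: the PuSH task above replaces urllib/urllib2 with six.moves.urllib, using parse.urlencode() for the form body and request.Request/request.urlopen() for the HTTP call. The sketch below is an assumption-laden rewrite, not the project's code: it pulls the exception classes from six.moves.urllib.error and encodes the body to bytes, which urlopen() expects on Python 3, and the hub URL is a placeholder:

    from six.moves.urllib import error, parse, request

    def notify_hub(hub_url, feed_url):
        data = parse.urlencode({'hub.mode': 'publish', 'hub.url': feed_url})
        req = request.Request(
            hub_url,
            data.encode('utf-8'),  # urlopen() wants a bytes POST body on Python 3
            {'Content-type': 'application/x-www-form-urlencoded',
             'Connection': 'close'})
        try:
            return request.urlopen(req)
        except (error.HTTPError, error.URLError):
            # Caller can retry; both exception types live in six.moves.urllib.error.
            return None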
@@ -14,9 +14,13 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

+ from __future__ import absolute_import
+
import shutil
import uuid

+ import six
+
from werkzeug.utils import secure_filename

from mediagoblin.tools import common

@@ -174,7 +178,7 @@ class StorageInterface(object):
shutil.copy(self.get_local_path(filepath), dest_path)
else:
with self.get_file(filepath, 'rb') as source_file:
- with file(dest_path, 'wb') as dest_file:
+ with open(dest_path, 'wb') as dest_file:
# Copy from remote storage in 4M chunks
shutil.copyfileobj(source_file, dest_file, length=4*1048576)


@@ -187,7 +191,7 @@ class StorageInterface(object):
your storage system.
"""
with self.get_file(filepath, 'wb') as dest_file:
- with file(filename, 'rb') as source_file:
+ with open(filename, 'rb') as source_file:
# Copy to storage system in 4M chunks
shutil.copyfileobj(source_file, dest_file, length=4*1048576)


@@ -220,7 +224,7 @@ def clean_listy_filepath(listy_filepath):
A cleaned list of unicode objects.
"""
cleaned_filepath = [
- unicode(secure_filename(filepath))
+ six.text_type(secure_filename(filepath))
for filepath in listy_filepath]

if u'' in cleaned_filepath:

@@ -257,7 +261,7 @@ def storage_system_from_config(config_section):
"""
# This construct is needed, because dict(config) does
# not replace the variables in the config items.
- config_params = dict(config_section.iteritems())
+ config_params = dict(six.iteritems(config_section))

if 'storage_class' in config_params:
storage_class = config_params['storage_class']

@@ -268,4 +272,4 @@ def storage_system_from_config(config_section):
storage_class = common.import_component(storage_class)
return storage_class(**config_params)

- import filestorage
+ from . import filestorage
@@ -143,7 +143,7 @@ class CloudFilesStorage(StorageInterface):
"""
# Override this method, using the "stream" iterator for efficient streaming
with self.get_file(filepath, 'rb') as source_file:
- with file(dest_path, 'wb') as dest_file:
+ with open(dest_path, 'wb') as dest_file:
for data in source_file:
dest_file.write(data)


@@ -164,7 +164,7 @@ class CloudFilesStorage(StorageInterface):
# TODO: Fixing write() still seems worthwhile though.
_log.debug('Sending {0} to cloudfiles...'.format(filepath))
with self.get_file(filepath, 'wb') as dest_file:
- with file(filename, 'rb') as source_file:
+ with open(filename, 'rb') as source_file:
# Copy to storage system in 4096 byte chunks
dest_file.send(source_file)

@@ -14,15 +14,16 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

+ import os
+ import shutil
+
+ import six.moves.urllib.parse as urlparse
+
from mediagoblin.storage import (
StorageInterface,
clean_listy_filepath,
NoWebServing)
-
- import os
- import shutil
- import urlparse


class BasicFileStorage(StorageInterface):
"""
@@ -14,6 +14,8 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

+ import six
+
from mediagoblin.storage import StorageInterface, clean_listy_filepath


@@ -120,7 +122,7 @@ class MountStorage(StorageInterface):
v = table.get(None)
if v:
res.append(" " * len(indent) + repr(indent) + ": " + repr(v))
- for k, v in table.iteritems():
+ for k, v in six.iteritems(table):
if k == None:
continue
res.append(" " * len(indent) + repr(k) + ":")
@@ -18,6 +18,8 @@ import logging
import uuid
from os.path import splitext

+ import six
+
from werkzeug.utils import secure_filename
from werkzeug.datastructures import FileStorage


@@ -58,7 +60,7 @@ def get_upload_file_limits(user):
"""
Get the upload_limit and max_file_size for this user
"""
- if user.upload_limit >= 0:
+ if user.upload_limit is not None and user.upload_limit >= 0: # TODO: debug this
upload_limit = user.upload_limit
else:
upload_limit = mg_globals.app_config.get('upload_limit', None)

@@ -128,7 +130,7 @@ def submit_media(mg_app, user, submitted_file, filename,

# If the filename contains non ascii generate a unique name
if not all(ord(c) < 128 for c in filename):
- filename = unicode(uuid.uuid4()) + splitext(filename)[-1]
+ filename = six.text_type(uuid.uuid4()) + splitext(filename)[-1]

# Sniff the submitted media to determine which
# media plugin should handle processing

@@ -137,7 +139,7 @@ def submit_media(mg_app, user, submitted_file, filename,
# create entry and save in database
entry = new_upload_entry(user)
entry.media_type = media_type
- entry.title = (title or unicode(splitext(filename)[0]))
+ entry.title = (title or six.text_type(splitext(filename)[0]))

entry.description = description or u""


@@ -213,7 +215,7 @@ def prepare_queue_task(app, entry, filename):
# (If we got it off the task's auto-generation, there'd be
# a risk of a race condition when we'd save after sending
# off the task)
- task_id = unicode(uuid.uuid4())
+ task_id = six.text_type(uuid.uuid4())
entry.queued_task_id = task_id

# Now store generate the queueing related filename
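Note: the get_upload_file_limits() change above (the diff itself carries a "# TODO: debug this" comment) adds an is not None guard because Python 2 quietly evaluates None >= 0 as False while Python 3 raises TypeError when ordering None against an int. A minimal sketch with illustrative names and defaults:

    def effective_upload_limit(user_limit, site_limit=None):
        # Python 3 raises TypeError for `None >= 0`, so check for None first.
        if user_limit is not None and user_limit >= 0:
            return user_limit
        return site_limit

    assert effective_upload_limit(None, 500) == 500
    assert effective_upload_limit(10, 500) == 10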
@@ -14,6 +14,8 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

+ import six
+
from mediagoblin import messages
import mediagoblin.mg_globals as mg_globals


@@ -59,9 +61,9 @@ def submit_start(request):
mg_app=request.app, user=request.user,
submitted_file=request.files['file'],
filename=request.files['file'].filename,
- title=unicode(submit_form.title.data),
- description=unicode(submit_form.description.data),
- license=unicode(submit_form.license.data) or None,
+ title=six.text_type(submit_form.title.data),
+ description=six.text_type(submit_form.description.data),
+ license=six.text_type(submit_form.license.data) or None,
tags_string=submit_form.tags.data,
upload_limit=upload_limit, max_file_size=max_file_size,
urlgen=request.urlgen)

@@ -117,8 +119,8 @@ def add_collection(request, media=None):
if request.method == 'POST' and submit_form.validate():
collection = request.db.Collection()

- collection.title = unicode(submit_form.title.data)
- collection.description = unicode(submit_form.description.data)
+ collection.title = six.text_type(submit_form.title.data)
+ collection.description = six.text_type(submit_form.description.data)
collection.creator = request.user.id
collection.generate_slug()

@@ -15,7 +15,10 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json

- import mock
+ try:
+     import mock
+ except ImportError:
+     import unittest.mock as mock
import pytest

from webtest import AppError

@@ -55,7 +58,7 @@ class TestAPI(object):
headers=headers
)

- return response, json.loads(response.body)
+ return response, json.loads(response.body.decode())

def _upload_image(self, test_app, image):
""" Uploads and image to MediaGoblin via pump.io API """

@@ -72,7 +75,7 @@ class TestAPI(object):
data,
headers=headers
)
- image = json.loads(response.body)
+ image = json.loads(response.body.decode())

return response, image


@@ -142,7 +145,7 @@ class TestAPI(object):
headers=headers
)

- assert "403 FORBIDDEN" in excinfo.value.message
+ assert "403 FORBIDDEN" in excinfo.value.args[0]

def test_unable_to_post_feed_as_someone_else(self, test_app):
""" Tests that can't post an image to someone else's feed """

@@ -165,7 +168,7 @@ class TestAPI(object):
headers=headers
)

- assert "403 FORBIDDEN" in excinfo.value.message
+ assert "403 FORBIDDEN" in excinfo.value.args[0]

def test_only_able_to_update_own_image(self, test_app):
""" Test's that the uploader is the only person who can update an image """

@@ -197,7 +200,7 @@ class TestAPI(object):
headers=headers
)

- assert "403 FORBIDDEN" in excinfo.value.message
+ assert "403 FORBIDDEN" in excinfo.value.args[0]

def test_upload_image_with_filename(self, test_app):
""" Tests that you can upload an image with filename and description """

@@ -224,7 +227,7 @@ class TestAPI(object):
headers={"Content-Type": "application/json"}
)

- image = json.loads(response.body)["object"]
+ image = json.loads(response.body.decode())["object"]

# Check everything has been set on the media correctly
media = MediaEntry.query.filter_by(id=image["id"]).first()

@@ -260,7 +263,7 @@ class TestAPI(object):
)

# Assert that we've got a 403
- assert "403 FORBIDDEN" in excinfo.value.message
+ assert "403 FORBIDDEN" in excinfo.value.args[0]

def test_object_endpoint(self, test_app):
""" Tests that object can be looked up at endpoint """

@@ -281,7 +284,7 @@ class TestAPI(object):
with self.mock_oauth():
request = test_app.get(object_uri)

- image = json.loads(request.body)
+ image = json.loads(request.body.decode())
entry = MediaEntry.query.filter_by(id=image["id"]).first()

assert request.status_code == 200

@@ -351,7 +354,7 @@ class TestAPI(object):
headers=headers
)

- assert "403 FORBIDDEN" in excinfo.value.message
+ assert "403 FORBIDDEN" in excinfo.value.args[0]

def test_unable_to_update_someone_elses_comment(self, test_app):
""" Test that you're able to update someoen elses comment. """

@@ -396,14 +399,14 @@ class TestAPI(object):
headers=headers
)

- assert "403 FORBIDDEN" in excinfo.value.message
+ assert "403 FORBIDDEN" in excinfo.value.args[0]

def test_profile(self, test_app):
""" Tests profile endpoint """
uri = "/api/user/{0}/profile".format(self.user.username)
with self.mock_oauth():
response = test_app.get(uri)
- profile = json.loads(response.body)
+ profile = json.loads(response.body.decode())

assert response.status_code == 200


@@ -417,7 +420,7 @@ class TestAPI(object):
uri = "/api/user/{0}/".format(self.user.username)
with self.mock_oauth():
response = test_app.get(uri)
- user = json.loads(response.body)
+ user = json.loads(response.body.decode())

assert response.status_code == 200


@@ -433,7 +436,7 @@ class TestAPI(object):
with pytest.raises(AppError) as excinfo:
response = test_app.get("/api/whoami")

- assert "401 UNAUTHORIZED" in excinfo.value.message
+ assert "401 UNAUTHORIZED" in excinfo.value.args[0]

def test_read_feed(self, test_app):
""" Test able to read objects from the feed """

@@ -443,7 +446,7 @@ class TestAPI(object):
uri = "/api/user/{0}/feed".format(self.active_user.username)
with self.mock_oauth():
response = test_app.get(uri)
- feed = json.loads(response.body)
+ feed = json.loads(response.body.decode())

assert response.status_code == 200


@@ -468,9 +471,9 @@ class TestAPI(object):
with pytest.raises(AppError) as excinfo:
self._post_image_to_feed(test_app, data)

- assert "403 FORBIDDEN" in excinfo.value.message
+ assert "403 FORBIDDEN" in excinfo.value.args[0]

- def test_object_endpoint(self, test_app):
+ def test_object_endpoint_requestable(self, test_app):
""" Test that object endpoint can be requested """
response, data = self._upload_image(test_app, GOOD_JPG)
response, data = self._post_image_to_feed(test_app, data)

@@ -478,7 +481,7 @@ class TestAPI(object):

with self.mock_oauth():
response = test_app.get(data["object"]["links"]["self"]["href"])
- data = json.loads(response.body)
+ data = json.loads(response.body.decode())

assert response.status_code == 200

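Note: the test changes above follow two Python 3 adjustments: the test client's response.body is bytes, so it is decoded before json.loads(), and the removed exception.message attribute is read as args[0] instead. A small standalone sketch, not tied to the real test app:

    import json

    import pytest

    def parse_json_body(body):
        # WSGI test-client bodies are bytes on Python 3; decode before json.loads().
        if isinstance(body, bytes):
            body = body.decode('utf-8')
        return json.loads(body)

    def test_forbidden_message_via_args():
        with pytest.raises(ValueError) as excinfo:
            raise ValueError('403 FORBIDDEN')
        # BaseException.message is gone in Python 3; args[0] works on both versions.
        assert '403 FORBIDDEN' in excinfo.value.args[0]

    assert parse_json_body(b'{"id": 1}') == {'id': 1}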
@@ -14,10 +14,14 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
- import urlparse
import pkg_resources
import pytest

+ import six
+
+ import six.moves.urllib.parse as urlparse
+
from mediagoblin import mg_globals
from mediagoblin.db.models import User
from mediagoblin.tests.tools import get_app, fixture_add_user

@@ -107,7 +111,7 @@ def test_register_views(test_app):
## Make sure user is logged in
request = template.TEMPLATE_TEST_CONTEXT[
'mediagoblin/user_pages/user_nonactive.html']['request']
- assert request.session['user_id'] == unicode(new_user.id)
+ assert request.session['user_id'] == six.text_type(new_user.id)

## Make sure we get email confirmation, and try verifying
assert len(mail.EMAIL_TEST_INBOX) == 1

@@ -115,7 +119,7 @@ def test_register_views(test_app):
assert message['To'] == 'angrygrrl@example.org'
email_context = template.TEMPLATE_TEST_CONTEXT[
'mediagoblin/auth/verification_email.txt']
- assert email_context['verification_url'] in message.get_payload(decode=True)
+ assert email_context['verification_url'].encode('ascii') in message.get_payload(decode=True)

path = urlparse.urlsplit(email_context['verification_url'])[2]
get_params = urlparse.urlsplit(email_context['verification_url'])[3]

@@ -186,7 +190,7 @@ def test_register_views(test_app):
email_context = template.TEMPLATE_TEST_CONTEXT[
'mediagoblin/plugins/basic_auth/fp_verification_email.txt']
#TODO - change the name of verification_url to something forgot-password-ish
- assert email_context['verification_url'] in message.get_payload(decode=True)
+ assert email_context['verification_url'].encode('ascii') in message.get_payload(decode=True)

path = urlparse.urlsplit(email_context['verification_url'])[2]
get_params = urlparse.urlsplit(email_context['verification_url'])[3]

@@ -305,7 +309,7 @@ def test_authentication_views(test_app):
# Make sure user is in the session
context = template.TEMPLATE_TEST_CONTEXT['mediagoblin/root.html']
session = context['request'].session
- assert session['user_id'] == unicode(test_user.id)
+ assert session['user_id'] == six.text_type(test_user.id)

# Successful logout
# -----------------
@@ -13,7 +13,8 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
- import urlparse
+
+ import six.moves.urllib.parse as urlparse

from mediagoblin.db.models import User
from mediagoblin.plugins.basic_auth import tools as auth_tools
@@ -14,7 +14,9 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

- import urlparse, os, pytest
+ import six
+ import six.moves.urllib.parse as urlparse
+ import pytest

from mediagoblin import mg_globals
from mediagoblin.db.models import User, MediaEntry

@@ -142,8 +144,7 @@ class TestUserEdit(object):
assert message['To'] == 'new@example.com'
email_context = template.TEMPLATE_TEST_CONTEXT[
'mediagoblin/edit/verification.txt']
- assert email_context['verification_url'] in \
-     message.get_payload(decode=True)
+ assert email_context['verification_url'].encode('ascii') in message.get_payload(decode=True)

path = urlparse.urlsplit(email_context['verification_url'])[2]
assert path == u'/edit/verify_email/'

@@ -250,5 +251,11 @@ class TestMetaDataEdit:
old_metadata = new_metadata
new_metadata = media_entry.media_metadata
assert new_metadata == old_metadata
- assert ("u'On the worst day' is not a 'date-time'" in
-     response.body)
+ context = template.TEMPLATE_TEST_CONTEXT[
+     'mediagoblin/edit/metadata.html']
+ if six.PY2:
+     expected = "u'On the worst day' is not a 'date-time'"
+ else:
+     expected = "'On the worst day' is not a 'date-time'"
+ assert context['form'].errors[
+     'media_metadata'][0]['identifier'][0] == expected
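Note: the metadata test above branches on six.PY2 because the expected validation message embeds the repr() of the submitted value, and text reprs differ: u'...' on Python 2 versus '...' on Python 3. The idea in isolation:

    import six

    value = u'On the worst day'
    if six.PY2:
        expected = "u'On the worst day' is not a 'date-time'"
    else:
        expected = "'On the worst day' is not a 'date-time'"

    # repr() of a text string carries the u prefix only on Python 2.
    assert expected.startswith(repr(value))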
@@ -20,6 +20,8 @@ try:
except ImportError:
import Image

+ from collections import OrderedDict
+
from mediagoblin.tools.exif import exif_fix_image_orientation, \
extract_exif, clean_exif, get_gps_data, get_useful
from .resources import GOOD_JPG, EMPTY_JPG, BAD_JPG, GPS_JPG

@@ -48,22 +50,23 @@ def test_exif_extraction():
assert gps == {}

# Do we have the "useful" tags?
- assert useful == {'EXIF CVAPattern': {'field_length': 8,
+ expected = OrderedDict({'EXIF CVAPattern': {'field_length': 8,
'field_offset': 26224,
'field_type': 7,
- 'printable': u'[0, 2, 0, 2, 1, 2, 0, 1]',
+ 'printable': '[0, 2, 0, 2, 1, 2, 0, 1]',
'tag': 41730,
'values': [0, 2, 0, 2, 1, 2, 0, 1]},
'EXIF ColorSpace': {'field_length': 2,
'field_offset': 476,
'field_type': 3,
- 'printable': u'sRGB',
+ 'printable': 'sRGB',
'tag': 40961,
'values': [1]},
'EXIF ComponentsConfiguration': {'field_length': 4,
'field_offset': 308,
'field_type': 7,
- 'printable': u'YCbCr',
+ 'printable': 'YCbCr',
'tag': 37121,
'values': [1, 2, 3, 0]},
'EXIF CompressedBitsPerPixel': {'field_length': 8,

@@ -365,7 +368,10 @@ def test_exif_extraction():
'field_type': 5,
'printable': u'300',
'tag': 283,
- 'values': [[300, 1]]}}
+ 'values': [[300, 1]]}})

+ for k, v in useful.items():
+     assert v == expected[k]


@@ -379,7 +385,7 @@ def test_exif_image_orientation():
result)

# Are the dimensions correct?
- assert image.size == (428, 640)
+ assert image.size in ((428, 640), (640, 428))

# If this pixel looks right, the rest of the image probably will too.
assert_in(image.getdata()[10000],
|
@@ -17,7 +17,9 @@
 import json
 
 import pytest
-from urlparse import urlparse, parse_qs
+import six
+
+from six.moves.urllib.parse import parse_qs, urlparse
 
 from mediagoblin import mg_globals
 from mediagoblin.tools import processing
@@ -49,7 +51,7 @@ class TestHTTPCallback(object):
 'client_id': client_id,
 'client_secret': client_secret})
 
-response_data = json.loads(response.body)
+response_data = json.loads(response.body.decode())
 
 return response_data['access_token']
 
@@ -63,7 +65,7 @@ class TestHTTPCallback(object):
 code = parse_qs(urlparse(redirect.location).query)['code'][0]
 
 client = self.db.OAuthClient.query.filter(
-self.db.OAuthClient.identifier == unicode(client_id)).first()
+self.db.OAuthClient.identifier == six.text_type(client_id)).first()
 
 client_secret = client.secret
 
@@ -13,10 +13,16 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-import urlparse
 import pkg_resources
 import pytest
-import mock
+import six
+try:
+import mock
+except ImportError:
+import unittest.mock as mock
 
+import six.moves.urllib.parse as urlparse
 
 from mediagoblin import mg_globals
 from mediagoblin.db.base import Session
@@ -126,6 +132,6 @@ def test_ldap_plugin(ldap_plugin_app):
 # Make sure user is in the session
 context = template.TEMPLATE_TEST_CONTEXT['mediagoblin/root.html']
 session = context['request'].session
-assert session['user_id'] == unicode(test_user.id)
+assert session['user_id'] == six.text_type(test_user.id)
 
 _test_authentication()
@@ -17,6 +17,7 @@
 
 import logging
 import base64
+import json
 
 import pytest
 
@@ -48,10 +49,10 @@ class TestAPI(object):
 return template.TEMPLATE_TEST_CONTEXT[template_name]
 
 def http_auth_headers(self):
-return {'Authorization': 'Basic {0}'.format(
-base64.b64encode(':'.join([
+return {'Authorization': ('Basic {0}'.format(
+base64.b64encode((':'.join([
 self.user.username,
-self.user_password])))}
+self.user_password])).encode('ascii')).decode()))}
 
 def do_post(self, data, test_app, **kwargs):
 url = kwargs.pop('url', '/api/submit')
@@ -77,8 +78,8 @@ class TestAPI(object):
 '/api/test',
 headers=self.http_auth_headers())
 
-assert response.body == \
-'{"username": "joapi", "email": "joapi@example.com"}'
+assert json.loads(response.body.decode()) == {
+"username": "joapi", "email": "joapi@example.com"}
 
 def test_2_test_submission(self, test_app):
 self.login(test_app)
@@ -56,7 +56,7 @@ class TestMetadataFunctionality:
 jsonld_fail_1 = None
 try:
 jsonld_fail_1 = compact_and_validate(metadata_fail_1)
-except ValidationError, e:
+except ValidationError as e:
 assert e.message == "'All Rights Reserved.' is not a 'uri'"
 assert jsonld_fail_1 == None
 #,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,.,
@@ -72,7 +72,7 @@ class TestMetadataFunctionality:
 jsonld_fail_2 = None
 try:
 jsonld_fail_2 = compact_and_validate(metadata_fail_2)
-except ValidationError, e:
+except ValidationError as e:
 assert e.message == "'The other day' is not a 'date-time'"
 assert jsonld_fail_2 == None
 
@@ -17,13 +17,18 @@
 # Maybe not every model needs a test, but some models have special
 # methods, and so it makes sense to test them here.
 
+from __future__ import print_function
+
 from mediagoblin.db.base import Session
 from mediagoblin.db.models import MediaEntry, User, Privilege
 
 from mediagoblin.tests import MGClientTestCase
 from mediagoblin.tests.tools import fixture_add_user
 
-import mock
+try:
+import mock
+except ImportError:
+import unittest.mock as mock
 import pytest
 
 
@@ -202,7 +207,7 @@ def test_media_data_init(test_app):
 obj_in_session = 0
 for obj in Session():
 obj_in_session += 1
-print repr(obj)
+print(repr(obj))
 assert obj_in_session == 0
 
 
@@ -16,7 +16,7 @@
 
 import pytest
 
-import urlparse
+import six.moves.urllib.parse as urlparse
 
 from mediagoblin.tools import template, mail
 
@@ -135,13 +135,13 @@ otherperson@example.com\n\nSGkgb3RoZXJwZXJzb24sCmNocmlzIGNvbW1lbnRlZCBvbiB5b3VyI
 self.logout()
 self.login('otherperson', 'nosreprehto')
 
-self.test_app.get(media_uri_slug + '/c/{0}/'.format(comment_id))
+self.test_app.get(media_uri_slug + 'c/{0}/'.format(comment_id))
 
 notification = Notification.query.filter_by(id=notification_id).first()
 
 assert notification.seen == True
 
-self.test_app.get(media_uri_slug + '/notifications/silence/')
+self.test_app.get(media_uri_slug + 'notifications/silence/')
 
 subscription = CommentSubscription.query.filter_by(id=subscription_id)\
 .first()
@@ -14,10 +14,9 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import cgi
 
 import pytest
-from urlparse import parse_qs, urlparse
+from six.moves.urllib.parse import parse_qs, urlparse
 
 from oauthlib.oauth1 import Client
 
@@ -146,7 +145,7 @@ class TestOAuth(object):
 headers["Content-Type"] = self.MIME_FORM
 
 response = self.test_app.post(endpoint, headers=headers)
-response = cgi.parse_qs(response.body)
+response = parse_qs(response.body.decode())
 
 # each element is a list, reduce it to a string
 for key, value in response.items():
@@ -18,7 +18,9 @@ import json
 import logging
 
 import pytest
-from urlparse import parse_qs, urlparse
+import six
+
+from six.moves.urllib.parse import parse_qs, urlparse
 
 from mediagoblin import mg_globals
 from mediagoblin.tools import template, pluginapi
@@ -154,14 +156,14 @@ class TestOAuth(object):
 code = self.get_code_from_redirect_uri(code_redirect.location)
 
 client = self.db.OAuthClient.query.filter(
-self.db.OAuthClient.identifier == unicode(client_id)).first()
+self.db.OAuthClient.identifier == six.text_type(client_id)).first()
 
 token_res = self.test_app.get('/oauth-2/access_token?client_id={0}&\
 code={1}&client_secret={2}'.format(client_id, code, client.secret))
 
 assert token_res.status_int == 200
 
-token_data = json.loads(token_res.body)
+token_data = json.loads(token_res.body.decode())
 
 assert not 'error' in token_data
 assert 'access_token' in token_data
@@ -182,14 +184,14 @@ code={1}&client_secret={2}'.format(client_id, code, client.secret))
 code = self.get_code_from_redirect_uri(code_redirect.location)
 
 client = self.db.OAuthClient.query.filter(
-self.db.OAuthClient.identifier == unicode(client_id)).first()
+self.db.OAuthClient.identifier == six.text_type(client_id)).first()
 
 token_res = self.test_app.get('/oauth-2/access_token?\
 code={0}&client_secret={1}'.format(code, client.secret))
 
 assert token_res.status_int == 200
 
-token_data = json.loads(token_res.body)
+token_data = json.loads(token_res.body.decode())
 
 assert 'error' in token_data
 assert not 'access_token' in token_data
@@ -213,7 +215,7 @@ code={0}&client_secret={1}'.format(code, client.secret))
 
 assert token_res.status_int == 200
 
-new_token_data = json.loads(token_res.body)
+new_token_data = json.loads(token_res.body.decode())
 
 assert not 'error' in new_token_data
 assert 'access_token' in new_token_data
@@ -14,10 +14,14 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import urlparse
 import pkg_resources
 import pytest
-import mock
+import six
+import six.moves.urllib.parse as urlparse
+try:
+import mock
+except ImportError:
+import unittest.mock as mock
 
 openid_consumer = pytest.importorskip(
 "openid.consumer.consumer")
@@ -206,7 +210,7 @@ class TestOpenIDPlugin(object):
 # Make sure user is in the session
 context = template.TEMPLATE_TEST_CONTEXT['mediagoblin/root.html']
 session = context['request'].session
-assert session['user_id'] == unicode(test_user.id)
+assert session['user_id'] == six.text_type(test_user.id)
 
 _test_new_user()
 
@@ -1,40 +1,18 @@
 [DEFAULT]
 debug = true
 
-[composite:main]
-use = egg:Paste#urlmap
-/ = mediagoblin
-/mgoblin_media/ = publicstore_serve
-/test_static/ = mediagoblin_static
-/theme_static/ = theme_static
-/plugin_static/ = plugin_static
-
-[app:mediagoblin]
+[app:main]
 use = egg:mediagoblin#app
 config = %(here)s/mediagoblin.ini
-
-[app:publicstore_serve]
-use = egg:Paste#static
-document_root = %(here)s/user_dev/media/public
-
-[app:mediagoblin_static]
-use = egg:Paste#static
-document_root = %(here)s/mediagoblin/static/
-
-[app:theme_static]
-use = egg:Paste#static
-document_root = %(here)s/user_dev/theme_static/
-cache_max_age = 86400
-
-[app:plugin_static]
-use = egg:Paste#static
-document_root = %(here)s/user_dev/plugin_static/
-cache_max_age = 86400
+/mgoblin_media = %(here)s/user_dev/media/public
+/test_static = %(here)s/mediagoblin/static
+/theme_static = %(here)s/user_dev/theme_static
+/plugin_static = %(here)s/user_dev/plugin_static
 
 [celery]
 CELERY_ALWAYS_EAGER = true
 
 [server:main]
-use = egg:Paste#http
+use = egg:gunicorn
 host = 127.0.0.1
 port = 6543
@@ -14,6 +14,7 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import collections
 import tempfile
 import shutil
 import os
@@ -26,13 +27,13 @@ from .resources import GOOD_PDF as GOOD
 
 @pytest.mark.skipif("not check_prerequisites()")
 def test_pdf():
-good_dict = {'pdf_version_major': 1, 'pdf_title': '',
+good_dict = collections.OrderedDict({'pdf_version_major': 1, 'pdf_title': '',
 'pdf_page_size_width': 612, 'pdf_author': '',
 'pdf_keywords': '', 'pdf_pages': 10,
 'pdf_producer': 'dvips + GNU Ghostscript 7.05',
 'pdf_version_minor': 3,
 'pdf_creator': 'LaTeX with hyperref package',
-'pdf_page_size_height': 792}
+'pdf_page_size_height': 792})
 assert pdf_info(GOOD) == good_dict
 temp_dir = tempfile.mkdtemp()
 create_pdf_thumb(GOOD, os.path.join(temp_dir, 'good_256_256.png'), 256, 256)
@@ -13,10 +13,16 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-import urlparse
 import pkg_resources
 import pytest
-import mock
+import six
+try:
+import mock
+except ImportError:
+import unittest.mock as mock
 
+import six.moves.urllib.parse as urlparse
 
 pytest.importorskip("requests")
 
@@ -140,7 +146,7 @@ class TestPersonaPlugin(object):
 # Make sure user is in the session
 context = template.TEMPLATE_TEST_CONTEXT['mediagoblin/root.html']
 session = context['request'].session
-assert session['user_id'] == unicode(test_user.id)
+assert session['user_id'] == six.text_type(test_user.id)
 
 _test_registration()
 
@@ -44,28 +44,23 @@ class Test_PWG(object):
 def test_session(self):
 resp = self.do_post("pwg.session.login",
 {"username": u"nouser", "password": "wrong"})
-assert resp.body == XML_PREFIX \
-+ '<rsp stat="fail"><err code="999" msg="Invalid username/password"/></rsp>'
+assert resp.body == (XML_PREFIX + '<rsp stat="fail"><err code="999" msg="Invalid username/password"/></rsp>').encode('ascii')
 
 resp = self.do_post("pwg.session.login",
 {"username": self.username, "password": "wrong"})
-assert resp.body == XML_PREFIX \
-+ '<rsp stat="fail"><err code="999" msg="Invalid username/password"/></rsp>'
+assert resp.body == (XML_PREFIX + '<rsp stat="fail"><err code="999" msg="Invalid username/password"/></rsp>').encode('ascii')
 
 resp = self.do_get("pwg.session.getStatus")
-assert resp.body == XML_PREFIX \
-+ '<rsp stat="ok"><username>guest</username></rsp>'
+assert resp.body == (XML_PREFIX + '<rsp stat="ok"><username>guest</username></rsp>').encode('ascii')
 
 resp = self.do_post("pwg.session.login",
 {"username": self.username, "password": self.password})
-assert resp.body == XML_PREFIX + '<rsp stat="ok">1</rsp>'
+assert resp.body == (XML_PREFIX + '<rsp stat="ok">1</rsp>').encode('ascii')
 
 resp = self.do_get("pwg.session.getStatus")
-assert resp.body == XML_PREFIX \
-+ '<rsp stat="ok"><username>chris</username></rsp>'
+assert resp.body == (XML_PREFIX + '<rsp stat="ok"><username>chris</username></rsp>').encode('ascii')
 
 self.do_get("pwg.session.logout")
 
 resp = self.do_get("pwg.session.getStatus")
-assert resp.body == XML_PREFIX \
-+ '<rsp stat="ok"><username>guest</username></rsp>'
+assert resp.body == (XML_PREFIX + '<rsp stat="ok"><username>guest</username></rsp>').encode('ascii')
@@ -348,7 +348,7 @@ def test_modify_context(context_modified_app):
 """
 # Specific thing passed into a page
 result = context_modified_app.get("/modify_context/specific/")
-assert result.body.strip() == """Specific page!
+assert result.body.strip() == b"""Specific page!
 
 specific thing: in yer specificpage
 global thing: globally appended!
@@ -357,7 +357,7 @@ doubleme: happyhappy"""
 
 # General test, should have global context variable only
 result = context_modified_app.get("/modify_context/")
-assert result.body.strip() == """General page!
+assert result.body.strip() == b"""General page!
 
 global thing: globally appended!
 lol: cats
@@ -421,7 +421,7 @@ def test_plugin_assetlink(static_plugin_app):
 junk_file_path = os.path.join(
 linked_assets_dir.rstrip(os.path.sep),
 'junk.txt')
-with file(junk_file_path, 'w') as junk_file:
+with open(junk_file_path, 'w') as junk_file:
 junk_file.write('barf')
 
 os.unlink(plugin_link_dir)
@@ -440,14 +440,14 @@ to:
 
 # link dir exists, but is a non-symlink
 os.unlink(plugin_link_dir)
-with file(plugin_link_dir, 'w') as clobber_file:
+with open(plugin_link_dir, 'w') as clobber_file:
 clobber_file.write('clobbered!')
 
 result = run_assetlink().collection[0]
 assert result == 'Could not link "staticstuff": %s exists and is not a symlink\n' % (
 plugin_link_dir)
 
-with file(plugin_link_dir, 'r') as clobber_file:
+with open(plugin_link_dir, 'r') as clobber_file:
 assert clobber_file.read() == 'clobbered!'
 
 
@@ -456,11 +456,10 @@ def test_plugin_staticdirect(static_plugin_app):
 Test that the staticdirect utilities pull up the right things
 """
 result = json.loads(
-static_plugin_app.get('/staticstuff/').body)
+static_plugin_app.get('/staticstuff/').body.decode())
 
 assert len(result) == 2
 
 assert result['mgoblin_bunny_pic'] == '/test_static/images/bunny_pic.png'
 assert result['plugin_bunny_css'] == \
 '/plugin_static/staticstuff/css/bunnify.css'
 
@@ -14,6 +14,7 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import six
 import pytest
 from datetime import date, timedelta
 from webtest import AppError
@@ -79,7 +80,7 @@ class TestPrivilegeFunctionality:
 
 response = self.test_app.get('/')
 assert response.status == "200 OK"
-assert "You are Banned" in response.body
+assert b"You are Banned" in response.body
 # Then test what happens when that ban has an expiration date which
 # hasn't happened yet
 #----------------------------------------------------------------------
@@ -92,7 +93,7 @@ class TestPrivilegeFunctionality:
 
 response = self.test_app.get('/')
 assert response.status == "200 OK"
-assert "You are Banned" in response.body
+assert b"You are Banned" in response.body
 
 # Then test what happens when that ban has an expiration date which
 # has already happened
@@ -107,7 +108,7 @@ class TestPrivilegeFunctionality:
 
 response = self.test_app.get('/')
 assert response.status == "302 FOUND"
-assert not "You are Banned" in response.body
+assert not b"You are Banned" in response.body
 
 def testVariousPrivileges(self):
 # The various actions that require privileges (ex. reporting,
@@ -127,14 +128,16 @@ class TestPrivilegeFunctionality:
 #----------------------------------------------------------------------
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.get('/submit/')
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 
 with pytest.raises(AppError) as excinfo:
 response = self.do_post({'upload_files':[('file',GOOD_JPG)],
 'title':u'Normal Upload 1'},
 url='/submit/')
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 # Test that a user cannot comment without the commenter privilege
 #----------------------------------------------------------------------
@@ -149,50 +152,58 @@ class TestPrivilegeFunctionality:
 media_uri_slug = '/u/{0}/m/{1}/'.format(self.admin_user.username,
 media_entry.slug)
 response = self.test_app.get(media_uri_slug)
-assert not "Add a comment" in response.body
+assert not b"Add a comment" in response.body
 
 self.query_for_users()
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.post(
 media_uri_id + 'comment/add/',
 {'comment_content': u'Test comment #42'})
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 # Test that a user cannot report without the reporter privilege
 #----------------------------------------------------------------------
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.get(media_uri_slug+"report/")
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 with pytest.raises(AppError) as excinfo:
 response = self.do_post(
 {'report_reason':u'Testing Reports #1',
 'reporter_id':u'3'},
 url=(media_uri_slug+"report/"))
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 # Test that a user cannot access the moderation pages w/o moderator
 # or admin privileges
 #----------------------------------------------------------------------
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.get("/mod/users/")
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.get("/mod/reports/")
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.get("/mod/media/")
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.get("/mod/users/1/")
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 with pytest.raises(AppError) as excinfo:
 response = self.test_app.get("/mod/reports/1/")
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
 
 self.query_for_users()
 
@@ -202,4 +213,5 @@ class TestPrivilegeFunctionality:
 'targeted_user':self.admin_user.id},
 url='/mod/reports/1/')
 self.query_for_users()
-assert 'Bad response: 403 FORBIDDEN' in str(excinfo)
+excinfo = str(excinfo) if six.PY2 else str(excinfo).encode('ascii')
+assert b'Bad response: 403 FORBIDDEN' in excinfo
@@ -15,6 +15,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 import pytest
+import six
 
 from mediagoblin.tools import template
 from mediagoblin.tests.tools import (fixture_add_user, fixture_media_entry,
@@ -75,7 +76,7 @@ class TestReportFiling:
 
 response, context = self.do_post(
 {'report_reason':u'Testing Media Report',
-'reporter_id':unicode(allie_id)},url= media_uri_slug + "report/")
+'reporter_id':six.text_type(allie_id)},url= media_uri_slug + "report/")
 
 assert response.status == "302 FOUND"
 
@@ -110,7 +111,7 @@ class TestReportFiling:
 
 response, context = self.do_post({
 'report_reason':u'Testing Comment Report',
-'reporter_id':unicode(allie_id)},url= comment_uri_slug + "report/")
+'reporter_id':six.text_type(allie_id)},url= comment_uri_slug + "report/")
 
 assert response.status == "302 FOUND"
 
@@ -14,6 +14,11 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import six
+import pytest
+
+pytestmark = pytest.mark.skipif(six.PY3, reason='needs sqlalchemy.migrate')
+
 import copy
 
 from sqlalchemy import (
@@ -23,7 +28,8 @@ from sqlalchemy import (
 from sqlalchemy.orm import sessionmaker, relationship
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.sql import select, insert
-from migrate import changeset
+if six.PY2:
+from migrate import changeset
 
 from mediagoblin.db.base import GMGTableBase
 from mediagoblin.db.migration_tools import MigrationManager, RegisterMigration
@@ -190,7 +196,7 @@ def level_exits_new_table(db_conn):
 
 for level in result:
 
-for exit_name, to_level in level['exits'].iteritems():
+for exit_name, to_level in six.iteritems(level['exits']):
 # Insert the level exit
 db_conn.execute(
 level_exits.insert().values(
@@ -19,6 +19,8 @@ import os
 import tempfile
 
 import pytest
+import six
 
 from werkzeug.utils import secure_filename
 
 from mediagoblin import storage
@@ -45,7 +47,7 @@ def test_clean_listy_filepath():
 storage.clean_listy_filepath(['../../', 'linooks.jpg'])
 
 
-class FakeStorageSystem():
+class FakeStorageSystem(object):
 def __init__(self, foobie, blech, **kwargs):
 self.foobie = foobie
 self.blech = blech
@@ -78,8 +80,8 @@ def test_storage_system_from_config():
 'mediagoblin.tests.test_storage:FakeStorageSystem'})
 assert this_storage.foobie == 'eiboof'
 assert this_storage.blech == 'hcelb'
-assert unicode(this_storage.__class__) == \
-u'mediagoblin.tests.test_storage.FakeStorageSystem'
+assert six.text_type(this_storage.__class__) == \
+u"<class 'mediagoblin.tests.test_storage.FakeStorageSystem'>"
 
 
 ##########################
@@ -172,7 +174,7 @@ def test_basic_storage_get_file():
 with this_storage.get_file(filepath, 'r') as our_file:
 assert our_file.read() == 'First file'
 assert os.path.exists(os.path.join(tmpdir, 'dir1/dir2/ourfile.txt'))
-with file(os.path.join(tmpdir, 'dir1/dir2/ourfile.txt'), 'r') as our_file:
+with open(os.path.join(tmpdir, 'dir1/dir2/ourfile.txt'), 'r') as our_file:
 assert our_file.read() == 'First file'
 
 # Write to the same path but try to get a unique file.
@@ -184,13 +186,13 @@ def test_basic_storage_get_file():
 with this_storage.get_file(new_filepath, 'r') as our_file:
 assert our_file.read() == 'Second file'
 assert os.path.exists(os.path.join(tmpdir, *new_filepath))
-with file(os.path.join(tmpdir, *new_filepath), 'r') as our_file:
+with open(os.path.join(tmpdir, *new_filepath), 'r') as our_file:
 assert our_file.read() == 'Second file'
 
 # Read from an existing file
 manually_written_file = os.makedirs(
 os.path.join(tmpdir, 'testydir'))
-with file(os.path.join(tmpdir, 'testydir/testyfile.txt'), 'w') as testyfile:
+with open(os.path.join(tmpdir, 'testydir/testyfile.txt'), 'w') as testyfile:
 testyfile.write('testy file! so testy.')
 
 with this_storage.get_file(['testydir', 'testyfile.txt']) as testyfile:
@@ -286,7 +288,7 @@ def test_basic_storage_copy_locally():
 this_storage.copy_locally(filepath, new_file_dest)
 this_storage.delete_file(filepath)
 
-assert file(new_file_dest).read() == 'Testing this file'
+assert open(new_file_dest).read() == 'Testing this file'
 
 os.remove(new_file_dest)
 os.rmdir(dest_tmpdir)
@@ -295,7 +297,7 @@ def test_basic_storage_copy_locally():
 
 def _test_copy_local_to_storage_works(tmpdir, this_storage):
 local_filename = tempfile.mktemp()
-with file(local_filename, 'w') as tmpfile:
+with open(local_filename, 'w') as tmpfile:
 tmpfile.write('haha')
 
 this_storage.copy_local_to_storage(
@@ -303,7 +305,7 @@ def _test_copy_local_to_storage_works(tmpdir, this_storage):
 
 os.remove(local_filename)
 
-assert file(
+assert open(
 os.path.join(tmpdir, 'dir1/dir2/copiedto.txt'),
 'r').read() == 'haha'
 
@@ -14,14 +14,18 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import sys
-reload(sys)
-sys.setdefaultencoding('utf-8')
+import six
+
+if six.PY2:  # this hack only work in Python 2
+import sys
+reload(sys)
+sys.setdefaultencoding('utf-8')
 
-import urlparse
 import os
 import pytest
 
+import six.moves.urllib.parse as urlparse
+
 from mediagoblin.tests.tools import fixture_add_user
 from mediagoblin import mg_globals
 from mediagoblin.db.models import MediaEntry, User
@@ -34,7 +38,7 @@ from .resources import GOOD_JPG, GOOD_PNG, EVIL_FILE, EVIL_JPG, EVIL_PNG, \
 BIG_BLUE, GOOD_PDF, GPS_JPG, MED_PNG, BIG_PNG
 
 GOOD_TAG_STRING = u'yin,yang'
-BAD_TAG_STRING = unicode('rage,' + 'f' * 26 + 'u' * 26)
+BAD_TAG_STRING = six.text_type('rage,' + 'f' * 26 + 'u' * 26)
 
 FORM_CONTEXT = ['mediagoblin/submit/start.html', 'submit_form']
 REQUEST_CONTEXT = ['mediagoblin/user_pages/user.html', 'request']
|
@ -14,12 +14,17 @@
|
|||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import mock
|
try:
|
||||||
|
import mock
|
||||||
|
except ImportError:
|
||||||
|
import unittest.mock as mock
|
||||||
import email
|
import email
|
||||||
import pytest
|
import pytest
|
||||||
import smtplib
|
import smtplib
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
from mediagoblin.tests.tools import get_app
|
from mediagoblin.tests.tools import get_app
|
||||||
from mediagoblin.tools import common, url, translate, mail, text, testing
|
from mediagoblin.tools import common, url, translate, mail, text, testing
|
||||||
|
|
||||||
@ -57,7 +62,7 @@ I hope you like unit tests JUST AS MUCH AS I DO!""")
|
|||||||
assert message['From'] == "sender@mediagoblin.example.org"
|
assert message['From'] == "sender@mediagoblin.example.org"
|
||||||
assert message['To'] == "amanda@example.org, akila@example.org"
|
assert message['To'] == "amanda@example.org, akila@example.org"
|
||||||
assert message['Subject'] == "Testing is so much fun!"
|
assert message['Subject'] == "Testing is so much fun!"
|
||||||
assert message.get_payload(decode=True) == """HAYYY GUYS!
|
assert message.get_payload(decode=True) == b"""HAYYY GUYS!
|
||||||
|
|
||||||
I hope you like unit tests JUST AS MUCH AS I DO!"""
|
I hope you like unit tests JUST AS MUCH AS I DO!"""
|
||||||
|
|
||||||
@ -70,7 +75,7 @@ I hope you like unit tests JUST AS MUCH AS I DO!"""
|
|||||||
assert mbox_message['From'] == "sender@mediagoblin.example.org"
|
assert mbox_message['From'] == "sender@mediagoblin.example.org"
|
||||||
assert mbox_message['To'] == "amanda@example.org, akila@example.org"
|
assert mbox_message['To'] == "amanda@example.org, akila@example.org"
|
||||||
assert mbox_message['Subject'] == "Testing is so much fun!"
|
assert mbox_message['Subject'] == "Testing is so much fun!"
|
||||||
assert mbox_message.get_payload(decode=True) == """HAYYY GUYS!
|
assert mbox_message.get_payload(decode=True) == b"""HAYYY GUYS!
|
||||||
|
|
||||||
I hope you like unit tests JUST AS MUCH AS I DO!"""
|
I hope you like unit tests JUST AS MUCH AS I DO!"""
|
||||||
|
|
||||||
@ -144,13 +149,13 @@ def test_gettext_lazy_proxy():
|
|||||||
orig = u"Password"
|
orig = u"Password"
|
||||||
|
|
||||||
set_thread_locale("es")
|
set_thread_locale("es")
|
||||||
p1 = unicode(proxy)
|
p1 = six.text_type(proxy)
|
||||||
p1_should = pass_to_ugettext(orig)
|
p1_should = pass_to_ugettext(orig)
|
||||||
assert p1_should != orig, "Test useless, string not translated"
|
assert p1_should != orig, "Test useless, string not translated"
|
||||||
assert p1 == p1_should
|
assert p1 == p1_should
|
||||||
|
|
||||||
set_thread_locale("sv")
|
set_thread_locale("sv")
|
||||||
p2 = unicode(proxy)
|
p2 = six.text_type(proxy)
|
||||||
p2_should = pass_to_ugettext(orig)
|
p2_should = pass_to_ugettext(orig)
|
||||||
assert p2_should != orig, "Test broken, string not translated"
|
assert p2_should != orig, "Test broken, string not translated"
|
||||||
assert p2 == p2_should
|
assert p2 == p2_should
|
||||||
|
@@ -50,7 +50,7 @@ class TestWorkbench(object):
 # kill a workbench
 this_workbench = self.workbench_manager.create()
 tmpfile_name = this_workbench.joinpath('temp.txt')
-tmpfile = file(tmpfile_name, 'w')
+tmpfile = open(tmpfile_name, 'w')
 with tmpfile:
 tmpfile.write('lollerskates')
 
@@ -19,6 +19,7 @@ import os
 import pkg_resources
 import shutil
 
+import six
 
 from paste.deploy import loadapp
 from webtest import TestApp
@@ -144,7 +145,7 @@ def install_fixtures_simple(db, fixtures):
 """
 Very simply install fixtures in the database
 """
-for collection_name, collection_fixtures in fixtures.iteritems():
+for collection_name, collection_fixtures in six.iteritems(fixtures):
 collection = db[collection_name]
 for fixture in collection_fixtures:
 collection.insert(fixture)
@@ -164,7 +165,7 @@ def assert_db_meets_expected(db, expected):
 {'id': 'foo',
 'some_field': 'some_value'},]}
 """
-for collection_name, collection_data in expected.iteritems():
+for collection_name, collection_data in six.iteritems(expected):
 collection = db[collection_name]
 for expected_document in collection_data:
 document = collection.query.filter_by(id=expected_document['id']).first()
@@ -51,7 +51,7 @@ def load_key(filename):
 
 def create_key(key_dir, key_filepath):
 global __itsda_secret
-old_umask = os.umask(077)
+old_umask = os.umask(0o77)
 key_file = None
 try:
 if not os.path.isdir(key_dir):
@@ -60,7 +60,7 @@ def create_key(key_dir, key_filepath):
 key = str(getrandbits(192))
 key_file = tempfile.NamedTemporaryFile(dir=key_dir, suffix='.bin',
 delete=False)
-key_file.write(key)
+key_file.write(key.encode('ascii'))
 key_file.flush()
 os.rename(key_file.name, key_filepath)
 key_file.close()
@@ -79,7 +79,7 @@ def setup_crypto():
 key_filepath = os.path.join(key_dir, 'itsdangeroussecret.bin')
 try:
 load_key(key_filepath)
-except IOError, error:
+except IOError as error:
 if error.errno != errno.ENOENT:
 raise
 create_key(key_dir, key_filepath)
@@ -14,6 +14,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import six
+
 from exifread import process_file
 from exifread.utils import Ratio
 
@@ -75,7 +77,7 @@ def extract_exif(filename):
 Returns EXIF tags found in file at ``filename``
 """
 try:
-with file(filename) as image:
+with open(filename, 'rb') as image:
 return process_file(image, details=False)
 except IOError:
 raise BadMediaFail(_('Could not read the image file.'))
@@ -94,7 +96,7 @@ def clean_exif(exif):
 'Thumbnail JPEGInterchangeFormat']
 
 return dict((key, _ifd_tag_to_dict(value)) for (key, value)
-in exif.iteritems() if key not in disabled_tags)
+in six.iteritems(exif) if key not in disabled_tags)
 
 
 def _ifd_tag_to_dict(tag):
@@ -110,7 +112,7 @@ def _ifd_tag_to_dict(tag):
 'field_length': tag.field_length,
 'values': None}
 
-if isinstance(tag.printable, str):
+if isinstance(tag.printable, six.binary_type):
 # Force it to be decoded as UTF-8 so that it'll fit into the DB
 data['printable'] = tag.printable.decode('utf8', 'replace')
 
@@ -118,7 +120,7 @@ def _ifd_tag_to_dict(tag):
 data['values'] = [_ratio_to_list(val) if isinstance(val, Ratio) else val
 for val in tag.values]
 else:
-if isinstance(tag.values, str):
+if isinstance(tag.values, six.binary_type):
 # Force UTF-8, so that it fits into the DB
 data['values'] = tag.values.decode('utf8', 'replace')
 else:
@@ -132,7 +134,8 @@ def _ratio_to_list(ratio):
 
 
 def get_useful(tags):
-return dict((key, tag) for (key, tag) in tags.iteritems())
+from collections import OrderedDict
+return OrderedDict((key, tag) for (key, tag) in six.iteritems(tags))
 
 
 def get_gps_data(tags):
@@ -149,7 +152,7 @@ def get_gps_data(tags):
 'latitude': tags['GPS GPSLatitude'],
 'longitude': tags['GPS GPSLongitude']}
 
-for key, dat in dms_data.iteritems():
+for key, dat in six.iteritems(dms_data):
 gps_data[key] = (
 lambda v:
 float(v[0].num) / float(v[0].den) \
@@ -14,11 +14,13 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

+from __future__ import print_function, unicode_literals
+
 import six
 import smtplib
 import sys
-from email.MIMEText import MIMEText
 from mediagoblin import mg_globals, messages
+from mediagoblin._compat import MIMEText
 from mediagoblin.tools import common

 ### ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -130,12 +132,12 @@ def send_email(from_addr, to_addrs, subject, message_body):
         EMAIL_TEST_INBOX.append(message)

     elif mg_globals.app_config['email_debug_mode']:
-        print u"===== Email ====="
-        print u"From address: %s" % message['From']
-        print u"To addresses: %s" % message['To']
-        print u"Subject: %s" % message['Subject']
-        print u"-- Body: --"
-        print message.get_payload(decode=True)
+        print("===== Email =====")
+        print("From address: %s" % message['From'])
+        print("To addresses: %s" % message['To'])
+        print("Subject: %s" % message['Subject'])
+        print("-- Body: --")
+        print(message.get_payload(decode=True))

     return mhost.sendmail(from_addr, to_addrs, message.as_string())

@@ -162,5 +164,5 @@ def email_debug_message(request):
     if mg_globals.app_config['email_debug_mode']:
         # DEBUG message, no need to translate
         messages.add_message(request, messages.DEBUG,
-            u"This instance is running in email debug mode. "
-            u"The email will be on the console of the server process.")
+            "This instance is running in email debug mode. "
+            "The email will be on the console of the server process.")

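The MIMEText import moves into mediagoblin._compat because the class lives at email.MIMEText.MIMEText only on Python 2, while email.mime.text.MIMEText is the location that works everywhere. The compat module itself is not part of this diff; a sketch of the shape such an import shim presumably takes:

    import six

    if six.PY2:
        # Location that only exists on Python 2.
        from email.MIMEText import MIMEText
    else:
        # Canonical location on Python 3.
        from email.mime.text import MIMEText

    message = MIMEText('A test body', 'plain', 'utf-8')
    message['Subject'] = 'Test subject'
    print(message.as_string())
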
@@ -15,6 +15,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.


+from io import open
 import os
 import copy
 import json
@@ -102,7 +103,7 @@ def load_resource(package, resource_path):
     os.path.sep.
     """
     filename = resource_filename(package, os.path.sep.join(resource_path))
-    return file(filename).read()
+    return open(filename, encoding="utf-8").read()

 def load_resource_json(package, resource_path):
     """

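The file() builtin is gone on Python 3; io.open exists on both versions with the Python 3 open() signature, so passing encoding="utf-8" returns decoded text on either interpreter instead of raw bytes. For example, using the project README as the example file:

    from io import open  # same signature on Python 2 and Python 3

    # Reading with an explicit encoding yields text (unicode/str), so the
    # resource does not need a separate decode step before it is used.
    with open('README', encoding='utf-8') as fobj:
        contents = fobj.read()
    print(len(contents))
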
@@ -17,9 +17,11 @@
 import urllib
 import copy
 from math import ceil, floor
-from itertools import izip, count
+from itertools import count
 from werkzeug.datastructures import MultiDict

+from six.moves import zip
+
 PAGINATION_DEFAULT_PER_PAGE = 30


@@ -52,7 +54,7 @@ class Pagination(object):
         if jump_to_id:
             cursor = copy.copy(self.cursor)

-            for (doc, increment) in izip(cursor, count(0)):
+            for (doc, increment) in list(zip(cursor, count(0))):
                 if doc.id == jump_to_id:
                     self.page = 1 + int(floor(increment / self.per_page))

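itertools.izip was removed in Python 3; six.moves.zip maps to itertools.izip on Python 2 and to the builtin zip on Python 3, so the pairing stays lazy on both (the added list() call materialises all pairs before looping, which is not strictly required). A small sketch of the equivalence, with a plain list standing in for the paginated cursor:

    from itertools import count

    from six.moves import zip  # itertools.izip on Python 2, builtin zip on Python 3

    docs = ['first', 'second', 'third']  # stands in for the query cursor
    for doc, increment in zip(docs, count(0)):
        print('%d: %s' % (increment, doc))
    # enumerate(docs) would be an equivalent spelling of the same loop.
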
@@ -18,8 +18,7 @@ import logging
 import json
 import traceback

-from urllib2 import urlopen, Request, HTTPError
-from urllib import urlencode
+from six.moves.urllib import request, parse

 _log = logging.getLogger(__name__)

@@ -37,10 +36,10 @@ def create_post_request(url, data, **kw):
         data_parser: The parser function that is used to parse the `data`
             argument
     '''
-    data_parser = kw.get('data_parser', urlencode)
+    data_parser = kw.get('data_parser', parse.urlencode)
     headers = kw.get('headers', {})

-    return Request(url, data_parser(data), headers=headers)
+    return request.Request(url, data_parser(data), headers=headers)


 def json_processing_callback(entry):
@@ -76,11 +75,11 @@ def json_processing_callback(entry):
         data_parser=json.dumps)

     try:
-        urlopen(request)
+        request.urlopen(request)
         _log.debug('Processing callback for {0} sent'.format(entry))

         return True
-    except HTTPError:
+    except request.HTTPError:
         _log.error('Failed to send callback: {0}'.format(
             traceback.format_exc()))

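six.moves.urllib regroups Python 2's urllib/urllib2 under the Python 3 layout: Request and urlopen live in six.moves.urllib.request, urlencode in six.moves.urllib.parse, and HTTPError in six.moves.urllib.error. A sketch of the intended usage with an illustrative callback URL, binding the module under a name that cannot collide with a local variable called request (the new code above calls request.urlopen(request), which appears to resolve against the local Request object rather than the module):

    from six.moves.urllib import error, parse
    from six.moves.urllib import request as urllib_request

    # Encode the payload; .encode() keeps the POST body as bytes, which
    # Python 3's urlopen requires.
    body = parse.urlencode({'state': 'processed'}).encode('utf-8')

    post = urllib_request.Request(
        'http://example.com/callback',  # illustrative URL
        body,
        headers={'Content-Type': 'application/x-www-form-urlencoded'})

    try:
        urllib_request.urlopen(post)
    except error.URLError:
        pass  # covers HTTPError too; the callback endpoint was unreachable
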
@@ -16,6 +16,7 @@

 import json

+import six
 import werkzeug.utils
 from werkzeug.wrappers import Response as wz_Response
 from mediagoblin.tools.template import render_template
@@ -153,7 +154,7 @@ def json_response(serializable, _disable_cors=False, *args, **kw):
             'Access-Control-Allow-Origin': '*',
             'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
             'Access-Control-Allow-Headers': 'Content-Type, X-Requested-With'}
-        for key, value in cors_headers.iteritems():
+        for key, value in six.iteritems(cors_headers):
             response.headers.set(key, value)

     return response

@@ -24,6 +24,8 @@

 import logging

+import six
+
 _log = logging.getLogger(__name__)


@@ -48,7 +50,7 @@ class StaticDirect(object):
     def __init__(self, domains):
         self.domains = dict(
             [(key, value.rstrip('/'))
-             for key, value in domains.iteritems()])
+             for key, value in six.iteritems(domains)])
         self.cache = {}

     def __call__(self, filepath, domain=None):

@@ -14,6 +14,7 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

+import six

 import jinja2
 from jinja2.ext import Extension
@@ -33,7 +34,6 @@ from mediagoblin.tools.pluginapi import get_hook_templates, hook_transform
 from mediagoblin.tools.timesince import timesince
 from mediagoblin.meddleware.csrf import render_csrf_form_token

-
 SETUP_JINJA_ENVS = {}


@@ -66,9 +66,12 @@ def get_jinja_env(template_loader, locale):
          'jinja2.ext.i18n', 'jinja2.ext.autoescape',
          TemplateHookExtension] + local_exts)

-    template_env.install_gettext_callables(
-        mg_globals.thread_scope.translations.ugettext,
-        mg_globals.thread_scope.translations.ungettext)
+    if six.PY2:
+        template_env.install_gettext_callables(mg_globals.thread_scope.translations.ugettext,
+                                                mg_globals.thread_scope.translations.ungettext)
+    else:
+        template_env.install_gettext_callables(mg_globals.thread_scope.translations.gettext,
+                                                mg_globals.thread_scope.translations.ngettext)

     # All templates will know how to ...
     # ... fetch all waiting messages and remove them from the queue

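The branch is needed because Python 3's gettext translation objects dropped the u-prefixed methods: ugettext/ungettext exist only on Python 2, where the unprefixed gettext/ngettext return bytes; on Python 3, gettext/ngettext already return text. A condensed sketch of the same selection, using a NullTranslations catalog and illustrative strings in place of the per-request catalog:

    import gettext

    import six

    translations = gettext.NullTranslations()  # stands in for the request catalog

    if six.PY2:
        # Python 2: the u* methods are the ones that return unicode.
        gettext_func = translations.ugettext
        ngettext_func = translations.ungettext
    else:
        # Python 3: gettext()/ngettext() already return str.
        gettext_func = translations.gettext
        ngettext_func = translations.ngettext

    print(gettext_func('Add media'))
    print(ngettext_func('%d comment', '%d comments', 2) % 2)
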
@@ -17,6 +17,7 @@
 import gettext
 import pkg_resources

+import six

 from babel import localedata
 from babel.support import LazyProxy
@@ -52,9 +53,9 @@ class ReallyLazyProxy(LazyProxy):
     """
     Like LazyProxy, except that it doesn't cache the value ;)
     """
-    @property
-    def value(self):
-        return self._func(*self._args, **self._kwargs)
+    def __init__(self, func, *args, **kwargs):
+        super(ReallyLazyProxy, self).__init__(func, *args, **kwargs)
+        object.__setattr__(self, '_is_cache_enabled', False)

     def __repr__(self):
         return "<%s for %s(%r, %r)>" % (
@@ -146,8 +147,9 @@ def pass_to_ugettext(*args, **kwargs):
     The reason we can't have a global ugettext method is because
     mg_globals gets swapped out by the application per-request.
     """
-    return mg_globals.thread_scope.translations.ugettext(
-        *args, **kwargs)
+    if six.PY2:
+        return mg_globals.thread_scope.translations.ugettext(*args, **kwargs)
+    return mg_globals.thread_scope.translations.gettext(*args, **kwargs)


 def pass_to_ungettext(*args, **kwargs):
     """
@@ -156,8 +158,9 @@ def pass_to_ungettext(*args, **kwargs):
     The reason we can't have a global ugettext method is because
     mg_globals gets swapped out by the application per-request.
     """
-    return mg_globals.thread_scope.translations.ungettext(
-        *args, **kwargs)
+    if six.PY2:
+        return mg_globals.thread_scope.translations.ungettext(*args, **kwargs)
+    return mg_globals.thread_scope.translations.ngettext(*args, **kwargs)


 def lazy_pass_to_ugettext(*args, **kwargs):

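The old @property override bypassed LazyProxy's caching by recomputing the value on every access; recent Babel versions instead check a private _is_cache_enabled flag, which the new __init__ clears via object.__setattr__ because LazyProxy traps ordinary attribute assignment. A reduced sketch of the resulting behaviour, assuming the installed Babel honours that flag:

    from babel.support import LazyProxy

    calls = []

    def expensive_lookup(message):
        calls.append(message)
        return message.upper()

    class ReallyLazyProxy(LazyProxy):
        """Re-evaluates the wrapped callable on every use instead of caching."""
        def __init__(self, func, *args, **kwargs):
            super(ReallyLazyProxy, self).__init__(func, *args, **kwargs)
            object.__setattr__(self, '_is_cache_enabled', False)

    lazy = ReallyLazyProxy(expensive_lookup, 'hello')
    str(lazy)
    str(lazy)
    print(len(calls))  # expected 2: nothing was cached between uses
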
@@ -17,6 +17,8 @@
 import re
 from unidecode import unidecode

+import six
+
 _punct_re = re.compile(r'[\t !"#:$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')


@@ -27,4 +29,4 @@ def slugify(text, delim=u'-'):
     result = []
     for word in _punct_re.split(text.lower()):
         result.extend(unidecode(word).split())
-    return unicode(delim.join(result))
+    return six.text_type(delim.join(result))

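six.text_type is unicode on Python 2 and str on Python 3, so the slug comes back as text on both. A usage sketch mirroring the function touched above:

    import re

    import six
    from unidecode import unidecode

    _punct_re = re.compile(r'[\t !"#:$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')

    def slugify(text, delim=u'-'):
        result = []
        for word in _punct_re.split(text.lower()):
            result.extend(unidecode(word).split())
        # unicode() is gone on Python 3; six.text_type is the portable spelling.
        return six.text_type(delim.join(result))

    print(slugify(u'Fünf Gänse & ein Pinguin'))  # funf-ganse-ein-pinguin
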
@@ -18,10 +18,15 @@ import os
 import shutil
 import tempfile

+import six
+
+from mediagoblin._compat import py2_unicode
+
 # Actual workbench stuff
 # ----------------------


+@py2_unicode
 class Workbench(object):
     """
     Represent the directory for the workbench
@@ -36,11 +41,8 @@ class Workbench(object):
         """
         self.dir = dir

-    def __unicode__(self):
-        return unicode(self.dir)
-
     def __str__(self):
-        return str(self.dir)
+        return six.text_type(self.dir)

     def __repr__(self):
         try:

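mediagoblin._compat.py2_unicode is not shown in this diff; decorators of this kind (compare Django's python_2_unicode_compatible) let a class define a single text-returning __str__ and, on Python 2 only, move it to __unicode__ while synthesising a byte-string __str__. A hedged sketch of that pattern:

    import six

    def py2_unicode(klass):
        # Sketch only: the real decorator lives in mediagoblin._compat and
        # may differ in detail.
        if six.PY2:
            klass.__unicode__ = klass.__str__
            klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
        return klass

    @py2_unicode
    class Workbench(object):
        def __init__(self, dir):
            self.dir = dir

        def __str__(self):
            return six.text_type(self.dir)

    print(Workbench('/tmp/workbench'))
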
@@ -18,6 +18,8 @@ import logging
 import datetime
 import json

+import six
+
 from mediagoblin import messages, mg_globals
 from mediagoblin.db.models import (MediaEntry, MediaTag, Collection,
                                    CollectionItem, User)
@@ -178,7 +180,7 @@ def media_post_comment(request, media):
     comment = request.db.MediaComment()
     comment.media_entry = media.id
     comment.author = request.user.id
-    comment.content = unicode(request.form['comment_content'])
+    comment.content = six.text_type(request.form['comment_content'])

     # Show error message if commenting is disabled.
     if not mg_globals.app_config['allow_comments']:
@@ -212,7 +214,7 @@ def media_preview_comment(request):
     if not request.is_xhr:
         return render_404(request)

-    comment = unicode(request.form['comment_content'])
+    comment = six.text_type(request.form['comment_content'])
     cleancomment = { "content":cleaned_markdown_conversion(comment)}

     return Response(json.dumps(cleancomment))

paste.ini  (43 changed lines)

@@ -6,19 +6,17 @@
 debug = false

 [pipeline:main]
-pipeline = errors routing
-
-[composite:routing]
-use = egg:Paste#urlmap
-/ = mediagoblin
-/mgoblin_media/ = publicstore_serve
-/mgoblin_static/ = mediagoblin_static
-/theme_static/ = theme_static
-/plugin_static/ = plugin_static
+# pipeline = errors mediagoblin
+pipeline = mediagoblin

 [app:mediagoblin]
 use = egg:mediagoblin#app
 config = %(here)s/mediagoblin_local.ini %(here)s/mediagoblin.ini
+# static paths
+/mgoblin_media = %(here)s/user_dev/media/public
+/mgoblin_static = %(here)s/mediagoblin/static
+/theme_static = %(here)s/user_dev/theme_static
+/plugin_static = %(here)s/user_dev/plugin_static

 [loggers]
 keys = root
@@ -42,26 +40,6 @@ formatter = generic
 [formatter_generic]
 format = %(asctime)s %(levelname)-7.7s [%(name)s] %(message)s

-[app:publicstore_serve]
-use = egg:Paste#static
-document_root = %(here)s/user_dev/media/public/
-cache_max_age = 604800
-
-[app:mediagoblin_static]
-use = egg:Paste#static
-document_root = %(here)s/mediagoblin/static/
-cache_max_age = 86400
-
-[app:theme_static]
-use = egg:Paste#static
-document_root = %(here)s/user_dev/theme_static/
-cache_max_age = 86400
-
-[app:plugin_static]
-use = egg:Paste#static
-document_root = %(here)s/user_dev/plugin_static/
-cache_max_age = 86400
-
 [filter:errors]
 use = egg:mediagoblin#errors
 debug = false
@@ -74,9 +52,14 @@ debug = false
 # The server that is run by default.
 # By default, should only be accessable locally
 [server:main]
-use = egg:Paste#http
+use = egg:gunicorn
 host = 127.0.0.1
 port = 6543
+# Gunicorn settings. See http://docs.gunicorn.org/en/19.0/settings.html
+# for more information about configuring Gunicorn
+proc_name = gmg
+reload = true
+accesslog = -

 #######################
 # Helper server configs

setup.py  (136 changed lines)

@@ -14,17 +14,25 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

+from __future__ import print_function
+
 from setuptools import setup, find_packages
+from io import open
 import os
 import re

+import sys
+
+PY2 = sys.version_info[0] == 2  # six is not installed yet
+
 READMEFILE = "README"
 VERSIONFILE = os.path.join("mediagoblin", "_version.py")
 VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"


 def get_version():
-    verstrline = open(VERSIONFILE, "rt").read()
+    with open(VERSIONFILE, "rt") as fobj:
+        verstrline = fobj.read()
     mo = re.search(VSRE, verstrline, re.M)
     if mo:
         return mo.group(1)
@@ -32,6 +40,60 @@ def get_version():
     raise RuntimeError("Unable to find version string in %s." %
                        VERSIONFILE)

+py2_only_install_requires = []
+if PY2:
+    py2_only_install_requires.append('argparse')  # only for < 2.7
+    py2_only_install_requires.append('PasteScript')
+    # newer sqlalchemy-migrate requires pbr which BREAKS EVERYTHING AND IS
+    # TERRIBLE AND IS THE END OF ALL THINGS
+    # I'd love to remove this restriction.
+    py2_only_install_requires.append('sqlalchemy-migrate<0.8')
+    # # Annoying. Please remove once we can! We only indirectly
+    # # use pbr, and currently it breaks things, presumably till
+    # # their next release.
+    # py2_only_install_requires.append('pbr==0.5.22')
+    py2_only_install_requires.append('mock')  # mock is in the stdlib for 3.3+
+
+install_requires = [
+    'gunicorn',
+    'alembic==0.6.6',
+    'python-dateutil',
+    'wtforms',
+    'py-bcrypt',
+    'pytest>=2.3.1',
+    'pytest-xdist',
+    'werkzeug>=0.7',
+    'celery>=3.0',
+    'kombu',
+    'jinja2',
+    'Babel>=1.3',
+    'webtest<2',
+    'ConfigObj',
+    'Markdown',
+    'sqlalchemy<0.9.0, >0.8.0',
+    'itsdangerous',
+    'pytz',
+    # PLEASE change this when we can; a dependency is forcing us to set this
+    # specific number and it is breaking setup.py develop
+    'six==1.5.2',
+    'oauthlib',
+    'unidecode',
+    'jsonschema',
+    'ExifRead',  # TODO(berker): Install develop branch for Python 3
+    'PasteDeploy',
+    'requests',
+    'pyld',
+    # This is optional:
+    # 'translitcodec',
+    # For now we're expecting that users will install this from
+    # their package managers.
+    # 'lxml',
+    # 'Pillow',
+] + py2_only_install_requires
+
+with open(READMEFILE, encoding="utf-8") as fobj:
+    long_description = fobj.read()
+
 try:
     setup(
         name="mediagoblin",
@@ -40,57 +102,7 @@ try:
         zip_safe=False,
         include_package_data = True,
         # scripts and dependencies
-        install_requires=[
-            'setuptools',
-            'python-dateutil',
-            'PasteScript',
-            'wtforms',
-            'py-bcrypt',
-            'pytest>=2.3.1',
-            'pytest-xdist',
-            'werkzeug>=0.7',
-            'celery>=3.0',
-            'kombu',
-            'jinja2',
-            'sphinx',
-            'Babel>=1.0',
-            'argparse',
-            'webtest<2',
-            'ConfigObj',
-            'Markdown',
-            'sqlalchemy<0.9.0, >0.8.0',
-            # newer sqlalchemy-migrate requires pbr which BREAKS EVERYTHING AND IS
-            # TERRIBLE AND IS THE END OF ALL THINGS
-            # I'd love to remove this restriction.
-            'sqlalchemy-migrate<0.8',
-            'mock',
-            'itsdangerous',
-            'pytz',
-            'six>=1.4.1',
-            'oauthlib',
-            'unidecode',
-            'jsonschema',
-            'requests',
-            'pyld',
-            'ExifRead',
-
-            # PLEASE change this when we can; a dependency is forcing us to set this
-            # specific number and it is breaking setup.py develop
-            'six==1.5.2'
-
-            ## Annoying. Please remove once we can! We only indirectly
-            ## use pbr, and currently it breaks things, presumably till
-            ## their next release.
-            # 'pbr==0.5.22',
-
-            ## This is optional!
-            # 'translitcodec',
-            ## For now we're expecting that users will install this from
-            ## their package managers.
-            # 'lxml',
-            # 'PIL',
-            ],
-        # requires=['gst'],
+        install_requires=install_requires,
         test_suite='nose.collector',
         entry_points="""\
         [console_scripts]
@@ -113,7 +125,7 @@ try:
         author='Free Software Foundation and contributors',
         author_email='cwebber@gnu.org',
         url="http://mediagoblin.org/",
-        long_description=open(READMEFILE).read(),
+        long_description=long_description,
         description='MediaGoblin is a web application for publishing all kinds of media',
         classifiers=[
             "Development Status :: 3 - Alpha",
@@ -121,22 +133,26 @@ try:
             "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
             "Operating System :: OS Independent",
             "Programming Language :: Python",
+            'Programming Language :: Python :: 2',
             'Programming Language :: Python :: 2.6',
             'Programming Language :: Python :: 2.7',
+            'Programming Language :: Python :: 3',
+            'Programming Language :: Python :: 3.3',
+            'Programming Language :: Python :: 3.4',
             "Topic :: Internet :: WWW/HTTP :: Dynamic Content"
         ],
     )
-except TypeError, e:
+except TypeError as e:
+    import sys
+
     # Check if the problem is caused by the sqlalchemy/setuptools conflict
     msg_as_str = str(e)
     if not (msg_as_str == 'dist must be a Distribution instance'):
         raise

     # If so, tell the user it is OK to just run the script again.
-    print "\n\n---------- NOTE ----------"
-    print "The setup.py command you ran failed."
-    print ""
-    print ("It is a known possible failure. Just run it again. It works the "
-           "second time.")
-    import sys
+    print("\n\n---------- NOTE ----------", file=sys.stderr)
+    print("The setup.py command you ran failed.\n", file=sys.stderr)
+    print("It is a known possible failure. Just run it again. It works the "
+          "second time.", file=sys.stderr)
     sys.exit(1)