Housekeeping update
* Removed flask-migrate from the project, as it did not work as intended for this project
* Removed redundant routes
This commit is contained in:
parent 594651872c
commit 0cdd3a16dc
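With Flask-Migrate removed, the project no longer gets its schema from Alembic migrations. A minimal sketch of the usual fallback, assuming a plain Flask-SQLAlchemy setup; the create_app factory name, the SQLite URI, and the cut-down model are illustrative stand-ins, not code taken from this commit:

# sketch: creating tables without Flask-Migrate (illustrative, not part of this commit)
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()  # stands in for labertasche.database.labertasche_db


class TComments(db.Model):  # hypothetical minimal model, just for this sketch
    comments_id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(255))


def create_app():
    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///labertasche.db"  # assumed URI
    db.init_app(app)
    with app.app_context():
        # create_all() builds any missing tables from the model metadata;
        # unlike Alembic it will not alter or migrate existing columns.
        db.create_all()
    return app

The trade-off of dropping the migration tooling is that later schema changes have to be applied by hand or by recreating the database.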
@@ -1,71 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# /**********************************************************************************
# * _author : Domeniko Gentner
# * _mail : code@tuxstash.de
# * _repo : https://git.tuxstash.de/gothseidank/labertasche
# * _license : This project is under MIT License
# *********************************************************************************/
from . import bp_jsconnector
from flask import request, redirect
from flask_login import login_required
from labertasche.database import labertasche_db as db
from labertasche.models import TComments, TEmail
from labertasche.helper import export_location
import re


# @bp_jsconnector.route('/block-mail/<int:location_id>/<int:comment_id>', methods=["GET"])
# @login_required
# def dashboard_review_spam_block_mail(location_id, comment_id):
#     comment = db.session.query(TComments).filter(TComments.comments_id == comment_id).first()
#     if comment:
#         addr = db.session.query(TEmail).filter(TEmail.email == comment.email).first()
#         if addr:
#             setattr(addr, 'is_allowed', False)
#             setattr(addr, 'is_blocked', True)
#         else:
#             new_mail = {
#                 "email": comment.first().email,
#                 "is_allowed": False,
#                 "is_blocked": True
#             }
#             db.session.add(TEmail(**new_mail))
#
#         # Delete all comments made by this mail address
#         db.session.query(TComments).filter(TComments.email == comment.email).delete()
#         db.session.commit()
#
#     url = re.match("^(.*[/])", request.referrer)[0]
#     export_location(location_id)
#     return redirect(f"{url}/{location_id}")
#

# @bp_jsconnector.route('/allow-user/<int:location_id>/<int:comment_id>', methods=["GET"])
# @login_required
# def dashboard_review_spam_allow_user(location_id, comment_id):
#     comment = db.session.query(TComments).filter(TComments.comments_id == comment_id).first()
#     if comment:
#         addr = db.session.query(TEmail).filter(TEmail.email == comment.email).first()
#         if addr:
#             setattr(addr, 'is_allowed', True)
#             setattr(addr, 'is_blocked', False)
#         else:
#             new_mail = {
#                 "email": comment.email,
#                 "is_allowed": True,
#                 "is_blocked": False
#             }
#             db.session.add(TEmail(**new_mail))
#
#         # Allow all comments made by this mail address
#         all_comments = db.session.query(TComments).filter(TComments.email == comment.email).all()
#         if all_comments:
#             for comment in all_comments:
#                 setattr(comment, 'is_published', True)
#                 setattr(comment, 'is_spam', False)
#
#     db.session.commit()
#     url = re.match("^(.*[/])", request.referrer)[0]
#     export_location(location_id)
#     return redirect(f"{url}/{location_id}")
@@ -1 +0,0 @@
Generic single-database configuration.
@@ -1,45 +0,0 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1,96 +0,0 @@
from __future__ import with_statement

import logging
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option(
    'sqlalchemy.url',
    str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -1,56 +0,0 @@
"""empty message

Revision ID: a5937005972e
Revises:
Create Date: 2020-12-17 17:44:38.813991

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'a5937005972e'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('t_comments', schema=None) as batch_op:
        batch_op.add_column(sa.Column('project_id', sa.Integer(), nullable=False, default=1))
        batch_op.create_unique_constraint(batch_op.f('uq_t_comments_content'), ['content'])
        batch_op.create_foreign_key(batch_op.f('fk_t_comments_project_id_t_projects'), 't_projects', ['project_id'], ['id_project'])

    with op.batch_alter_table('t_email', schema=None) as batch_op:
        batch_op.add_column(sa.Column('project_id', sa.Integer(), nullable=False, default=1))
        batch_op.create_unique_constraint(batch_op.f('uq_t_email_email'), ['email'])
        batch_op.create_foreign_key(batch_op.f('fk_t_email_project_id_t_projects'), 't_projects', ['project_id'], ['id_project'])

    with op.batch_alter_table('t_location', schema=None) as batch_op:
        batch_op.add_column(sa.Column('project_id', sa.Integer(), nullable=False, default=1))
        batch_op.create_unique_constraint(batch_op.f('uq_t_location_location'), ['location'])
        batch_op.create_foreign_key(batch_op.f('fk_t_location_project_id_t_projects'), 't_projects', ['project_id'], ['id_project'])

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('t_location', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_t_location_project_id_t_projects'), type_='foreignkey')
        batch_op.drop_constraint(batch_op.f('uq_t_location_location'), type_='unique')
        batch_op.drop_column('project_id')

    with op.batch_alter_table('t_email', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_t_email_project_id_t_projects'), type_='foreignkey')
        batch_op.drop_constraint(batch_op.f('uq_t_email_email'), type_='unique')
        batch_op.drop_column('project_id')

    with op.batch_alter_table('t_comments', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_t_comments_project_id_t_projects'), type_='foreignkey')
        batch_op.drop_constraint(batch_op.f('uq_t_comments_content'), type_='unique')
        batch_op.drop_column('project_id')

    # ### end Alembic commands ###