Merge pull request #281 from simple-login/batch-import

User can batch import aliases
This commit is contained in:
Son Nguyen Kim 2020-09-10 20:20:02 +02:00 committed by GitHub
commit 43babcf2d9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 278 additions and 10 deletions

View file

@ -225,6 +225,7 @@ JOB_ONBOARDING_1 = "onboarding-1"
JOB_ONBOARDING_2 = "onboarding-2"
JOB_ONBOARDING_3 = "onboarding-3"
JOB_ONBOARDING_4 = "onboarding-4"
# background job that processes a user-uploaded CSV of aliases to import
JOB_BATCH_IMPORT = "batch-import"

# for pagination
PAGE_LIMIT = 20

View file

@ -24,4 +24,5 @@ from .views import (
recovery_code,
contact_detail,
setup_done,
batch_import,
)

View file

@ -0,0 +1,57 @@
{% extends 'default.html' %}
{% set active_page = "setting" %}
{% block title %}
Alias Batch Import
{% endblock %}
{% block default_content %}
<div class="card">
<div class="card-body">
<h1 class="h3">Alias Batch Import</h1>
<p>
The import can take several minutes.
Please come back to this page to verify the import status. <br>
Only aliases created with <b>your verified domains</b> can be imported.<br>
If an alias already exists, it won't be imported.
</p>
{# static CSV with the expected "alias","note" header row #}
<a href="/static/batch_import_template.csv" download>Download CSV Template</a>
<hr>
{# POSTs back to the same route (dashboard.batch_import_route); the
   "alias-file" field name is what the view reads from request.files #}
<form method="post" enctype="multipart/form-data" class="mt-4">
<input required type="file"
name="alias-file"
accept=".csv"
class="form-control-file">
<label>Only <b>.csv</b> file is supported. </label> <br>
<button class="btn btn-success mt-2">Upload</button>
</form>
{# history of this user's previous imports, newest status shown per row #}
{% if batch_imports %}
<hr>
<h2 class="h3 mt-7">Batch imports</h2>
<table class="table">
<thead>
<tr>
<th scope="col">Uploaded</th>
<th scope="col">Number Alias Imported</th>
<th scope="col">Status</th>
</tr>
</thead>
<tbody>
{% for batch_import in batch_imports %}
<tr>
<td>{{ batch_import.created_at | dt }}</td>
<td>{{ batch_import.nb_alias() }}</td>
<td>{% if batch_import.processed %} Processed ✅ {% else %} Pending {% endif %}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
</div>
</div>
{% endblock %}

View file

@ -296,6 +296,19 @@
</div>
</div>
<div class="card">
<div class="card-body">
<div class="card-title">Import alias</div>
<div class="mb-3">
You can import your aliases created on other platforms into SimpleLogin.
</div>
<a href="{{ url_for('dashboard.batch_import_route') }}" class="btn btn-outline-primary">
Batch Import
</a>
</div>
</div>
<div class="card">
<div class="card-body">
<div class="card-title">Export Data</div>

View file

@ -0,0 +1,53 @@
import arrow
from flask import render_template, flash, request, redirect, url_for
from flask_login import login_required, current_user
from flask_wtf import FlaskForm
from wtforms import StringField, validators
from app import s3
from app.config import JOB_BATCH_IMPORT
from app.dashboard.base import dashboard_bp
from app.extensions import db
from app.log import LOG
from app.models import CustomDomain, File, BatchImport, Job
from app.utils import random_string
@dashboard_bp.route("/batch_import", methods=["GET", "POST"])
@login_required
def batch_import_route():
    """Upload a CSV of aliases and schedule a background import job.

    GET: render the batch-import page with the user's previous imports.
    POST: store the uploaded file on S3, create a BatchImport record and
    enqueue a JOB_BATCH_IMPORT job that processes it asynchronously.
    """
    # Batch import only works for aliases on the user's own verified
    # custom domains: the job handler skips every other alias.
    if not current_user.verified_custom_domains():
        flash("Alias batch import is only available for custom domains", "warning")

    batch_imports = BatchImport.query.filter_by(user_id=current_user.id).all()

    if request.method == "POST":
        # request.files["alias-file"] raises KeyError (-> HTTP 400) when the
        # field is absent, e.g. on a hand-crafted POST; handle it explicitly.
        alias_file = request.files.get("alias-file")
        if not alias_file:
            flash("Please select a file to upload", "warning")
            return redirect(url_for("dashboard.batch_import_route"))

        file_path = random_string(20) + ".csv"
        file = File.create(user_id=current_user.id, path=file_path)
        s3.upload_from_bytesio(file_path, alias_file)
        # flush so file.id is assigned before it's referenced below
        db.session.flush()
        LOG.d("upload file %s to s3 at %s", file, file_path)

        bi = BatchImport.create(user_id=current_user.id, file_id=file.id)
        db.session.flush()
        LOG.debug("Add a batch import job %s for %s", bi, current_user)

        # Schedule the import to run asynchronously: parsing the CSV and
        # creating aliases can take several minutes.
        Job.create(
            name=JOB_BATCH_IMPORT,
            payload={"batch_import_id": bi.id},
            run_at=arrow.now(),
        )
        db.session.commit()

        flash(
            "The file has been uploaded successfully and the import will start shortly",
            "success",
        )

        # post/redirect/get so a refresh doesn't re-upload the file
        return redirect(url_for("dashboard.batch_import_route"))

    return render_template("dashboard/batch_import.html", batch_imports=batch_imports)

View file

@ -94,6 +94,9 @@ class File(db.Model, ModelMixin):
def get_url(self, expires_in=3600):
    """Return a pre-signed S3 URL for this file, valid for *expires_in* seconds."""
    return s3.get_url(self.path, expires_in)
def __repr__(self):
    # identify the file by its S3 path in logs
    return f"<File {self.path}>"
class EnumE(enum.Enum):
@classmethod
@ -817,6 +820,13 @@ class Alias(db.Model, ModelMixin):
db.Boolean, nullable=False, default=False, server_default="0"
)
# to know whether an alias is added using a batch import
# Nullable: aliases created normally have no batch import. ON DELETE
# SET NULL keeps the alias alive if the import record is ever removed.
batch_import_id = db.Column(
    db.ForeignKey("batch_import.id", ondelete="SET NULL"),
    nullable=True,
    default=None,
)
user = db.relationship(User)
mailbox = db.relationship("Mailbox", lazy="joined")
@ -1742,3 +1752,19 @@ class Monitoring(db.Model, ModelMixin):
incoming_queue = db.Column(db.Integer, nullable=False)
active_queue = db.Column(db.Integer, nullable=False)
deferred_queue = db.Column(db.Integer, nullable=False)
class BatchImport(db.Model, ModelMixin):
    """One CSV upload of aliases to import for a user.

    The actual import is performed asynchronously by the job runner;
    `processed` flips to True once the job has picked up the file.
    """

    # owner of the import; rows are removed when the user is deleted
    user_id = db.Column(db.ForeignKey(User.id, ondelete="cascade"), nullable=False)
    # the uploaded CSV, stored on S3 via the File model
    file_id = db.Column(db.ForeignKey(File.id, ondelete="cascade"), nullable=False)
    # whether the background job has handled this import
    processed = db.Column(db.Boolean, nullable=False, default=False)
    # free-text result summary - not written by the visible code; TODO confirm usage
    summary = db.Column(db.Text, nullable=True, default=None)

    file = db.relationship(File)
    user = db.relationship(User)

    def nb_alias(self):
        """Return the number of aliases created by this batch import."""
        return Alias.query.filter_by(batch_import_id=self.id).count()

    def __repr__(self):
        return f"<BatchImport {self.id}>"

View file

@ -2,20 +2,36 @@
Run scheduled jobs.
Not meant for running job at precise time (+- 1h)
"""
import csv
import time
import arrow
import requests
from app import s3
from app.config import (
JOB_ONBOARDING_1,
JOB_ONBOARDING_2,
JOB_ONBOARDING_3,
JOB_ONBOARDING_4,
JOB_BATCH_IMPORT,
)
from app.email_utils import (
send_email,
render,
get_email_domain_part,
)
from app.email_utils import send_email, render
from app.extensions import db
from app.log import LOG
from app.models import User, Job
from app.models import (
User,
Job,
BatchImport,
Alias,
DeletedAlias,
DomainDeletedAlias,
CustomDomain,
)
from server import create_app
@ -71,6 +87,55 @@ def onboarding_mailbox(user):
)
def handle_batch_import(batch_import: BatchImport):
    """Download the CSV attached to *batch_import* and create its aliases.

    Each row must have an "alias" column; the "note" column is optional.
    Rows are skipped (not failed) when the alias domain is not one of the
    user's verified custom domains, or when the alias already exists or
    was previously deleted.
    """
    user = batch_import.user

    # Mark as processed *before* doing the work so a crash mid-import does
    # not make the job scheduler pick this import up again and again.
    batch_import.processed = True
    db.session.commit()

    LOG.debug("Start batch import for %s %s", batch_import, user)
    file_url = s3.get_url(batch_import.file.path)

    LOG.d("Download file %s from %s", batch_import.file, file_url)
    r = requests.get(file_url)
    if r.status_code != 200:
        # nothing can be fixed from here: log and give up on this import
        LOG.warning("cannot download %s, status code %s", file_url, r.status_code)
        return

    lines = [line.decode() for line in r.iter_lines()]
    reader = csv.DictReader(lines)

    for row in reader:
        full_alias = row["alias"].lower().strip().replace(" ", "")
        # the "note" column is optional: row["note"] would raise KeyError
        # on a CSV that only has the "alias" header
        note = row.get("note")

        alias_domain = get_email_domain_part(full_alias)
        custom_domain = CustomDomain.get_by(domain=alias_domain)

        # only allow aliases on the importing user's verified custom domains
        if (
            not custom_domain
            or not custom_domain.verified
            or custom_domain.user_id != user.id
        ):
            LOG.debug("domain %s can't be used %s", alias_domain, user)
            continue

        # skip aliases that exist or were deleted before (can't be re-created)
        if (
            Alias.get_by(email=full_alias)
            or DeletedAlias.get_by(email=full_alias)
            or DomainDeletedAlias.get_by(email=full_alias)
        ):
            LOG.d("alias already used %s", full_alias)
            continue

        alias = Alias.create(
            user_id=user.id,
            email=full_alias,
            note=note,
            mailbox_id=user.default_mailbox_id,
            custom_domain_id=custom_domain.id,
            batch_import_id=batch_import.id,
        )
        db.session.commit()
        LOG.d("Create %s", alias)
if __name__ == "__main__":
while True:
# run a job 1h earlier or later is not a big deal ...
@ -129,6 +194,11 @@ if __name__ == "__main__":
LOG.d("send onboarding pgp email to user %s", user)
onboarding_pgp(user)
elif job.name == JOB_BATCH_IMPORT:
batch_import_id = job.payload.get("batch_import_id")
batch_import = BatchImport.get(batch_import_id)
handle_batch_import(batch_import)
else:
LOG.exception("Unknown job name %s", job.name)

View file

@ -0,0 +1,44 @@
"""empty message
Revision ID: 84471852b610
Revises: b82bcad9accf
Create Date: 2020-09-10 20:15:10.956801
"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '84471852b610'
down_revision = 'b82bcad9accf'
branch_labels = None
depends_on = None
def upgrade():
    """Create the batch_import table and add alias.batch_import_id."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('batch_import',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('created_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=False),
    sa.Column('updated_at', sqlalchemy_utils.types.arrow.ArrowType(), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('file_id', sa.Integer(), nullable=False),
    sa.Column('processed', sa.Boolean(), nullable=False),
    sa.Column('summary', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ondelete='cascade'),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='cascade'),
    sa.PrimaryKeyConstraint('id')
    )
    op.add_column('alias', sa.Column('batch_import_id', sa.Integer(), nullable=True))
    # Name the FK explicitly instead of passing None: an unnamed constraint
    # cannot be referenced by downgrade(). The name matches PostgreSQL's
    # default naming convention (<table>_<column>_fkey) for compatibility
    # with databases already migrated with the auto-generated name.
    op.create_foreign_key(
        'alias_batch_import_id_fkey',
        'alias',
        'batch_import',
        ['batch_import_id'],
        ['id'],
        ondelete='SET NULL',
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop alias.batch_import_id and the batch_import table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_constraint(None, ...) raises ValueError in Alembic: a
    # constraint must be named to be dropped. Use the PostgreSQL-default
    # generated name (<table>_<column>_fkey), which is what the upgrade's
    # unnamed create_foreign_key produced on PostgreSQL.
    op.drop_constraint('alias_batch_import_id_fkey', 'alias', type_='foreignkey')
    op.drop_column('alias', 'batch_import_id')
    op.drop_table('batch_import')
    # ### end Alembic commands ###

3
static/batch_import_template.csv vendored Normal file
View file

@ -0,0 +1,3 @@
"alias","note"
"ebay@my-domain.com","Used on eBay"
"facebook@my-domain.com","Used on Facebook, Instagram."
1 alias note
2 ebay@my-domain.com Used on eBay
3 facebook@my-domain.com Used on Facebook, Instagram.