nk20-scripts/management/commands/import_account.py


# Copyright (C) 2018-2020 by BDE ENS Paris-Saclay
# SPDX-License-Identifier: GPL-3.0-or-later
import psycopg2 as pg
import psycopg2.extras as pge
import datetime
import json
from django.template.loader import render_to_string
from django.utils.timezone import make_aware, now
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import transaction
from note.models import Note, NoteUser, NoteClub
from note.models import Alias
from member.models import Club, Profile
from ._import_utils import ImportCommand, BulkCreateManager, timed
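
# Default membership parameters applied to every imported club
# (used in the club branch of import_account below).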
M_DURATION = 396
M_START = datetime.date(2019, 8, 1)
M_END = datetime.date(2020, 9, 30)
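# Maps an NK15 idbde to the pk of the corresponding NK20 note. Seeded with the
# special accounts below, filled in as accounts are imported, and optionally
# dumped to JSON via the `save` option inherited from ImportCommand,
# presumably so that later import scripts can reuse the mapping.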
MAP_IDBDE = {
    -4: 2,  # Carte bancaire
    -3: 4,  # Virement
    -2: 1,  # Espèces
    -1: 3,  # Chèque
    0: 5,   # BDE
}
# Some aliases have already been created in the fixtures.
ALIAS_SET = {a[0] for a in Alias.objects.all().values_list("normalized_name")}
# Some people might lose aliases because of the normalization. We warn them about it.
LOST_ALIASES = {}
# In some rare cases a username conflicts with another one. We change it and warn the user.
CHANGED_USERNAMES = []
note_user_type = ContentType.objects.get(app_label="note", model="noteuser")
note_club_type = ContentType.objects.get(app_label="note", model="noteclub")


class Command(ImportCommand):
    """
    Import command for the people base data (comptes and aliases).
    """

    def add_arguments(self, parser):
        parser.add_argument('-a', '--alias', action='store_true', help="import aliases")
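
    # The special accounts (idbde <= 0) are presumed to exist already (created by
    # the fixtures) and are mapped through MAP_IDBDE; only their balance is imported.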
    def import_special_account(self, cur):
        cur.execute("SELECT idbde, solde FROM comptes WHERE idbde <= 0;")
        for row in cur:
            note = Note.objects.get(pk=MAP_IDBDE[row["idbde"]])
            note.balance = row["solde"]
            note.save()

    @timed
    @transaction.atomic
    def import_account(self, cur, chunk_size):
        """
        Import every account of the NK15 database in batches.
        Every model is built by hand and `.save()` is never called, so custom
        save() logic and pre/post-save signals do not run.
        """
        # Get promotion and date of creation of the account
        cur.execute("SELECT idbde, MIN(date) AS created_at, MIN(annee) AS promo FROM adhesions"
                    " GROUP BY idbde ORDER BY promo, created_at;")
        MAP_IDBDE_PROMOTION = {}
        for row in cur:
            MAP_IDBDE_PROMOTION[row["idbde"]] = row

        cur.execute("SELECT * FROM comptes WHERE idbde <= 0 ORDER BY idbde;")
        for row in cur:
            note = Note.objects.get(pk=MAP_IDBDE[row["idbde"]])
            note.balance = row["solde"]
            note.save()

        cur.execute("SELECT * FROM comptes WHERE idbde > 0 ORDER BY idbde;")
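        # Primary keys are assigned by hand so that the related objects (user,
        # profile, note, alias) can reference each other before anything is
        # written to the database. The lowest pks are already taken by the
        # fixture data (e.g. note pk 6 is the Kfet note).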
        pk_club = 3
        pk_user = 1
        pk_profile = 1
        pk_note = 7  # pk 6 is Kfet!
        n = cur.rowcount
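        # BulkCreateManager (from ._import_utils) buffers the objects passed to
        # add() and is expected to flush them with bulk_create() roughly every
        # `chunk_size` objects; done() writes whatever is left.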
        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        for idx, row in enumerate(cur):
            pseudo = row["pseudo"]
            pseudo_norm = Alias.normalize(pseudo)
            self.update_line(idx, n, pseudo)
            # clean the pseudo (the normalized pseudo must be unique and not empty)
            if not pseudo_norm or pseudo_norm in ALIAS_SET:
                pseudo = pseudo + str(row["idbde"])
                CHANGED_USERNAMES.append((pk_note, row["pseudo"], pseudo))
                ALIAS_SET.add(Alias.normalize(pseudo))  # keep the set in sync with the renamed pseudo
            else:
                ALIAS_SET.add(pseudo_norm)
            # common note fields; NK15 dates are naive and must be made timezone-aware
            note_dict = {
                "pk": pk_note,
                "balance": row['solde'],
                "last_negative": None,
                "is_active": not row["bloque"],
                "display_image": "pic/default.png",
            }
            if row["last_negatif"] is not None:
                note_dict["last_negative"] = make_aware(row["last_negatif"])
            if row["type"] == "personne":
                # sanitize password
                if row["passwd"] != "*|*" and not row["deleted"]:
                    passwd_nk15 = "$".join(["custom_nk15", "1", row["passwd"]])
                else:
                    passwd_nk15 = ''
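                # The NK15 hash is kept under a "custom_nk15$1$<hash>" label; this
                # assumes a matching custom password hasher is registered on the
                # NK20 side so that members can keep their old password.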
                if row["idbde"] not in MAP_IDBDE_PROMOTION:
                    # NK12 bug. Applying default values
                    MAP_IDBDE_PROMOTION[row["idbde"]] = {
                        "promo": 2014,
                        "created_at": datetime.datetime(2014, 9, 1, 0, 0, 0),
                    }
                obj_dict = {
                    "pk": pk_user,
                    "username": row["pseudo"],
                    "password": passwd_nk15,
                    "first_name": row["prenom"],
                    "last_name": row["nom"],
                    "email": row["mail"],
                    "is_active": not row["bloque"],
                    "date_joined": make_aware(MAP_IDBDE_PROMOTION[row["idbde"]]["created_at"]),
                }
                profile_dict = {
                    "pk": pk_profile,
                    "user_id": pk_user,
                    "phone_number": row['tel'],
                    "address": row['adresse'],
                    "paid": row['normalien'],
                    "registration_valid": True,
                    "email_confirmed": True,
                    "promotion": MAP_IDBDE_PROMOTION[row["idbde"]]["promo"],
                    "report_frequency": max(row["report_period"], 0),
                    "last_report": make_aware(row["previous_report_date"]),
                }
                note_dict["created_at"] = make_aware(MAP_IDBDE_PROMOTION[row["idbde"]]["created_at"])
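                # bulk_create() never calls save(), so django-polymorphic cannot
                # fill in polymorphic_ctype itself; set it explicitly.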
                note_dict["polymorphic_ctype"] = note_user_type
                note_user_dict = {
                    "pk": pk_note,
                    "user_id": pk_user,
                }
                alias_dict = {
                    "pk": pk_note,
                    "name": pseudo,
                    "normalized_name": Alias.normalize(pseudo),
                    "note_id": pk_note,
                }
                bulk_mgr.add(User(**obj_dict),
                             Profile(**profile_dict),
                             Note(**note_dict),
                             NoteUser(**note_user_dict),
                             Alias(**alias_dict))
                pk_user += 1
                pk_profile += 1
            else:  # club
                obj_dict = {
                    "pk": pk_club,
                    "name": row["pseudo"],
                    "email": row["mail"],
                    "parent_club_id": 1,  # All clubs depend on the BDE by default
                    "membership_duration": M_DURATION,
                    "membership_start": M_START,
                    "membership_end": M_END,
                    "membership_fee_paid": 0,
                    "membership_fee_unpaid": 0,
                }
                note_club_dict = {
                    "pk": pk_note,
                    "club_id": pk_club,
                }
                alias_dict = {
                    "pk": pk_note,
                    "name": pseudo,
                    "normalized_name": Alias.normalize(pseudo),
                    "note_id": pk_note,
                }
                note_dict["created_at"] = make_aware(row["previous_report_date"])  # Not perfect, but a good approximation
                note_dict["polymorphic_ctype"] = note_club_type
                bulk_mgr.add(Club(**obj_dict),
                             Note(**note_dict),
                             NoteClub(**note_club_dict),
                             Alias(**alias_dict))
                pk_club += 1
            # row import completed
            MAP_IDBDE[row["idbde"]] = pk_note
            pk_note += 1
        bulk_mgr.done()

    @timed
    def import_alias(self, cur, chunk_size):
        """
        Import the aliases from the NK15.
        We rely on the validation of the models, but it is slow.
        """
        cur.execute("SELECT * FROM aliases ORDER BY id;")
        n = cur.rowcount
        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        pk_alias = Alias.objects.order_by('-id').first().id + 1
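        # Start numbering after the aliases that already exist (fixtures and the
        # ones created by import_account).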
        for idx, row in enumerate(cur):
            alias_name = row["alias"]
            alias_name = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
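            # Assumption: the Alias name column is capped at 255 characters, so
            # longer NK15 aliases are truncated and suffixed with an ellipsis.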
            alias_norm = Alias.normalize(alias_name)
            self.update_line(idx, n, alias_norm)
            # clean the alias (the normalized name must be unique and not empty)
            if not alias_norm or alias_norm in ALIAS_SET:
                LOST_ALIASES.setdefault(MAP_IDBDE[row["idbde"]], []).append(alias_name)
                continue
            ALIAS_SET.add(alias_norm)
            obj_dict = {
                "pk": pk_alias,
                "note_id": MAP_IDBDE[row["idbde"]],
                "name": alias_name,
                "normalized_name": alias_norm,
            }
            pk_alias += 1
            bulk_mgr.add(Alias(**obj_dict))
        bulk_mgr.done()

    def handle(self, *args, **kwargs):
        # default args, provided by ImportCommand
        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
        # connecting to the nk15 database
        conn = pg.connect(database=nk15db, user=nk15user)
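        # a DictCursor lets rows be read by column name (row["pseudo"], ...)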
        cur = conn.cursor(cursor_factory=pge.DictCursor)
        self.import_special_account(cur)
        self.import_account(cur, kwargs["chunk"])
        # Alias management
        if kwargs["alias"]:
            self.import_alias(cur, kwargs["chunk"])
        # save the idbde -> note pk mapping to disk
        if kwargs["save"]:
            filename = kwargs["save"]
            with open(filename, 'w') as fp:
                json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)

        # Warn the users whose pseudo had to be changed during the import
        for pk_note, old_username, new_username in CHANGED_USERNAMES:
            note = NoteUser.objects.filter(pk=pk_note).first()
            if note is None:  # the renamed note belongs to a club: nobody to e-mail
                continue
            user = note.user
            mail_text = render_to_string("scripts/unsupported_username.txt", dict(
                user=user,
                old_username=old_username,
                new_username=new_username,
            ))
            user.email_user("Transition à la Note Kfet 2020 : pseudo non supporté", mail_text)
        # Warn the users who lost aliases because of the normalization
        for pk_note, aliases_list in LOST_ALIASES.items():
            note = NoteUser.objects.filter(pk=pk_note).first()
            if note is None:  # the aliases belonged to a club note: nobody to e-mail
                continue
            user = note.user
            mail_text = render_to_string("scripts/deleted_aliases.txt", dict(
                user=user,
                aliases_list=aliases_list,
            ))
            user.email_user("Transition à la Note Kfet 2020 : suppression d'alias", mail_text)