# nk20-scripts/management/commands/import_account.py
# Copyright (C) 2018-2020 by BDE ENS Paris-Saclay
# SPDX-License-Identifier: GPL-3.0-or-later
import datetime
import json

import psycopg2 as pg
import psycopg2.extras as pge
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import transaction
from django.utils.timezone import make_aware, now
from member.models import Club, Profile
from note.models import Alias, Note, NoteClub, NoteUser

from ._import_utils import BulkCreateManager, ImportCommand, timed

# Membership duration (in days) and validity window applied to every
# imported club.
M_DURATION = 396
M_START = datetime.date(2019, 8, 31)
M_END = datetime.date(2020, 9, 30)

# NK15 "idbde" -> NK20 note pk. Non-positive ids are the special payment
# notes already present in the fixtures; the mapping is extended with every
# account created by import_account, and can be dumped to disk afterwards.
MAP_IDBDE = {
    -4: 2,  # Carte Bancaire
    -3: 4,  # Virement
    -2: 1,  # Especes
    -1: 3,  # Chèque
    0: 5,   # BDE
}

# Normalized alias names already taken: some Aliases have been created in
# the fixtures, and normalized names must stay unique.
ALIAS_SET = {a[0] for a in Alias.objects.all().values_list("normalized_name")}

# Polymorphic content types for Note subclasses, resolved once.
# NOTE(review): these queries run at module import time, so importing this
# module requires a migrated database with the contenttypes populated.
note_user_type = ContentType.objects.get(app_label="note", model="noteuser")
note_club_type = ContentType.objects.get(app_label="note", model="noteclub")
class Command(ImportCommand):
    """
    Import command for People base data (Comptes, and Aliases).

    Reads the NK15 PostgreSQL database and bulk-creates the corresponding
    NK20 User/Profile/Club/Note/Alias rows, keeping the NK15 id -> NK20
    note pk correspondence in the module-level MAP_IDBDE.
    """

    def add_arguments(self, parser):
        # ImportCommand provides the common arguments (db, user, chunk, save).
        parser.add_argument('-a', '--alias', action='store_true', help="import alias")

    def import_special_account(self, cur):
        """
        Update the balance of the special accounts (idbde <= 0: payment
        modes and the BDE note), which already exist in the fixtures.

        :param cur: NK15 database cursor (DictCursor).
        """
        cur.execute("SELECT idbde, solde from comptes where idbde <=0")
        for row in cur:
            note = Note.objects.get(pk=MAP_IDBDE[row["idbde"]])
            note.amount = row["solde"]
            note.save()

    @timed
    @transaction.atomic
    def import_account(self, cur, chunk_size):
        """
        Import every account of the nk15 in a batch fashion.
        Every Model has to be manually created, and no magic `.save()`
        function is being called.

        :param cur: NK15 database cursor (DictCursor).
        :param chunk_size: number of objects per bulk INSERT.
        """
        cur.execute("SELECT * FROM comptes WHERE idbde > 0 ORDER BY idbde;")
        # Primary keys are assigned manually; fixtures already occupy the
        # lower pks (clubs 1-2, notes 1-6 — pk 6 is Kfet!).
        pk_club = 3
        pk_user = 1
        pk_profile = 1
        pk_note = 7
        n = cur.rowcount
        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        for idx, row in enumerate(cur):
            pseudo = row["pseudo"]
            pseudo_norm = Alias.normalize(pseudo)
            self.update_line(idx, n, pseudo)
            # clean pseudo (normalized pseudo must be unique): on collision,
            # append the NK15 id to make the alias unique.
            if pseudo_norm in ALIAS_SET:
                pseudo = pseudo + str(row["idbde"])
            else:
                ALIAS_SET.add(pseudo_norm)
            # Fields common to user and club notes; polymorphic_ctype is
            # filled below depending on the account type.
            note_dict = {
                "pk": pk_note,
                "balance": row['solde'],
                "last_negative": None,
                "is_active": True,
                "display_image": "pic/default.png",
                "created_at": now()
            }
            if row["last_negatif"] is not None:
                # NK15 stores naive datetimes; make them timezone-aware.
                note_dict["last_negative"] = make_aware(row["last_negatif"])
            if row["type"] == "personne":
                # sanitize password: deleted accounts and the "*|*" marker
                # get an empty (unusable) password; otherwise wrap the NK15
                # hash for the custom_nk15 password hasher.
                if row["passwd"] != "*|*" and not row["deleted"]:
                    passwd_nk15 = "$".join(["custom_nk15", "1", row["passwd"]])
                else:
                    passwd_nk15 = ''
                obj_dict = {
                    "pk": pk_user,
                    "username": row["pseudo"],
                    "password": passwd_nk15,
                    # BUGFIX: "prenom" is the given name and "nom" the family
                    # name; the two columns were previously swapped.
                    "first_name": row["prenom"],
                    "last_name": row["nom"],
                    "email": row["mail"],
                    "is_active": True,  # temporary
                }
                profile_dict = {
                    "pk": pk_profile,
                    "user_id": pk_user,
                    "phone_number": row['tel'],
                    "address": row['adresse'],
                    "paid": row['normalien'],
                    "section": row["section"],
                    "registration_valid": True,
                    "email_confirmed": True,
                }
                note_dict["polymorphic_ctype"] = note_user_type
                note_user_dict = {
                    "pk": pk_note,
                    "user_id": pk_user,
                }
                alias_dict = {
                    "pk": pk_note,
                    "name": pseudo,
                    "normalized_name": Alias.normalize(pseudo),
                    "note_id": pk_note,
                }
                bulk_mgr.add(User(**obj_dict),
                             Profile(**profile_dict),
                             Note(**note_dict),
                             NoteUser(**note_user_dict),
                             Alias(**alias_dict),)
                pk_user += 1
                pk_profile += 1
            else:  # club
                obj_dict = {
                    "pk": pk_club,
                    "name": row["pseudo"],
                    "email": row["mail"],
                    "membership_duration": M_DURATION,
                    "membership_start": M_START,
                    "membership_end": M_END,
                    "membership_fee_paid": 0,
                    "membership_fee_unpaid": 0,
                }
                note_club_dict = {
                    "pk": pk_note,
                    "club_id": pk_club,
                }
                alias_dict = {
                    "pk": pk_note,
                    "name": pseudo,
                    "normalized_name": Alias.normalize(pseudo),
                    "note_id": pk_note
                }
                note_dict["polymorphic_ctype"] = note_club_type
                bulk_mgr.add(Club(**obj_dict),
                             Note(**note_dict),
                             NoteClub(**note_club_dict),
                             Alias(**alias_dict))
                pk_club += 1
            # row import completed: remember the NK15 id -> note pk mapping
            # so transactions and aliases can be linked later.
            MAP_IDBDE[row["idbde"]] = pk_note
            pk_note += 1
        bulk_mgr.done()

    @timed
    def import_alias(self, cur, chunk_size):
        """
        Import Alias from nk15.
        We rely on validation of the models, but it is slow.

        :param cur: NK15 database cursor (DictCursor).
        :param chunk_size: number of objects per bulk INSERT.
        """
        cur.execute("SELECT * FROM aliases ORDER by id")
        n = cur.rowcount
        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        # Aliases were already created for the accounts themselves; continue
        # numbering after the highest existing pk.
        pk_alias = Alias.objects.order_by('-id').first().id + 1
        for idx, row in enumerate(cur):
            alias_name = row["alias"]
            # Truncate to the 255-char field limit, marking the cut.
            alias_name = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
            alias_norm = Alias.normalize(alias_name)
            self.update_line(idx, n, alias_norm)
            # clean pseudo (normalized pseudo must be unique): duplicates
            # are silently skipped.
            if alias_norm in ALIAS_SET:
                continue
            else:
                ALIAS_SET.add(alias_norm)
            obj_dict = {
                "pk": pk_alias,
                "note_id": MAP_IDBDE[row["idbde"]],
                "name": alias_name,
                "normalized_name": alias_norm,
            }
            pk_alias += 1
            bulk_mgr.add(Alias(**obj_dict))
        bulk_mgr.done()

    def handle(self, *args, **kwargs):
        # default args, provided by ImportCommand.
        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
        # connecting to nk15 database
        conn = pg.connect(database=nk15db, user=nk15user)
        cur = conn.cursor(cursor_factory=pge.DictCursor)
        try:
            self.import_special_account(cur)
            self.import_account(cur, kwargs["chunk"])
            # Alias Management
            if kwargs["alias"]:
                self.import_alias(cur, kwargs["chunk"])
        finally:
            # BUGFIX: the cursor and connection were never closed.
            cur.close()
            conn.close()
        # save the NK15 id -> note pk mapping to disk
        if kwargs["save"]:
            filename = kwargs["save"]
            with open(filename, 'w') as fp:
                json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)