Mirror of https://gitlab.crans.org/bde/nk20-scripts
Synced 2024-11-26 02:23:52 +00:00
215 lines · 7.5 KiB · Python
#!/usr/bin/env python3
|
|
|
|
import psycopg2 as pg
|
|
import psycopg2.extras as pge
|
|
import datetime
|
|
import json
|
|
|
|
from django.utils.timezone import make_aware, now
|
|
from django.contrib.auth.models import User
|
|
from django.contrib.contenttypes.models import ContentType
|
|
|
|
from django.db import transaction
|
|
|
|
from note.models import Note, NoteUser, NoteClub
|
|
from note.models import Alias
|
|
from member.models import Club, Profile
|
|
|
|
from ._import_utils import ImportCommand, BulkCreateManager, timed
|
|
|
|
M_DURATION = 396
|
|
M_START = datetime.date(2019, 8, 31)
|
|
M_END = datetime.date(2020, 9, 30)
|
|
|
|
MAP_IDBDE = {
|
|
-4: 2, # Carte Bancaire
|
|
-3: 4, # Virement
|
|
-2: 1, # Especes
|
|
-1: 3, # Chèque
|
|
0: 5, # BDE
|
|
}
|
|
|
|
# some Aliases have been created in the fixtures
|
|
ALIAS_SET = {a[0] for a in Alias.objects.all().values_list("normalized_name")}
|
|
|
|
note_user_type = ContentType.objects.get(app_label="note", model="noteuser")
|
|
note_club_type = ContentType.objects.get(app_label="note", model="noteclub")
|
|
|
|
|
|
class Command(ImportCommand):
|
|
"""
|
|
Import command for People base data (Comptes, and Aliases)
|
|
"""
|
|
|
|
def add_arguments(self, parser):
|
|
parser.add_argument('-a', '--alias', action='store_true', help="import alias")
|
|
|
|
def import_special_account(self, cur):
|
|
cur.execute("SELECT idbde, solde from comptes where idbde <=0")
|
|
for row in cur:
|
|
note = Note.objects.get(pk=MAP_IDBDE[row["idbde"]])
|
|
note.amount = row["solde"]
|
|
note.save()
|
|
|
|
@timed
|
|
@transaction.atomic
|
|
def import_account(self, cur, chunk_size):
|
|
"""
|
|
Import every account of the nk15 in a batch fashion.
|
|
Every Model has to be manually created, and no magic `.save()`
|
|
function is being called.
|
|
"""
|
|
cur.execute("SELECT * FROM comptes WHERE idbde > 0 ORDER BY idbde;")
|
|
pk_club = 3
|
|
pk_user = 1
|
|
pk_profile = 1
|
|
pk_note = 7 # pk 6 is Kfet!
|
|
n = cur.rowcount
|
|
|
|
bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
|
|
for idx, row in enumerate(cur):
|
|
pseudo = row["pseudo"]
|
|
pseudo_norm = Alias.normalize(pseudo)
|
|
self.update_line(idx, n, pseudo)
|
|
# clean pseudo (normalized pseudo must be unique)
|
|
if pseudo_norm in ALIAS_SET:
|
|
pseudo = pseudo + str(row["idbde"])
|
|
else:
|
|
ALIAS_SET.add(pseudo_norm)
|
|
# clean date
|
|
note_dict = {
|
|
"pk": pk_note,
|
|
"balance": row['solde'],
|
|
"last_negative": None,
|
|
"is_active": True,
|
|
"display_image": "pic/default.png",
|
|
"created_at": now()
|
|
}
|
|
if row["last_negatif"] is not None:
|
|
note_dict["last_negative"] = make_aware(row["last_negatif"])
|
|
if row["type"] == "personne":
|
|
# sanitize password
|
|
if row["passwd"] != "*|*" and not row["deleted"]:
|
|
passwd_nk15 = "$".join(["custom_nk15", "1", row["passwd"]])
|
|
else:
|
|
passwd_nk15 = ''
|
|
|
|
obj_dict = {
|
|
"pk": pk_user,
|
|
"username": row["pseudo"],
|
|
"password": passwd_nk15,
|
|
"first_name": row["nom"],
|
|
"last_name": row["prenom"],
|
|
"email": row["mail"],
|
|
"is_active": True, # temporary
|
|
}
|
|
profile_dict = {
|
|
"pk": pk_profile,
|
|
"user_id": pk_user,
|
|
"phone_number": row['tel'],
|
|
"address": row['adresse'],
|
|
"paid": row['normalien'],
|
|
"registration_valid": True,
|
|
"email_confirmed": True,
|
|
}
|
|
note_dict["polymorphic_ctype"] = note_user_type
|
|
note_user_dict = {
|
|
"pk": pk_note,
|
|
"user_id": pk_user,
|
|
}
|
|
alias_dict = {
|
|
"pk": pk_note,
|
|
"name": pseudo,
|
|
"normalized_name": Alias.normalize(pseudo),
|
|
"note_id": pk_note,
|
|
}
|
|
|
|
bulk_mgr.add(User(**obj_dict),
|
|
Profile(**profile_dict),
|
|
Note(**note_dict),
|
|
NoteUser(**note_user_dict),
|
|
Alias(**alias_dict),)
|
|
pk_user += 1
|
|
pk_profile += 1
|
|
|
|
else: # club
|
|
obj_dict = {
|
|
"pk": pk_club,
|
|
"name": row["pseudo"],
|
|
"email": row["mail"],
|
|
"membership_duration": M_DURATION,
|
|
"membership_start": M_START,
|
|
"membership_end": M_END,
|
|
"membership_fee_paid": 0,
|
|
"membership_fee_unpaid": 0,
|
|
}
|
|
note_club_dict = {
|
|
"pk": pk_note,
|
|
"club_id": pk_club,
|
|
}
|
|
alias_dict = {
|
|
"pk": pk_note,
|
|
"name": pseudo,
|
|
"normalized_name": Alias.normalize(pseudo),
|
|
"note_id": pk_note
|
|
}
|
|
note_dict["polymorphic_ctype"] = note_club_type
|
|
bulk_mgr.add(Club(**obj_dict),
|
|
Note(**note_dict),
|
|
NoteClub(**note_club_dict),
|
|
Alias(**alias_dict))
|
|
pk_club += 1
|
|
# row import completed
|
|
MAP_IDBDE[row["idbde"]] = pk_note
|
|
pk_note += 1
|
|
bulk_mgr.done()
|
|
|
|
@timed
|
|
def import_alias(self, cur, chunk_size):
|
|
"""
|
|
Import Alias from nk15
|
|
We rely on validation of the models, but it is slow.
|
|
"""
|
|
cur.execute("SELECT * FROM aliases ORDER by id")
|
|
n = cur.rowcount
|
|
|
|
bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
|
|
pk_alias = Alias.objects.order_by('-id').first().id + 1
|
|
for idx, row in enumerate(cur):
|
|
alias_name = row["alias"]
|
|
alias_name = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
|
|
alias_norm = Alias.normalize(alias_name)
|
|
self.update_line(idx, n, alias_norm)
|
|
# clean pseudo (normalized pseudo must be unique)
|
|
if alias_norm in ALIAS_SET:
|
|
continue
|
|
else:
|
|
ALIAS_SET.add(alias_norm)
|
|
obj_dict = {
|
|
"pk": pk_alias,
|
|
"note_id": MAP_IDBDE[row["idbde"]],
|
|
"name": alias_name,
|
|
"normalized_name": alias_norm,
|
|
}
|
|
pk_alias += 1
|
|
bulk_mgr.add(Alias(**obj_dict))
|
|
bulk_mgr.done()
|
|
|
|
def handle(self, *args, **kwargs):
|
|
# default args, provided by ImportCommand.
|
|
nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
|
|
# connecting to nk15 database
|
|
conn = pg.connect(database=nk15db, user=nk15user)
|
|
cur = conn.cursor(cursor_factory=pge.DictCursor)
|
|
|
|
self.import_special_account(cur)
|
|
self.import_account(cur, kwargs["chunk"])
|
|
# Alias Management
|
|
if kwargs["alias"]:
|
|
self.import_alias(cur, kwargs["chunk"])
|
|
# save to disk
|
|
if kwargs["save"]:
|
|
filename = kwargs["save"]
|
|
with open(filename, 'w') as fp:
|
|
json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)
|