Mirror of https://gitlab.crans.org/bde/nk20-scripts (synced 2024-11-26 10:27:09 +00:00)

Compare commits


No commits in common. "8758cb5238de7202598fece86bbd1269c7f0b974" and "64dd63503914870615e91357d4c19f9798804e5b" have entirely different histories.

5 changed files with 529 additions and 383 deletions

View File

@ -1,39 +1,20 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import json
import time
from collections import defaultdict
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from collections import defaultdict
from django.apps import apps from django.apps import apps
from django.db import transaction from django.db import transaction
from django.contrib.auth.models import User
from django.db.models import Model
from polymorphic.models import PolymorphicModel from polymorphic.models import PolymorphicModel
def timed(method):
""""
A simple decorator to measure time elapsed in class function (hence the args[0])
"""
def _timed(*args, **kw):
ts = time.time()
result = method(*args, **kw)
te = time.time()
args[0].print_success(f"{method.__name__} executed ({te-ts:.2f}s)")
return result
return _timed
class ImportCommand(BaseCommand): class ImportCommand(BaseCommand):
""" """
Generic command for import of NK15 database Generic command for import of NK15 database
""" """
def __init__(self, *args, **kwargs):
super().__init__(args, kwargs)
self.MAP_IDBDE = dict()
def print_success(self, to_print): def print_success(self, to_print):
return self.stdout.write(self.style.SUCCESS(to_print)) return self.stdout.write(self.style.SUCCESS(to_print))
@@ -55,14 +36,6 @@ class ImportCommand(BaseCommand):
         parser.add_argument('-c', '--chunk', type=int, default=100, help="chunk size for bulk_create")
         return parser
 
-    def save_map(self, filename):
-        with open(filename, 'w') as fp:
-            json.dump(self.MAP_IDBDE, fp, sort_keys=True, indent=2)
-
-    def load_map(self, filename):
-        with open(filename, 'r') as fp:
-            self.MAP_IDBDE = json.load(fp, object_hook=lambda d: {int(k): int(v) for k, v in d.items()})
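For context, the idbde map that save_map and load_map persist (and that the monolithic handle() further down dumps and reloads) is a plain JSON object mapping NK15 idbde values to NK20 note pks. Since JSON object keys are always strings, both loaders convert the keys back to int. A minimal round-trip sketch with made-up values:

import json

MAP_IDBDE = {-1: 3, 0: 5, 104: 7}  # hypothetical idbde -> note pk entries
with open("map.json", "w") as fp:
    json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)
with open("map.json", "r") as fp:
    # JSON keys come back as strings, hence the int() conversion on load
    loaded = {int(k): int(v) for k, v in json.load(fp).items()}
assert loaded == MAP_IDBDE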
 class BulkCreateManager(object):
     """
@@ -80,16 +53,14 @@ class BulkCreateManager(object):
 
     def _commit(self, model_class):
         model_key = model_class._meta.label
-        # check for multi-table inheritance: it happens
-        # if model_class is a grand-child of PolymorphicModel
-        if model_class.__base__ is not PolymorphicModel and model_class.__base__.__base__ is PolymorphicModel:
-            # ensure that parent models exist
+        if model_class.__base__ in [Model, PolymorphicModel] or model_class is User:
+            model_class.objects.bulk_create(self._create_queues[model_key])
+        else:
             self._commit(model_class.__base__)
             with transaction.atomic():
                 for obj in self._create_queues[model_key]:
                     obj.save_base(raw=True)
-        else:
-            # ensure that parent models exist
-            model_class.objects.bulk_create(self._create_queues[model_key])
         self._create_queues[model_key] = []
 
     def add(self, *args):
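For readers skimming the hunk above: BulkCreateManager queues objects per model class and flushes a queue once it reaches chunk_size; the changed branch decides whether a flush can use bulk_create() or must fall back to saving objects one by one so that parent rows of multi-table / polymorphic models exist first. A minimal pure-Python sketch of that batching pattern (TinyBulkManager and the print placeholder are illustrative, not part of the repository):

from collections import defaultdict

class TinyBulkManager:
    """Illustrative only: queue objects per type, flush in chunks."""

    def __init__(self, chunk_size=100):
        self.chunk_size = chunk_size
        self._queues = defaultdict(list)

    def add(self, *objs):
        for obj in objs:
            key = type(obj).__name__
            self._queues[key].append(obj)
            if len(self._queues[key]) >= self.chunk_size:
                self._flush(key)

    def _flush(self, key):
        # the real manager calls bulk_create() here, or save_base(raw=True)
        # per object when parent tables have to be written first
        print(f"flushing {len(self._queues[key])} x {key}")
        self._queues[key] = []

    def done(self):
        for key in list(self._queues):
            self._flush(key)

mgr = TinyBulkManager(chunk_size=2)
mgr.add("a", "b", "c")  # the first two "str" objects are flushed immediately
mgr.done()              # flushes the remaining one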

View File

@@ -10,12 +10,13 @@ from django.contrib.auth.models import User
 from django.contrib.contenttypes.models import ContentType
 from django.db import transaction
+from django.db import IntegrityError
 
 from note.models import Note, NoteUser, NoteClub
 from note.models import Alias
 from member.models import Club, Profile
 
-from ._import_utils import ImportCommand, BulkCreateManager, timed
+from ._import_utils import ImportCommand, BulkCreateManager
 
 M_DURATION = 396
 M_START = datetime.date(2019, 8, 31)
@@ -40,18 +41,10 @@ class Command(ImportCommand):
     """
     Import command for People base data (Comptes, and Aliases)
     """
 
     def add_arguments(self, parser):
         parser.add_argument('-a', '--alias', action='store_true', help="import alias")
 
-    def import_special_account(self, cur):
-        cur.execute("SELECT idbde, solde from comptes where idbde <=0")
-        for row in cur:
-            note = Note.objects.get(pk=MAP_IDBDE[row["idbde"]])
-            note.amount = row["solde"]
-            note.save()
-
-    @timed
     @transaction.atomic
     def import_account(self, cur, chunk_size):
         """
@@ -63,7 +56,7 @@ class Command(ImportCommand):
         pk_club = 3
         pk_user = 1
         pk_profile = 1
         pk_note = 7  # pk 6 is Kfet!
 
         n = cur.rowcount
         bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
@@ -73,13 +66,13 @@ class Command(ImportCommand):
             self.update_line(idx, n, pseudo)
             # clean pseudo (normalized pseudo must be unique)
             if pseudo_norm in ALIAS_SET:
-                pseudo = pseudo + str(row["idbde"])
+                pseudo = pseudo+str(row["idbde"])
             else:
                 ALIAS_SET.add(pseudo_norm)
             # clean date
 
             note_dict = {
                 "pk": pk_note,
-                "balance": row['solde'],
+                "balance": 0,
                 "last_negative": None,
                 "is_active": True,
                 "display_image": "",
@@ -163,8 +156,8 @@ class Command(ImportCommand):
             MAP_IDBDE[row["idbde"]] = pk_note
             pk_note += 1
         bulk_mgr.done()
+        self.print_success("comptes table imported")
 
-    @timed
     def import_alias(self, cur, chunk_size):
         """
         Import Alias from nk15
@@ -184,6 +177,7 @@ class Command(ImportCommand):
             if alias_norm in ALIAS_SET:
                 continue
             else:
+                print(alias_norm)
                 ALIAS_SET.add(alias_norm)
             obj_dict = {
                 "pk": pk_alias,
@@ -195,19 +189,19 @@ class Command(ImportCommand):
             bulk_mgr.add(Alias(**obj_dict))
         bulk_mgr.done()
 
     def handle(self, *args, **kwargs):
         # default args, provided by ImportCommand.
         nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
         # connecting to nk15 database
         conn = pg.connect(database=nk15db, user=nk15user)
         cur = conn.cursor(cursor_factory=pge.DictCursor)
-        self.import_special_account(cur)
-        self.import_account(cur, kwargs["chunk"])
-        # Alias Management
+        self.import_account(cur,kwargs["chunk"])
+        # Alias Management
         if kwargs["alias"]:
-            self.import_alias(cur, kwargs["chunk"])
-        # save to disk
+            self.import_alias(cur,kwargs["chunk"])
+        #save to disk
         if kwargs["save"]:
             filename = kwargs["save"]
             with open(filename, 'w') as fp:

View File

@@ -1,105 +0,0 @@
#!/usr/bin/env python3
import psycopg2 as pg
import psycopg2.extras as pge
import datetime
import copy
from django.utils.timezone import make_aware
from django.db import transaction
from activity.models import ActivityType, Activity, Guest, Entry
from member.models import Club
from note.models import Note
from ._import_utils import ImportCommand, BulkCreateManager, timed
MAP_ACTIVITY = dict()
class Command(ImportCommand):
"""
    Import command for activity base data (Activities, Guests and Entries)
"""
@timed
@transaction.atomic
def import_activities(self, cur, chunk_size):
cur.execute("SELECT * FROM activites ORDER by id")
n = cur.rowcount
bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
activity_type_id = ActivityType.objects.get(name="Pot").pk # Need to be fixed manually
kfet = Club.objects.get(name="Kfet")
pk_activity = 1
for idx, row in enumerate(cur):
self.update_line(idx, n, row["titre"])
note = self.MAP_IDBDE[row["responsable"]]
if note == 6244:
                # Licorne magique must not use its club account to propose activities
note = Note.objects.get(pk=self.MAP_IDBDE[6524])
note = note.user_id
organizer = Club.objects.filter(name=row["signature"])
if organizer.exists():
# Try to find the club that organizes the activity.
# If not found, assume it's Kfet (fix manually)
organizer = organizer.get()
else:
organizer = kfet
obj_dict = {
"pk": pk_activity,
"name": row["titre"],
"description": row["description"],
"activity_type_id": activity_type_id, # By default Pot
"creater_id": note,
"organizer_id": organizer.pk,
"attendees_club_id": kfet.pk, # Maybe fix manually
"date_start": make_aware(row["debut"]),
"date_end": make_aware(row["fin"]),
"valid": row["validepar"] is not None,
"open": row["ouvert"], # Should always be False
}
# WARNING: Fields lieu, liste, listeimprimee are missing
MAP_ACTIVITY[row["id"]] = pk_activity
pk_activity +=1
bulk_mgr.add(Activity(**obj_dict))
bulk_mgr.done()
@timed
@transaction.atomic
def import_activities_entries(self, cur):
bulk_mgr = BulkCreateManager()
cur.execute("SELECT * FROM invites ORDER by id")
n = cur.rowcount
for idx, row in enumerate(cur):
self.update_line(idx, n, f"{row['nom']} {row['prenom']}")
obj_dict = {
"pk": row["id"],
"activity_id": MAP_ACTIVITY[row["activite"]],
"last_name": row["nom"],
"first_name": row["prenom"],
"inviter_id": self.MAP_IDBDE[row["responsable"]],
}
bulk_mgr.add(Guest(**obj_dict))
bulk_mgr.done()
cur.execute("SELECT * FROM entree_activites ORDER by id")
n = cur.rowcount
for idx, row in enumerate(cur):
self.update_line(idx, n, f"{row['idbde']} {row['responsable']}")
obj_dict = {
"activity_id": MAP_ACTIVITY[row["activite"]],
"time": make_aware(row["heure_entree"]),
"note_id": self.MAP_IDBDE[row["responsable"]] if row['est_invite'] else row["idbde"],
"guest_id": row["idbde"] if row['est_invite'] else None,
}
bulk_mgr.add(Entry(**obj_dict))
bulk_mgr.done()
def handle(self, *args, **kwargs):
# default args, provided by ImportCommand.
nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
# connecting to nk15 database
conn = pg.connect(database=nk15db, user=nk15user)
cur = conn.cursor(cursor_factory=pge.DictCursor)
if kwargs["map"]:
self.load_map(kwargs["map"])
self.import_activities(cur, kwargs["chunk"])
self.import_activities_entries(cur)

View File

@@ -1,8 +1,458 @@
 #!/usr/env/bin python3
-import subprocess
+import json
+import datetime
+import re
+import pytz
+import psycopg2 as pg
+import psycopg2.extras as pge
 from django.core.management.base import BaseCommand
 from django.core.management import call_command
+from django.db import transaction
+from django.core.exceptions import ValidationError
+from django.utils.timezone import make_aware
+from django.db import IntegrityError
+from django.contrib.auth.models import User
+
+from activity.models import ActivityType, Activity, Guest, Entry, GuestTransaction
+from note.models import Note
+from note.models import Alias
+from note.models import (
+    TemplateCategory,
+    TransactionTemplate,
+    Transaction,
+    RecurrentTransaction,
+    MembershipTransaction,
+    SpecialTransaction,
+)
+from member.models import Club, Membership
+from treasury.models import RemittanceType, Remittance, SpecialTransactionProxy
+
+"""
+Import script for the NK15 database:
+TODO: import transactions
+TODO: import adhesion
+TODO: import activite
+TODO: import ...
+"""
M_DURATION = 396
M_START = datetime.date(2019, 8, 31)
M_END = datetime.date(2020, 9, 30)
MAP_IDBDE = {
-4: 2, # Carte Bancaire
-3: 4, # Virement
-2: 1, # Especes
-1: 3, # Chèque
0: 5, # BDE
}
MAP_IDACTIVITY = {}
MAP_NAMEACTIVITY = {}
MAP_NAMEGUEST = {}
MAP_IDSPECIALTRANSACTION = {}
def update_line(n, total, content):
n = str(n)
total = str(total)
    n = n.rjust(len(total))
print(f"\r ({n}/{total}) {content:10.10}", end="")
@transaction.atomic
def import_comptes(cur):
cur.execute("SELECT * FROM comptes WHERE idbde > 0 ORDER BY idbde;")
pkclub = 3
n = cur.rowcount
for idx, row in enumerate(cur):
update_line(idx, n, row["pseudo"])
if row["type"] == "personne":
# sanitize password
if row["passwd"] != "*|*" and not row["deleted"]:
passwd_nk15 = "$".join(["custom_nk15", "1", row["passwd"]])
else:
passwd_nk15 = ''
try:
obj_dict = {
"username": row["pseudo"],
"password": passwd_nk15,
"first_name": row["nom"],
"last_name": row["prenom"],
"email": row["mail"],
"is_active": True, # temporary
}
user = User.objects.create(**obj_dict)
profile = user.profile
profile.phone_number = row['tel']
profile.address = row['adresse']
profile.paid = row['normalien']
profile.registration_valid = True
profile.email_confirmed = True
user.save()
profile.save()
# sanitize duplicate aliases (nk12)
except ValidationError as e:
if e.code == 'same_alias':
user.username = row["pseudo"] + str(row["idbde"])
user.save()
else:
raise e
# profile and note created via signal.
note = user.note
date = row.get("last_negatif", None)
if date is not None:
note.last_negative = make_aware(date)
note.balance = row["solde"]
note.save()
else: # club
obj_dict = {
"pk": pkclub,
"name": row["pseudo"],
"email": row["mail"],
"membership_duration": M_DURATION,
"membership_start": M_START,
"membership_end": M_END,
"membership_fee_paid": 0,
"membership_fee_unpaid": 0,
}
club, c = Club.objects.get_or_create(**obj_dict)
pkclub += 1
note = club.note
note.balance = row["solde"]
club.save()
note.save()
MAP_IDBDE[row["idbde"]] = note.note_ptr_id
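A note on the password sanitizing earlier in import_comptes: valid NK15 hashes are re-wrapped into a Django-style "algorithm$iterations$hash" string under the prefix custom_nk15, presumably so that a matching custom password hasher on the NK20 side (assumed here, it is not shown in this diff) can verify them later. Illustration with a made-up hash:

# made-up NK15 hash, shown only to illustrate the string built above
row_passwd = "3858f62230ac3c915f300c664312c63f"
passwd_nk15 = "$".join(["custom_nk15", "1", row_passwd])
assert passwd_nk15 == "custom_nk15$1$3858f62230ac3c915f300c664312c63f"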
@transaction.atomic
def import_boutons(cur):
cur.execute("SELECT * FROM boutons;")
n = cur.rowcount
for idx, row in enumerate(cur):
update_line(idx, n, row["label"])
cat, created = TemplateCategory.objects.get_or_create(name=row["categorie"])
if created:
cat.save()
obj_dict = {
"pk": row["id"],
"name": row["label"],
"amount": row["montant"],
"destination_id": MAP_IDBDE[row["destinataire"]],
"category": cat,
"display": row["affiche"],
"description": row["description"],
}
try:
with transaction.atomic(): # required for error management
button = TransactionTemplate.objects.create(**obj_dict)
except IntegrityError as e:
# button with the same name is not possible in NK20.
if "unique" in e.args[0]:
qs = Club.objects.filter(note__note_ptr=MAP_IDBDE[row["destinataire"]]).values('name')
note_name = qs[0]["name"]
# rename button name
obj_dict["name"] = f"{obj_dict_name['name']} {note_name}"
button = TransactionTemplate.objects.create(**obj_dict)
else:
raise e
button.save()
@transaction.atomic
def import_transaction(cur):
idmin = 58770
bde = Club.objects.get(name="BDE")
kfet = Club.objects.get(name="Kfet")
cur.execute(
"SELECT t.date AS transac_date, t.type, t.emetteur,\
t.destinataire,t.quantite, t.montant, t.description,\
t.valide, t.cantinvalidate, t.categorie, \
a.idbde, a.annee, a.wei, a.date AS adh_date, a.section\
FROM transactions AS t \
LEFT JOIN adhesions AS a ON t.id = a.idtransaction \
WHERE t.id> {} \
ORDER BY t.id;".format(idmin)
)
n = cur.rowcount
for idx, row in enumerate(cur):
update_line(idx, n, row["description"])
try:
date = make_aware(row["transac_date"])
except (pytz.NonExistentTimeError, pytz.AmbiguousTimeError):
date = make_aware(row["transac_date"] + datetime.timedelta(hours=1))
        # standard transaction object
obj_dict = {
# "pk": row["id"],
"destination_id": MAP_IDBDE[row["destinataire"]],
"source_id": MAP_IDBDE[row["emetteur"]],
"created_at": date,
"amount": row["montant"],
"quantity": row["quantite"],
"reason": row["description"],
"valid": row["valide"],
}
ttype = row["type"]
if ttype == "don" or ttype == "transfert":
Transaction.objects.create(**obj_dict)
elif ttype == "bouton":
cat_name = row["categorie"]
if cat_name is None:
cat_name = 'None'
cat, created = TemplateCategory.objects.get_or_create(name=cat_name)
if created:
cat.save()
obj_dict["category"] = cat
RecurrentTransaction.objects.create(**obj_dict)
elif ttype == "crédit" or ttype == "retrait":
field_id = "source_id" if ttype == "crédit" else "destination_id"
if "espèce" in row["description"]:
obj_dict[field_id] = 1
elif "carte" in row["description"]:
obj_dict[field_id] = 2
elif "cheques" in row["description"]:
obj_dict[field_id] = 3
elif "virement" in row["description"]:
obj_dict[field_id] = 4
pk = max(row["destinataire"], row["emetteur"])
actor = Note.objects.get(id=MAP_IDBDE[pk])
# custom fields of SpecialTransaction
if actor.__class__.__name__ == "NoteUser":
obj_dict["first_name"] = actor.user.first_name
obj_dict["last_name"] = actor.user.last_name
elif actor.__class__.__name__ == "NoteClub":
obj_dict["first_name"] = actor.club.name
obj_dict["last_name"] = actor.club.name
else:
raise Exception("Badly formatted Special Transaction You should'nt be there.")
tr = SpecialTransaction.objects.create(**obj_dict)
if "cheques" in row["description"]:
MAP_IDSPECIALTRANSACTION[row["id"]] = tr
elif ttype == "adhésion":
montant = row["montant"]
            # Create a double membership, one to the BDE club and one to the Kfet club.
            # Sometimes montant = 0; the fees are adjusted accordingly (see the worked example after this function).
bde_dict = {
"user": MAP_IDBDE[row["idbde"]],
"club": bde,
"date_start": row["date"].date(), # Only date, not time
"fee": min(500, montant)
}
kfet_dict = {
"user": MAP_IDBDE[row["idbde"]],
"club": kfet,
"date_start": row["date"].date(), # Only date, not time
"fee": max(montant - 500, 0),
}
if row["valide"]:
with transaction.atomic():
# membership save triggers MembershipTransaction creation
                    bde_membership, _ = Membership.objects.get_or_create(**bde_dict)
                    kfet_membership, _ = Membership.objects.get_or_create(**kfet_dict)
bde_membership.transaction.created_at = row["transac_date"]
bde_membership.transaction.description = row["description"]
bde_membership.transaction.save()
kfet_membership.transaction.created_at = row["transac_date"]
kfet_membership.transaction.description = row["description"] + "(Kfet)"
kfet_membership.transaction.save()
else:
# don't create membership
MembershipTransaction.objects.create(**obj_dict)
elif ttype == "invitation":
m = re.search(r"Invitation (.*?) \((.*?)\)", row["description"])
if m is None:
raise IntegrityError(f"Invitation is not well formated: {row['description']} (must be 'Invitation ACTIVITY_NAME (NAME)')")
activity_name = m.group(1)
guest_name = m.group(2)
if activity_name not in MAP_NAMEACTIVITY:
raise IntegrityError(f"Activity {activity_name} is not found")
activity = MAP_NAMEACTIVITY[activity_name]
if guest_name not in MAP_NAMEGUEST:
raise IntegrityError(f"Guest {guest_name} is not found")
guest = None
for g in MAP_NAMEGUEST[guest_name]:
if g.activity.pk == activity.pk:
guest = g
break
if guest is None:
raise IntegrityError("Guest {guest_name} didn't go to the activity {activity_name}")
obj_dict["guest"] = guest
GuestTransaction.objects.get_or_create(**obj_dict)
else:
print("other type not supported yet:", ttype)
@transaction.atomic
def import_aliases(cur):
cur.execute("SELECT * FROM aliases ORDER by id")
n = cur.rowcount
for idx, row in enumerate(cur):
update_line(idx, n, row["alias"])
alias_name = row["alias"]
alias_name_good = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
obj_dict = {
"note_id": MAP_IDBDE[row["idbde"]],
"name": alias_name_good,
"normalized_name": Alias.normalize(alias_name_good),
}
try:
with transaction.atomic():
alias, created = Alias.objects.get_or_create(**obj_dict)
except IntegrityError as e:
if "unique" in e.args[0]:
continue
else:
raise e
alias.save()
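The truncation rule above keeps aliases within the 255-character limit the code assumes for the NK20 Alias name field: names longer than 255 characters are cut to 252 characters and suffixed with "...". A quick self-contained check (the alias string is made up):

alias_name = "x" * 300  # deliberately too long
alias_name_good = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
assert len(alias_name_good) == 255
assert alias_name_good.endswith("...")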
@transaction.atomic
def import_activities(cur):
cur.execute("SELECT * FROM activites ORDER by id")
n = cur.rowcount
activity_type = ActivityType.objects.get(name="Pot") # Need to be fixed manually
kfet = Club.objects.get(name="Kfet")
for idx, row in enumerate(cur):
update_line(idx, n, row["alias"])
organizer = Club.objects.filter(name=row["signature"])
if organizer.exists():
            # Try to find the club that organizes the activity. If not found, assume it's Kfet (fix manually)
organizer = organizer.get()
else:
organizer = kfet
obj_dict = {
"name": row["titre"],
"description": row["description"],
"activity_type": activity_type, # By default Pot
"creater": MAP_IDBDE[row["responsable"]],
"organizer": organizer,
"attendees_club": kfet, # Maybe fix manually
"date_start": row["debut"],
"date_end": row["fin"],
"valid": row["validepar"] is not None,
"open": row["open"], # Should be always False
}
# WARNING: Fields lieu, liste, listeimprimee are missing
try:
with transaction.atomic():
activity = Activity.objects.get_or_create(**obj_dict)[0]
MAP_IDACTIVITY[row["id"]] = activity
MAP_NAMEACTIVITY[activity.name] = activity
except IntegrityError as e:
raise e
@transaction.atomic
def import_activity_entries(cur):
map_idguests = {}
cur.execute("SELECT * FROM invites ORDER by id")
n = cur.rowcount
for idx, row in enumerate(cur):
update_line(idx, n, row["nom"] + " " + row["prenom"])
obj_dict = {
"activity": MAP_IDACTIVITY[row["activity"]],
"last_name": row["nom"],
"first_name": row["prenom"],
"inviter": MAP_IDBDE[row["responsable"]],
}
try:
with transaction.atomic():
guest = Guest.objects.get_or_create(**obj_dict)[0]
                map_idguests.setdefault(row["id"], [])
                map_idguests[row["id"]].append(guest)
guest_name = guest.first_name + " " + guest.last_name
MAP_NAMEGUEST.setdefault(guest_name, [])
MAP_NAMEGUEST[guest_name].append(guest)
except IntegrityError as e:
raise e
cur.execute("SELECT * FROM entree_activites ORDER by id")
n = cur.rowcount
for idx, row in enumerate(cur):
update_line(idx, n, row["nom"] + " " + row["prenom"])
        activity = MAP_IDACTIVITY[row["activite"]]
guest = None
if row["est_invite"]:
            for g in map_idguests[row["idbde"]]:
if g.activity.pk == activity.pk:
guest = g
break
if not guest:
raise IntegrityError("Guest was not found: " + str(row))
obj_dict = {
"activity": activity,
"time": row["heure_entree"],
"note": guest.inviter if guest else MAP_IDBDE[row["idbde"]],
"guest": guest,
}
try:
with transaction.atomic():
Entry.objects.get_or_create(**obj_dict)
except IntegrityError as e:
raise e
@transaction.atomic
def import_remittances(cur):
cur.execute("SELECT * FROM remises ORDER by id")
map_idremittance = {}
n = cur.rowcount
check_type = RemittanceType.objects.get(note__name="Chèque")
for idx, row in enumerate(cur):
update_line(idx, n, row["date"])
obj_dict = {
"date": row["date"][10:],
"remittance_type": check_type,
"comment": row["commentaire"],
"closed": row["close"],
}
try:
with transaction.atomic():
                remittance, _ = Remittance.objects.get_or_create(**obj_dict)
map_idremittance[row["id"]] = remittance
except IntegrityError as e:
raise e
print("remittances are imported")
print("imported checks")
cur.execute("SELECT * FROM cheques ORDER by id")
n = cur.rowcount
for idx, row in enumerate(cur):
update_line(idx, n, row["date"])
obj_dict = {
"date": row["date"][10:],
"remittance_type": check_type,
"comment": row["commentaire"],
"closed": row["close"],
}
tr = MAP_IDSPECIALTRANSACTION[row["idtransaction"]]
        proxy, _ = SpecialTransactionProxy.objects.get_or_create(transaction=tr)
proxy.remittance = map_idremittance[row["idremise"]]
try:
with transaction.atomic():
proxy.save()
except IntegrityError as e:
raise e
 class Command(BaseCommand):
     """
@@ -10,9 +460,60 @@ class Command(BaseCommand):
     Need to be run by a user with a registered role in postgres for the database nk15.
     """
 
+    def print_success(self, to_print):
+        return self.stdout.write(self.style.SUCCESS(to_print))
+
+    def add_arguments(self, parser):
+        parser.add_argument('-c', '--comptes', action='store_true', help="import accounts")
+        parser.add_argument('-b', '--boutons', action='store_true', help="import boutons")
+        parser.add_argument('-t', '--transactions', action='store_true', help="import transaction")
+        parser.add_argument('-al', '--aliases', action='store_true', help="import aliases")
+        parser.add_argument('-ac', '--activities', action='store_true', help="import activities")
+        parser.add_argument('-r', '--remittances', action='store_true', help="import check remittances")
+        parser.add_argument('-s', '--save', action='store', help="save mapping of idbde")
+        parser.add_argument('-m', '--map', action='store', help="import mapping of idbde")
+        parser.add_argument('-d', '--nk15db', action='store', default='nk15', help='NK15 database name')
+        parser.add_argument('-u', '--nk15user', action='store', default='nk15_user', help='NK15 database owner')
+
     def handle(self, *args, **kwargs):
-        subprocess.call("./apps/scripts/shell/tabularasa")
-        call_command('import_account', alias=True, chunk=1000, save = "map.json")
-        call_command('import_activities', chunk=100, map="map.json")
-        call_command('import_transaction', buttons=True, map="map.json")
-        #
+        global MAP_IDBDE
+        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
+        # connecting to nk15 database
+        conn = pg.connect(database=nk15db, user=nk15user)
+        cur = conn.cursor(cursor_factory=pge.DictCursor)
if kwargs["comptes"]:
# reset database.
call_command("migrate")
call_command("loaddata", "initial")
self.print_success("reset nk20 database\n")
import_comptes(cur)
self.print_success("comptes table imported")
elif kwargs["map"]:
filename = kwargs["map"]
with open(filename, 'r') as fp:
MAP_IDBDE = json.load(fp)
MAP_IDBDE = {int(k): int(v) for k, v in MAP_IDBDE.items()}
if kwargs["save"]:
filename = kwargs["save"]
with open(filename, 'w') as fp:
json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)
        # /!\ needs a proper MAP_IDBDE
if kwargs["boutons"]:
import_boutons(cur)
self.print_success("boutons table imported\n")
if kwargs["activities"]:
import_activities(cur)
self.print_success("activities imported\n")
import_activity_entries(cur)
self.print_success("activity entries imported\n")
if kwargs["aliases"]:
import_aliases(cur)
self.print_success("aliases imported\n")
if kwargs["transactions"]:
import_transaction(cur)
self.print_success("transaction imported\n")
if kwargs["remittances"]:
import_remittances(cur)
self.print_success("remittances imported\n")

View File

@@ -1,215 +0,0 @@
#!/usr/bin/env python3
import re
import psycopg2 as pg
import psycopg2.extras as pge
import pytz
import datetime
import copy
from django.utils.timezone import make_aware
from django.db import transaction
from note.models import (TemplateCategory,
TransactionTemplate,
Transaction,
RecurrentTransaction,
SpecialTransaction
)
from note.models import Note
from activity.models import Guest, GuestTransaction
from member.models import Membership, MembershipTransaction
from ._import_utils import ImportCommand, BulkCreateManager, timed
BDE_PK = 1
KFET_PK = 2
NOTE_SPECIAL_CODE = {
"espèce": 1,
"carte": 2,
"chèque": 3,
"virement": 4,
}
def get_date_end(date_start):
    # Memberships end on the September 30 that closes the school year of date_start.
    date_end = copy.deepcopy(date_start)
    if date_start.month > 8:
        date_end = date_end.replace(year=date_start.year + 1)
    date_end = date_end.replace(month=9, day=30)
    return date_end
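A quick sanity check of the intent of get_date_end as fixed above (the dates are illustrative): a membership started after August ends on September 30 of the following year, otherwise on September 30 of the same year.

import datetime

assert get_date_end(datetime.date(2019, 10, 1)) == datetime.date(2020, 9, 30)
assert get_date_end(datetime.date(2020, 3, 15)) == datetime.date(2020, 9, 30)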
class Command(ImportCommand):
"""
    Import command for transaction base data (Boutons and Transactions)
"""
def add_arguments(self, parser):
parser.add_argument('-b', '--buttons', action='store_true', help="import buttons")
parser.add_argument('-t', '--transactions', action='store', default=0, help="start id for transaction import")
@timed
@transaction.atomic
def import_buttons(self, cur, chunk_size):
categories = dict()
buttons = dict()
bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
cur.execute("SELECT * FROM boutons;")
n = cur.rowcount
pk_category = 1
for idx, row in enumerate(cur):
self.update_line(idx, n, row["label"])
if row["categorie"] not in categories:
bulk_mgr.add(TemplateCategory(pk=pk_category, name=row["categorie"]))
                categories[row["categorie"]] = pk_category
                pk_category += 1
obj_dict = {
"pk": row["id"],
"name": row["label"],
"amount": row["montant"],
"destination_id": self.MAP_IDBDE[row["destinataire"]],
"category_id": categories[row["categorie"]],
"display": row["affiche"],
"description": row["description"],
}
if row["label"] in buttons:
obj_dict["label"] = f"{obj_dict['label']}_{obj_dict['destination_id']}"
bulk_mgr.add(TransactionTemplate(**obj_dict))
buttons[obj_dict["label"]] = row["id"]
bulk_mgr.done()
return buttons, categories
@timed
@transaction.atomic
def import_transaction(self, cur, chunk_size, idmin, buttons, categories):
bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
cur.execute(
f"SELECT t.date AS transac_date, t.type, t.emetteur,\
t.destinataire,t.quantite, t.montant, t.description,\
t.valide, t.cantinvalidate, t.categorie, \
a.idbde, a.annee, a.wei, a.date AS adh_date, a.section\
FROM transactions AS t \
LEFT JOIN adhesions AS a ON t.id = a.idtransaction \
WHERE t.id >= {idmin} \
ORDER BY t.id;")
n = cur.rowcount
pk_membership = 1
pk_transaction = 1
for idx, row in enumerate(cur):
self.update_line(idx, n, row["description"])
try:
date = make_aware(row["transac_date"])
except (pytz.NonExistentTimeError, pytz.AmbiguousTimeError):
date = make_aware(row["transac_date"] + datetime.timedelta(hours=1))
            # standard transaction object
obj_dict = {
"pk": pk_transaction,
"destination_id": self.MAP_IDBDE[row["destinataire"]],
"source_id": self.MAP_IDBDE[row["emetteur"]],
"created_at": date,
"amount": row["montant"],
"quantity": row["quantite"],
"reason": row["description"],
"valid": row["valide"],
}
# for child transaction Models
child_dict = {"pk": obj_dict["pk"]}
ttype = row["type"]
if ttype == "don" or ttype == "transfert":
child_transaction = None
elif ttype == "bouton":
child_transaction = RecurrentTransaction
child_dict["category_id"] = categories.get(row["categorie"], categories["Autre"])
child_dict["template_id"] = buttons[row["description"]]
elif ttype == "crédit" or ttype == "retrait":
child_transaction = SpecialTransaction
                # Some transactions use the BDE (idbde=0) as source or destination;
                # let's fix that.
field_id = "source_id" if ttype == "crédit" else "destination_id"
if "espèce" in row["description"]:
obj_dict[field_id] = 1
elif "carte" in row["description"]:
obj_dict[field_id] = 2
elif "cheques" in row["description"]:
obj_dict[field_id] = 3
elif "virement" in row["description"]:
obj_dict[field_id] = 4
                # humans and clubs always have the biggest id
actor_pk = max(row["destinataire"], row["emetteur"])
actor = Note.objects.get(id=self.MAP_IDBDE[actor_pk])
# custom fields of SpecialTransaction
if actor.__class__.__name__ == "NoteUser":
child_dict["first_name"] = actor.user.first_name
child_dict["last_name"] = actor.user.last_name
else:
child_dict["first_name"] = actor.club.name
child_dict["last_name"] = actor.club.name
elif ttype == "adhésion" and row["valide"]:
child_transaction = MembershipTransaction
# Kfet membership
montant = row["montant"]
obj_dict["amount"] = min(500, montant)
child_dict["membership_id"] = pk_membership
kfet_dict = {
"pk": pk_membership,
"user": self.MAP_IDBDE[row["idbde"]],
"club": KFET_PK,
"date_start": row["date"].date(), # Only date, not time
"date_end": get_date_end(row["date"].date()),
"fee": min(500, montant)
}
pk_membership += 1
pk_transaction += 1
# BDE Membership
obj_dict2 = obj_dict.copy()
child_dict2 = dict()
obj_dict2["pk"] = pk_transaction
obj_dict2["amount"] = max(montant - 500, 0)
child_dict2["pk"] = pk_transaction
bde_dict = {
"pk": pk_membership,
"user": self.MAP_IDBDE[row["idbde"]],
"club": BDE_PK,
"date_start": row["date"].date(), # Only date, not time
"date_end": get_date_end(row["date"].date()),
"fee": max(montant - 500, 0),
}
pk_membership += 1
                # the BDE MembershipTransaction is inserted before the Kfet MembershipTransaction
bulk_mgr.add(
Transaction(**obj_dict2),
child_transaction(**child_dict2),
Membership(**bde_dict),
Membership(**kfet_dict),
)
elif ttype == "invitation":
child_transaction = GuestTransaction
m = re.search(r"Invitation (.*?)(?:\s\()(.*?)\s(.*?)\)", row["description"])
if m:
first_name, last_name = m.groups(1), m.groups(2)
guest_id = Guest.object.filter(first_name__iexact=first_name,
last_name__iexact=last_name).first().pk
child_dict["guest_id"] = guest_id
else:
raise(f"Guest not Found {row['id']} {first_name}, last_name" )
bulk_mgr.add(Transaction(**obj_dict),
child_transaction(**child_dict))
pk_transaction += 1
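Illustrative parse of the invitation pattern used in the loop above (the description string is made up): group 1 captures the activity title, groups 2 and 3 the first and last word of the guest name in parentheses.

import re

desc = "Invitation Pot de rentree (Marie Curie)"  # made-up description
m = re.search(r"Invitation (.*?)(?:\s\()(.*?)\s(.*?)\)", desc)
assert m.group(1) == "Pot de rentree"
assert m.group(2) == "Marie"
assert m.group(3) == "Curie"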
def handle(self, *args, **kwargs):
# default args, provided by ImportCommand.
nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
# connecting to nk15 database
conn = pg.connect(database=nk15db, user=nk15user)
cur = conn.cursor(cursor_factory=pge.DictCursor)
if kwargs["map"]:
self.load(kwargs["map"])
        buttons, categories = self.import_buttons(cur, kwargs["chunk"])
        self.import_transaction(cur, kwargs["chunk"], kwargs["transactions"], buttons, categories)