Merge remote-tracking branch 'origin/master'
commit ee54fca89e
@@ -0,0 +1,113 @@
#!/usr/bin/env python3

import json
import time
from collections import defaultdict

from django.core.management.base import BaseCommand
from django.apps import apps
from django.db import transaction

from polymorphic.models import PolymorphicModel


def timed(method):
    """
    A simple decorator to measure the time elapsed in a class method (hence the args[0]).
    """
    def _timed(*args, **kw):
        ts = time.time()
        result = method(*args, **kw)
        te = time.time()
        args[0].print_success(f"\n {method.__name__} executed ({te-ts:.2f}s)")
        return result

    return _timed


class ImportCommand(BaseCommand):
    """
    Generic command for the import of the NK15 database.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.MAP_IDBDE = dict()

    def print_success(self, to_print):
        return self.stdout.write(self.style.SUCCESS(to_print))

    def print_error(self, to_print):
        return self.stdout.write(self.style.ERROR(to_print))

    def update_line(self, n, total, content):
        n = str(n)
        total = str(total)
        n = n.rjust(len(total))
        print(f"\r ({n}/{total}) {content:10.10}", end="")

    def create_parser(self, prog_name, subcommand, **kwargs):
        parser = super().create_parser(prog_name, subcommand, **kwargs)
        parser.add_argument('--nk15db', action='store', default='nk15', help='NK15 database name')
        parser.add_argument('--nk15user', action='store', default='nk15_user', help='NK15 database owner')
        parser.add_argument('-s', '--save', action='store', help="save mapping of idbde")
        parser.add_argument('-m', '--map', action='store', help="import mapping of idbde")
        parser.add_argument('-c', '--chunk', type=int, default=100, help="chunk size for bulk_create")
        return parser

    def save_map(self, filename):
        with open(filename, 'w') as fp:
            json.dump(self.MAP_IDBDE, fp, sort_keys=True, indent=2)

    def load_map(self, filename):
        with open(filename, 'r') as fp:
            self.MAP_IDBDE = json.load(fp, object_hook=lambda d: {int(k): int(v) for k, v in d.items()})


class BulkCreateManager(object):
    """
    This helper class keeps track of ORM objects to be created for multiple
    model classes, and automatically creates those objects with `bulk_create`
    when the number of objects accumulated for a given model class exceeds
    `chunk_size`.
    Upon completion of the loop that's `add()`ing objects, the developer must
    call `done()` to ensure the final set of objects is created for all models.
    """

    def __init__(self, chunk_size=100):
        self._create_queues = defaultdict(list)
        self.chunk_size = chunk_size

    def _commit(self, model_class):
        model_key = model_class._meta.label
        # Check for multi-table inheritance: it happens
        # if model_class is a grandchild of PolymorphicModel.
        if model_class.__base__.__base__ is PolymorphicModel:
            self._commit(model_class.__base__)
            with transaction.atomic():
                for obj in self._create_queues[model_key]:
                    obj.save_base(raw=True)
        else:
            model_class.objects.bulk_create(self._create_queues[model_key])
        self._create_queues[model_key] = []

    def add(self, *args):
        """
        Add an object to the queue to be created, and call bulk_create if we
        have enough objs.
        """
        for obj in args:
            model_class = type(obj)
            model_key = model_class._meta.label
            self._create_queues[model_key].append(obj)
            if len(self._create_queues[model_key]) >= self.chunk_size:
                self._commit(model_class)

    def done(self):
        """
        Always call this upon completion to make sure the final partial chunk
        is saved.
        """
        for model_name, objs in self._create_queues.items():
            if len(objs) > 0:
                self._commit(apps.get_model(model_name))
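A minimal usage sketch of the helper above, as the import commands in this changeset drive it (the `pseudos` iterable is a placeholder; `Alias` and its fields are the ones actually bulk-created further down): objects are queued per model class, flushed via `bulk_create` every `chunk_size` objects, and `done()` flushes the final partial chunk.

# Sketch only: `pseudos` is a hypothetical iterable of (name, note pk) pairs.
from note.models import Alias
from ._import_utils import BulkCreateManager

bulk_mgr = BulkCreateManager(chunk_size=100)
for pk, (name, note_pk) in enumerate(pseudos, start=1):
    # queued in memory; flushed with bulk_create once 100 Alias objects are pending
    bulk_mgr.add(Alias(pk=pk, name=name, normalized_name=Alias.normalize(name), note_id=note_pk))
bulk_mgr.done()  # flush the last partial chunk for every model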
@@ -1,89 +0,0 @@
# Copyright (C) 2018-2020 by BDE ENS Paris-Saclay
# SPDX-License-Identifier: GPL-3.0-or-later

from django.core.management import BaseCommand, CommandError
from django.db.models import Q
from django.db.models.functions import Lower

from wei.models import WEIClub, Bus, BusTeam, WEIMembership


class Command(BaseCommand):
    help = "Export WEI registrations."

    def add_arguments(self, parser):
        parser.add_argument('--bus', '-b', choices=[bus.name for bus in Bus.objects.all()], type=str, default=None,
                            help='Filter by bus')
        parser.add_argument('--team', '-t', choices=[team.name for team in BusTeam.objects.all()], type=str,
                            default=None, help='Filter by team. Type "none" if you want to select the members '
                                               + 'that are not in a team.')
        parser.add_argument('--year', '-y', type=int, default=None,
                            help='Select the year of the concerned WEI. Default: last year')
        parser.add_argument('--sep', type=str, default='|',
                            help='Select the CSV separator.')

    def handle(self, *args, **options):
        year = options["year"]
        if year:
            try:
                wei = WEIClub.objects.get(year=year)
            except WEIClub.DoesNotExist:
                raise CommandError("The WEI of year {:d} does not exist.".format(year,))
        else:
            wei = WEIClub.objects.order_by('-year').first()

        bus = options["bus"]
        if bus:
            try:
                bus = Bus.objects.filter(wei=wei).get(name=bus)
            except Bus.DoesNotExist:
                raise CommandError("The bus {} does not exist or does not belong to the WEI {}.".format(bus, wei.name,))

        team = options["team"]
        if team:
            if team.lower() == "none":
                team = 0
            else:
                try:
                    team = BusTeam.objects.filter(Q(bus=bus) | Q(wei=wei)).get(name=team)
                    bus = team.bus
                except BusTeam.DoesNotExist:
                    raise CommandError("The team {} does not exist or does not belong to the bus {} nor the wei {}."
                                       .format(team, bus.name if bus else "<None>", wei.name,))

        qs = WEIMembership.objects
        qs = qs.filter(club=wei).order_by(
            Lower('bus__name'),
            Lower('team__name'),
            'user__profile__promotion',
            Lower('user__last_name'),
            Lower('user__first_name'),
        ).distinct()

        if bus:
            qs = qs.filter(bus=bus)

        if team is not None:
            qs = qs.filter(team=team if team else None)

        sep = options["sep"]

        self.stdout.write("Nom|Prénom|Date de naissance|Genre|Département|Année|Section|Bus|Équipe|Rôles"
                          .replace("|", sep))

        for membership in qs.all():
            user = membership.user
            registration = membership.registration
            bus = membership.bus
            team = membership.team
            s = user.last_name
            s += sep + user.first_name
            s += sep + str(registration.birth_date)
            s += sep + registration.get_gender_display()
            s += sep + user.profile.get_department_display()
            s += sep + str(user.profile.ens_year) + "A"
            s += sep + user.profile.section_generated
            s += sep + bus.name
            s += sep + (team.name if team else "--")
            s += sep + ", ".join(role.name for role in membership.roles.filter(~Q(name="Adhérent WEI")).all())
            self.stdout.write(s)
@@ -1,52 +0,0 @@
# Copyright (C) 2018-2020 by BDE ENS Paris-Saclay
# SPDX-License-Identifier: GPL-3.0-or-later

from datetime import date

from django.core.management import BaseCommand
from django.db.models import Q
from member.models import Membership, Club
from wei.models import WEIClub


class Command(BaseCommand):
    help = "Get mailing list registrations from the last wei. " \
           "Usage: manage.py extract_ml_registrations -t {events,art,sport}. " \
           "You can write this into a file with a pipe, then paste the document into your mail manager."

    def add_arguments(self, parser):
        parser.add_argument('--type', '-t', choices=["members", "clubs", "events", "art", "sport"], default="members",
                            help='Select the type of the mailing list (default members)')
        parser.add_argument('--year', '-y', type=int, default=None,
                            help='Select the year of the concerned WEI. Default: last year')

    def handle(self, *args, **options):
        if options["type"] == "members":
            for membership in Membership.objects.filter(
                    club__name="BDE",
                    date_start__lte=date.today(),
                    date_end__gte=date.today(),
            ).all():
                self.stdout.write(membership.user.email)
            return

        if options["type"] == "clubs":
            for club in Club.objects.all():
                self.stdout.write(club.email)
            return

        if options["year"] is None:
            wei = WEIClub.objects.order_by('-year').first()
        else:
            wei = WEIClub.objects.filter(year=options["year"])
            if wei.exists():
                wei = wei.get()
            else:
                wei = WEIClub.objects.order_by('-year').first()
                self.stderr.write(self.style.WARNING("Warning: there was no WEI in year " + str(options["year"]) + ". "
                                                     + "Assuming the last WEI (year " + str(wei.year) + ")"))
        q = Q(ml_events_registration=True) if options["type"] == "events" else Q(ml_art_registration=True) \
            if options["type"] == "art" else Q(ml_sport_registration=True)
        registrations = wei.users.filter(q)
        for registration in registrations.all():
            self.stdout.write(registration.user.email)
@@ -0,0 +1,214 @@
#!/usr/bin/env python3

import psycopg2 as pg
import psycopg2.extras as pge
import datetime
import json

from django.utils.timezone import make_aware, now
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType

from django.db import transaction

from note.models import Note, NoteUser, NoteClub
from note.models import Alias
from member.models import Club, Profile

from ._import_utils import ImportCommand, BulkCreateManager, timed

M_DURATION = 396
M_START = datetime.date(2019, 8, 31)
M_END = datetime.date(2020, 9, 30)

MAP_IDBDE = {
    -4: 2,  # Carte Bancaire
    -3: 4,  # Virement
    -2: 1,  # Espèces
    -1: 3,  # Chèque
    0: 5,   # BDE
}

# some Aliases have been created in the fixtures
ALIAS_SET = {a[0] for a in Alias.objects.all().values_list("normalized_name")}

note_user_type = ContentType.objects.get(app_label="note", model="noteuser")
note_club_type = ContentType.objects.get(app_label="note", model="noteclub")


class Command(ImportCommand):
    """
    Import command for people base data (Comptes, and Aliases)
    """

    def add_arguments(self, parser):
        parser.add_argument('-a', '--alias', action='store_true', help="import alias")

    def import_special_account(self, cur):
        cur.execute("SELECT idbde, solde from comptes where idbde <=0")
        for row in cur:
            note = Note.objects.get(pk=MAP_IDBDE[row["idbde"]])
            note.balance = row["solde"]
            note.save()

    @timed
    @transaction.atomic
    def import_account(self, cur, chunk_size):
        """
        Import every account of the nk15 in a batch fashion.
        Every model has to be created manually, and no magic `.save()`
        function is being called.
        """
        cur.execute("SELECT * FROM comptes WHERE idbde > 0 ORDER BY idbde;")
        pk_club = 3
        pk_user = 1
        pk_profile = 1
        pk_note = 7  # pk 6 is Kfet!
        n = cur.rowcount

        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        for idx, row in enumerate(cur):
            pseudo = row["pseudo"]
            pseudo_norm = Alias.normalize(pseudo)
            self.update_line(idx, n, pseudo)
            # clean pseudo (normalized pseudo must be unique)
            if pseudo_norm in ALIAS_SET:
                pseudo = pseudo + str(row["idbde"])
            else:
                ALIAS_SET.add(pseudo_norm)
            note_dict = {
                "pk": pk_note,
                "balance": row['solde'],
                "last_negative": None,
                "is_active": True,
                "display_image": "",
                "created_at": now()
            }
            # clean date
            if row["last_negatif"] is not None:
                note_dict["last_negative"] = make_aware(row["last_negatif"])
            if row["type"] == "personne":
                # sanitize password
                if row["passwd"] != "*|*" and not row["deleted"]:
                    passwd_nk15 = "$".join(["custom_nk15", "1", row["passwd"]])
                else:
                    passwd_nk15 = ''

                obj_dict = {
                    "pk": pk_user,
                    "username": row["pseudo"],
                    "password": passwd_nk15,
                    "first_name": row["nom"],
                    "last_name": row["prenom"],
                    "email": row["mail"],
                    "is_active": True,  # temporary
                }
                profile_dict = {
                    "pk": pk_profile,
                    "user_id": pk_user,
                    "phone_number": row['tel'],
                    "address": row['adresse'],
                    "paid": row['normalien'],
                    "registration_valid": True,
                    "email_confirmed": True,
                }
                note_dict["polymorphic_ctype"] = note_user_type
                note_user_dict = {
                    "pk": pk_note,
                    "user_id": pk_user,
                }
                alias_dict = {
                    "pk": pk_note,
                    "name": pseudo,
                    "normalized_name": Alias.normalize(pseudo),
                    "note_id": pk_note,
                }

                bulk_mgr.add(User(**obj_dict),
                             Profile(**profile_dict),
                             Note(**note_dict),
                             NoteUser(**note_user_dict),
                             Alias(**alias_dict),)
                pk_user += 1
                pk_profile += 1

            else:  # club
                obj_dict = {
                    "pk": pk_club,
                    "name": row["pseudo"],
                    "email": row["mail"],
                    "membership_duration": M_DURATION,
                    "membership_start": M_START,
                    "membership_end": M_END,
                    "membership_fee_paid": 0,
                    "membership_fee_unpaid": 0,
                }
                note_club_dict = {
                    "pk": pk_note,
                    "club_id": pk_club,
                }
                alias_dict = {
                    "pk": pk_note,
                    "name": pseudo,
                    "normalized_name": Alias.normalize(pseudo),
                    "note_id": pk_note
                }
                note_dict["polymorphic_ctype"] = note_club_type
                bulk_mgr.add(Club(**obj_dict),
                             Note(**note_dict),
                             NoteClub(**note_club_dict),
                             Alias(**alias_dict))
                pk_club += 1
            # row import completed
            MAP_IDBDE[row["idbde"]] = pk_note
            pk_note += 1
        bulk_mgr.done()

    @timed
    def import_alias(self, cur, chunk_size):
        """
        Import Alias from nk15.
        We rely on validation of the models, but it is slow.
        """
        cur.execute("SELECT * FROM aliases ORDER by id")
        n = cur.rowcount

        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        pk_alias = Alias.objects.order_by('-id').first().id + 1
        for idx, row in enumerate(cur):
            alias_name = row["alias"]
            alias_name = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
            alias_norm = Alias.normalize(alias_name)
            self.update_line(idx, n, alias_norm)
            # clean alias (the normalized name must be unique)
            if alias_norm in ALIAS_SET:
                continue
            else:
                ALIAS_SET.add(alias_norm)
            obj_dict = {
                "pk": pk_alias,
                "note_id": MAP_IDBDE[row["idbde"]],
                "name": alias_name,
                "normalized_name": alias_norm,
            }
            pk_alias += 1
            bulk_mgr.add(Alias(**obj_dict))
        bulk_mgr.done()

    def handle(self, *args, **kwargs):
        # default args, provided by ImportCommand.
        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
        # connecting to nk15 database
        conn = pg.connect(database=nk15db, user=nk15user)
        cur = conn.cursor(cursor_factory=pge.DictCursor)

        self.import_special_account(cur)
        self.import_account(cur, kwargs["chunk"])
        # Alias Management
        if kwargs["alias"]:
            self.import_alias(cur, kwargs["chunk"])
        # save to disk
        if kwargs["save"]:
            filename = kwargs["save"]
            with open(filename, 'w') as fp:
                json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)
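For reference, the orchestrating command further down in this diff drives this import first, so that the NK15 `idbde` → NK20 note pk mapping built above (MAP_IDBDE) gets saved with --save and can be reused by the later imports:

from django.core.management import call_command

# Import accounts and aliases, then persist the idbde -> note pk mapping for the next commands.
call_command('import_account', alias=True, chunk=1000, save="map.json")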
@@ -0,0 +1,124 @@
#!/usr/bin/env python3

import psycopg2 as pg
import psycopg2.extras as pge
import datetime
import copy

from django.utils.timezone import make_aware
from django.db import transaction

from activity.models import ActivityType, Activity, Guest, Entry
from member.models import Club
from note.models import Note
from ._import_utils import ImportCommand, BulkCreateManager, timed

MAP_ACTIVITY = dict()

CLUB_RELOU = [
    0,     # BDE
    4771,  # Kataclist
    5162,  # Assurance BDE ?!
    5164,  # S & L
    625,   # Aspique
    5154,  # Frekens
]


class Command(ImportCommand):
    """
    Import command for activity base data (Activities, Guests, and Entries)
    """

    @timed
    @transaction.atomic
    def import_activities(self, cur, chunk):
        cur.execute("SELECT * FROM activites ORDER by id")
        n = cur.rowcount
        bulk_mgr = BulkCreateManager(chunk_size=chunk)
        activity_type_id = ActivityType.objects.get(name="Pot").pk  # Needs to be fixed manually
        kfet = Club.objects.get(name="Kfet")
        pk_activity = 1
        for idx, row in enumerate(cur):
            self.update_line(idx, n, row["titre"])
            note = self.MAP_IDBDE[row["responsable"]]
            if note == 6244:
                # "Licorne magique" must not use its club note to propose activities
                note = Note.objects.get(pk=self.MAP_IDBDE[6524])
                note = note.user_id
            organizer = Club.objects.filter(name=row["signature"])
            if organizer.exists():
                # Try to find the club that organizes the activity.
                # If not found, assume it's Kfet (fix manually)
                organizer = organizer.get()
            else:
                organizer = kfet
            obj_dict = {
                "pk": pk_activity,
                "name": row["titre"],
                "description": row["description"],
                "activity_type_id": activity_type_id,  # By default Pot
                "creater_id": note,
                "organizer_id": organizer.pk,
                "attendees_club_id": kfet.pk,  # Maybe fix manually
                "date_start": make_aware(row["debut"]),
                "date_end": make_aware(row["fin"]),
                "valid": row["validepar"] is not None,
                "open": False,
            }
            # WARNING: Fields lieu, liste, listeimprimee are missing
            MAP_ACTIVITY[row["id"]] = pk_activity
            pk_activity += 1
            bulk_mgr.add(Activity(**obj_dict))
        bulk_mgr.done()

    @timed
    @transaction.atomic
    def import_guest(self, cur, chunk):
        bulk_mgr = BulkCreateManager(chunk_size=chunk)
        cur.execute("SELECT * FROM invites ORDER by id")
        n = cur.rowcount
        for idx, row in enumerate(cur):
            self.update_line(idx, n, f"{row['nom']} {row['prenom']}")
            if row["responsable"] in CLUB_RELOU:
                row["responsable"] = 3508
            obj_dict = {
                "pk": row["id"],
                "activity_id": MAP_ACTIVITY[row["activite"]],
                "last_name": row["nom"],
                "first_name": row["prenom"],
                "inviter_id": self.MAP_IDBDE[row["responsable"]],
            }
            bulk_mgr.add(Guest(**obj_dict))
        bulk_mgr.done()

    @timed
    @transaction.atomic
    def import_activities_entries(self, cur, chunk):
        bulk_mgr = BulkCreateManager(chunk_size=chunk)
        cur.execute("SELECT * FROM entree_activites ORDER by id")
        n = cur.rowcount
        for idx, row in enumerate(cur):
            self.update_line(idx, n, f"{row['idbde']} {row['responsable']}")
            if row["idbde"] in CLUB_RELOU:
                row["idbde"] = 3508
            obj_dict = {
                "activity_id": MAP_ACTIVITY[row["activite"]],
                "time": make_aware(row["heure_entree"]),
                "note_id": self.MAP_IDBDE[row["responsable"] if row['est_invite'] else row["idbde"]],
                "guest_id": row["idbde"] if row['est_invite'] else None,
            }
            bulk_mgr.add(Entry(**obj_dict))
        bulk_mgr.done()

    def handle(self, *args, **kwargs):
        # default args, provided by ImportCommand.
        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
        # connecting to nk15 database
        conn = pg.connect(database=nk15db, user=nk15user)
        cur = conn.cursor(cursor_factory=pge.DictCursor)

        if kwargs["map"]:
            self.load_map(kwargs["map"])
        self.import_activities(cur, kwargs["chunk"])
        self.import_guest(cur, kwargs["chunk"])
        self.import_activities_entries(cur, kwargs["chunk"])
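The --map option consumed above goes through ImportCommand.load_map(), whose object_hook restores integer keys and values from the saved JSON. A small self-contained sketch of that round-trip (the "1": 7 entry is a hypothetical user account; the negative keys are the special notes from MAP_IDBDE):

import json

raw = '{"-4": 2, "-3": 4, "-2": 1, "-1": 3, "0": 5, "1": 7}'
MAP_IDBDE = json.loads(raw, object_hook=lambda d: {int(k): int(v) for k, v in d.items()})
assert MAP_IDBDE[-4] == 2 and MAP_IDBDE[1] == 7  # integer keys again, as the importers expect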
@@ -1,441 +1,8 @@
#!/usr/bin/env python3

import json
import datetime
import re
import subprocess

from django.core.management.base import BaseCommand
from django.core.management import call_command
import psycopg2 as pg
import psycopg2.extras as pge
from django.db import transaction
from django.core.exceptions import ValidationError
from django.utils.timezone import make_aware
from django.db import IntegrityError
from django.contrib.auth.models import User
from activity.models import ActivityType, Activity, Guest, Entry, GuestTransaction
from note.models import Note
from note.models import Alias
from note.models import TemplateCategory, TransactionTemplate, \
    Transaction, RecurrentTransaction, SpecialTransaction
from member.models import Club, Membership
from treasury.models import RemittanceType, Remittance, SpecialTransactionProxy

"""
NK15 import script:
TODO: import transactions
TODO: import memberships
TODO: import activities
TODO: import ...

"""
M_DURATION = 396
M_START = datetime.date(2019, 8, 31)
M_END = datetime.date(2020, 9, 30)

MAP_IDBDE = {
    -4: 2,  # Carte Bancaire
    -3: 4,  # Virement
    -2: 1,  # Espèces
    -1: 3,  # Chèque
    0: 5,   # BDE
}

MAP_IDACTIVITY = {}
MAP_NAMEACTIVITY = {}
MAP_NAMEGUEST = {}
MAP_IDSPECIALTRANSACTION = {}


def update_line(n, total, content):
    n = str(n)
    total = str(total)
    n = n.rjust(len(total))
    print(f"\r ({n}/{total}) {content:10.10}", end="")


@transaction.atomic
def import_comptes(cur):
    cur.execute("SELECT * FROM comptes WHERE idbde > 0 ORDER BY idbde;")
    pkclub = 3
    n = cur.rowcount
    for idx, row in enumerate(cur):
        update_line(idx, n, row["pseudo"])
        if row["type"] == "personne":
            # sanitize password
            if row["passwd"] != "*|*":
                passwd_nk15 = "$".join(["custom_nk15", "1", row["passwd"]])
            else:
                passwd_nk15 = ''
            try:
                obj_dict = {
                    "username": row["pseudo"],
                    "password": passwd_nk15,
                    "first_name": row["nom"],
                    "last_name": row["prenom"],
                    "email": row["mail"],
                    "is_active": True,  # temporary
                }

                user = User.objects.create(**obj_dict)
                profile = user.profile
                profile.phone_number = row['tel']
                profile.address = row['adresse']
                profile.paid = row['normalien']
                profile.registration_valid = True
                profile.email_confirmed = True
                user.save()
                profile.save()
            # sanitize duplicate aliases (nk12)
            except ValidationError as e:
                if e.code == 'same_alias':
                    user.username = row["pseudo"] + str(row["idbde"])
                    user.save()
                else:
                    raise e
            # profile and note created via signal.

            note = user.note
            date = row.get("last_negatif", None)
            if date is not None:
                note.last_negative = make_aware(date)
            note.balance = row["solde"]
            note.save()
        else:  # club
            obj_dict = {
                "pk": pkclub,
                "name": row["pseudo"],
                "email": row["mail"],
                "membership_duration": M_DURATION,
                "membership_start": M_START,
                "membership_end": M_END,
                "membership_fee_paid": 0,
                "membership_fee_unpaid": 0,
            }
            club, c = Club.objects.get_or_create(**obj_dict)
            pkclub += 1
            note = club.note
            note.balance = row["solde"]
            club.save()
            note.save()

        MAP_IDBDE[row["idbde"]] = note.note_ptr_id


@transaction.atomic
def import_boutons(cur):
    cur.execute("SELECT * FROM boutons;")
    n = cur.rowcount
    for idx, row in enumerate(cur):
        update_line(idx, n, row["label"])
        cat, created = TemplateCategory.objects.get_or_create(name=row["categorie"])
        if created:
            cat.save()
        obj_dict = {
            "pk": row["id"],
            "name": row["label"],
            "amount": row["montant"],
            "destination_id": MAP_IDBDE[row["destinataire"]],
            "category": cat,
            "display": row["affiche"],
            "description": row["description"],
        }
        try:
            with transaction.atomic():  # required for error management
                button = TransactionTemplate.objects.create(**obj_dict)
        except IntegrityError as e:
            # a button with the same name is not possible in NK20.
            if "unique" in e.args[0]:
                qs = Club.objects.filter(note__note_ptr=MAP_IDBDE[row["destinataire"]]).values('name')
                note_name = qs[0]["name"]
                # rename the button
                obj_dict["name"] = "{} {}".format(obj_dict["name"], note_name)
                button = TransactionTemplate.objects.create(**obj_dict)
            else:
                raise e
        button.save()


@transaction.atomic
def import_transaction(cur):
    idmin = 58770
    cur.execute("SELECT *, transactions.date AS transac_date\
                 FROM transactions\
                 LEFT JOIN adhesions ON transactions.id = adhesions.id\
                 WHERE transactions.id> {}\
                 ORDER BY transactions.id;".format(idmin))
    n = cur.rowcount
    for idx, row in enumerate(cur):
        update_line(idx, n, row["description"])
        # some dates are set to None; use the previous one
        date = row["transac_date"]
        obj_dict = {
            # "pk": row["id"],
            "destination_id": MAP_IDBDE[row["destinataire"]],
            "source_id": MAP_IDBDE[row["emetteur"]],
            "created_at": make_aware(date),
            "amount": row["montant"],
            "quantity": row["quantite"],
            "reason": row["description"],
            "valid": row["valide"],
        }
        ttype = row["type"]
        if ttype == "don" or ttype == "transfert":
            Transaction.objects.create(**obj_dict)
        elif ttype == "bouton":
            cat_name = row["categorie"]
            if cat_name is None:
                cat_name = 'None'
            cat, created = TemplateCategory.objects.get_or_create(name=cat_name)
            if created:
                cat.save()
            obj_dict["category"] = cat
            RecurrentTransaction.objects.create(**obj_dict)
        elif ttype == "crédit" or ttype == "retrait":
            field_id = "source_id" if ttype == "crédit" else "destination_id"
            if "espèce" in row["description"]:
                obj_dict[field_id] = 1
            elif "carte" in row["description"]:
                obj_dict[field_id] = 2
            elif "cheques" in row["description"]:
                obj_dict[field_id] = 3
            elif "virement" in row["description"]:
                obj_dict[field_id] = 4
            pk = max(row["destinataire"], row["emetteur"])
            actor = Note.objects.get(id=MAP_IDBDE[pk])
            # custom fields of SpecialTransaction
            if actor.__class__.__name__ == "NoteUser":
                obj_dict["first_name"] = actor.user.first_name
                obj_dict["last_name"] = actor.user.last_name
            elif actor.__class__.__name__ == "NoteClub":
                obj_dict["first_name"] = actor.club.name
                obj_dict["last_name"] = actor.club.name
            else:
                raise Exception("You shouldn't be here")
            tr = SpecialTransaction.objects.create(**obj_dict)
            if "cheques" in row["description"]:
                MAP_IDSPECIALTRANSACTION[row["id"]] = tr
        elif ttype == "adhésion":
            # Since BDE and Kfet are distinct, don't import membership transactions and use our custom transactions.
            pass
        elif ttype == "invitation":
            m = re.search(r"Invitation (.*?) \((.*?)\)", row["description"])
            if m is None:
                raise IntegrityError("Invitation is not well formatted: {} (must be 'Invitation ACTIVITY_NAME (NAME)')"
                                     .format(row["description"]))

            activity_name = m.group(1)
            guest_name = m.group(2)

            if activity_name not in MAP_NAMEACTIVITY:
                raise IntegrityError("Activity {} is not found".format(activity_name,))
            activity = MAP_NAMEACTIVITY[activity_name]

            if guest_name not in MAP_NAMEGUEST:
                raise IntegrityError("Guest {} is not found".format(guest_name,))

            guest = None
            for g in MAP_NAMEGUEST[guest_name]:
                if g.activity.pk == activity.pk:
                    guest = g
                    break
            if guest is None:
                raise IntegrityError("Guest {} didn't go to the activity {}".format(guest_name, activity_name,))

            obj_dict["guest"] = guest

            GuestTransaction.objects.get_or_create(**obj_dict)
        else:
            print("other type not supported yet:", ttype)


@transaction.atomic
def import_aliases(cur):
    cur.execute("SELECT * FROM aliases ORDER by id")
    n = cur.rowcount
    for idx, row in enumerate(cur):
        update_line(idx, n, row["alias"])
        alias_name = row["alias"]
        alias_name_good = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
        obj_dict = {
            "note_id": MAP_IDBDE[row["idbde"]],
            "name": alias_name_good,
            "normalized_name": Alias.normalize(alias_name_good)
        }
        try:
            with transaction.atomic():
                alias, created = Alias.objects.get_or_create(**obj_dict)

        except IntegrityError as e:
            if "unique" in e.args[0]:
                continue
            else:
                raise e
        alias.save()


@transaction.atomic
def import_activities(cur):
    cur.execute("SELECT * FROM activites ORDER by id")
    n = cur.rowcount
    activity_type = ActivityType.objects.get(name="Pot")  # Needs to be fixed manually
    kfet = Club.objects.get(name="Kfet")
    for idx, row in enumerate(cur):
        update_line(idx, n, row["titre"])
        organizer = Club.objects.filter(name=row["signature"])
        if organizer.exists():
            # Try to find the club that organizes the activity. If not found, assume it is Kfet (fix manually)
            organizer = organizer.get()
        else:
            organizer = kfet
        obj_dict = {
            "name": row["titre"],
            "description": row["description"],
            "activity_type": activity_type,  # By default Pot
            "creater": MAP_IDBDE[row["responsable"]],
            "organizer": organizer,
            "attendees_club": kfet,  # Maybe fix manually
            "date_start": row["debut"],
            "date_end": row["fin"],
            "valid": row["validepar"] is not None,
            "open": row["open"],  # Should always be False
        }
        # WARNING: Fields lieu, liste, listeimprimee are missing
        try:
            with transaction.atomic():
                activity = Activity.objects.get_or_create(**obj_dict)[0]
                MAP_IDACTIVITY[row["id"]] = activity
                MAP_NAMEACTIVITY[activity.name] = activity
        except IntegrityError as e:
            raise e


@transaction.atomic
def import_activity_entries(cur):
    map_idguests = {}

    cur.execute("SELECT * FROM invites ORDER by id")
    n = cur.rowcount
    for idx, row in enumerate(cur):
        update_line(idx, n, row["nom"] + " " + row["prenom"])
        obj_dict = {
            "activity": MAP_IDACTIVITY[row["activity"]],
            "last_name": row["nom"],
            "first_name": row["prenom"],
            "inviter": MAP_IDBDE[row["responsable"]],
        }
        try:
            with transaction.atomic():
                guest = Guest.objects.get_or_create(**obj_dict)[0]
                map_idguests.setdefault(row["id"], [])
                map_idguests[row["id"]].append(guest)
                guest_name = guest.first_name + " " + guest.last_name
                MAP_NAMEGUEST.setdefault(guest_name, [])
                MAP_NAMEGUEST[guest_name].append(guest)
        except IntegrityError as e:
            raise e

    cur.execute("SELECT * FROM entree_activites ORDER by id")
    n = cur.rowcount
    for idx, row in enumerate(cur):
        update_line(idx, n, row["nom"] + " " + row["prenom"])
        activity = MAP_IDACTIVITY[row["activity"]]
        guest = None
        if row["est_invite"]:
            for g in map_idguests[row["id"]]:
                if g.activity.pk == activity.pk:
                    guest = g
                    break
            if not guest:
                raise IntegrityError("Guest was not found: " + str(row))
        obj_dict = {
            "activity": activity,
            "time": row["heure_entree"],
            "note": guest.inviter if guest else MAP_IDBDE[row["idbde"]],
            "guest": guest,
        }
        try:
            with transaction.atomic():
                Entry.objects.get_or_create(**obj_dict)
        except IntegrityError as e:
            raise e


@transaction.atomic
def import_memberships(cur):
    cur.execute("SELECT * FROM adhesions ORDER by id")
    n = cur.rowcount
    bde = Club.objects.get(name="BDE")
    kfet = Club.objects.get(name="Kfet")
    for idx, row in enumerate(cur):
        update_line(idx, n, MAP_IDBDE[row["idbde"]].username)
        bde_dict = {
            "user": MAP_IDBDE[row["idbde"]],
            "club": bde,
            "date_start": row["date"][:10],  # Only date, not time
            "fee": 500,
        }
        kfet_dict = {
            "user": MAP_IDBDE[row["idbde"]],
            "club": kfet,
            "date_start": row["date"][:10],  # Only date, not time
            "fee": 1500 if row["date"].month in [3, 4, 5, 6, 7] else 3500,
        }
        try:
            with transaction.atomic():
                bde_membership = Membership.objects.get_or_create(**bde_dict)[0]
                kfet_membership = Membership.objects.get_or_create(**kfet_dict)[0]
                bde_membership.transaction.created_at = row["date"]
                bde_membership.transaction.save()
                kfet_membership.transaction.created_at = row["date"]
                kfet_membership.transaction.save()
        except IntegrityError as e:
            raise e


@transaction.atomic
def import_remittances(cur):
    cur.execute("SELECT * FROM remises ORDER by id")
    map_idremittance = {}
    n = cur.rowcount
    check_type = RemittanceType.objects.get(note__name="Chèque")
    for idx, row in enumerate(cur):
        update_line(idx, n, row["date"])
        obj_dict = {
            "date": row["date"][:10],
            "remittance_type": check_type,
            "comment": row["commentaire"],
            "closed": row["close"],
        }
        try:
            with transaction.atomic():
                remittance = Remittance.objects.get_or_create(**obj_dict)[0]
                map_idremittance[row["id"]] = remittance
        except IntegrityError as e:
            raise e

    print("remittances are imported")
    print("importing checks")

    cur.execute("SELECT * FROM cheques ORDER by id")
    n = cur.rowcount
    for idx, row in enumerate(cur):
        update_line(idx, n, row["date"])
        obj_dict = {
            "date": row["date"][:10],
            "remittance_type": check_type,
            "comment": row["commentaire"],
            "closed": row["close"],
        }
        tr = MAP_IDSPECIALTRANSACTION[row["idtransaction"]]
        proxy = SpecialTransactionProxy.objects.get_or_create(transaction=tr)[0]
        proxy.remittance = map_idremittance[row["idremise"]]
        try:
            with transaction.atomic():
                proxy.save()
        except IntegrityError as e:
            raise e


class Command(BaseCommand):
    """
@@ -443,64 +10,9 @@ class Command(BaseCommand):
    Need to be run by a user with a registered role in postgres for the database nk15.
    """

    def print_success(self, to_print):
        return self.stdout.write(self.style.SUCCESS(to_print))

    def add_arguments(self, parser):
        parser.add_argument('-c', '--comptes', action='store_true', help="import accounts")
        parser.add_argument('-b', '--boutons', action='store_true', help="import boutons")
        parser.add_argument('-t', '--transactions', action='store_true', help="import transactions")
        parser.add_argument('-al', '--aliases', action='store_true', help="import aliases")
        parser.add_argument('-ac', '--activities', action='store_true', help="import activities")
        parser.add_argument('-M', '--memberships', action='store_true', help="import memberships")
        parser.add_argument('-r', '--remittances', action='store_true', help="import check remittances")
        parser.add_argument('-s', '--save', action='store', help="save mapping of idbde")
        parser.add_argument('-m', '--map', action='store', help="import mapping of idbde")
        parser.add_argument('-d', '--nk15db', action='store', default='nk15', help='NK15 database name')
        parser.add_argument('-u', '--nk15user', action='store', default='nk15_user', help='NK15 database owner')

    def handle(self, *args, **kwargs):
        global MAP_IDBDE
        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
        # connecting to nk15 database
        conn = pg.connect(database=nk15db, user=nk15user)
        cur = conn.cursor(cursor_factory=pge.DictCursor)

        if kwargs["comptes"]:
            # reset database.
            call_command("migrate")
            call_command("loaddata", "initial")
            self.print_success("reset nk20 database\n")
            import_comptes(cur)
            self.print_success("comptes table imported")
        elif kwargs["map"]:
            filename = kwargs["map"]
            with open(filename, 'r') as fp:
                MAP_IDBDE = json.load(fp)
            MAP_IDBDE = {int(k): int(v) for k, v in MAP_IDBDE.items()}
        if kwargs["save"]:
            filename = kwargs["save"]
            with open(filename, 'w') as fp:
                json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)

        # /!\ needs a proper MAP_IDBDE
        if kwargs["boutons"]:
            import_boutons(cur)
            self.print_success("boutons table imported\n")
        if kwargs["activities"]:
            import_activities(cur)
            self.print_success("activities imported\n")
            import_activity_entries(cur)
            self.print_success("activity entries imported\n")
        if kwargs["aliases"]:
            import_aliases(cur)
            self.print_success("aliases imported\n")
        if kwargs["transactions"]:
            import_transaction(cur)
            self.print_success("transactions imported\n")
        if kwargs["memberships"]:
            import_memberships(cur)
            self.print_success("memberships imported\n")
        if kwargs["remittances"]:
            import_remittances(cur)
            self.print_success("remittances imported\n")
        subprocess.call("./apps/scripts/shell/tabularasa")
        call_command('import_account', alias=True, chunk=1000, save="map.json")
        call_command('import_activities', chunk=100, map="map.json")
        call_command('import_transaction', chunk=10000, buttons=True, map="map.json")
@@ -0,0 +1,250 @@
#!/usr/bin/env python3
import re
import psycopg2 as pg
import psycopg2.extras as pge
import pytz
import datetime
import copy

from django.utils.timezone import make_aware
from django.db import transaction

from note.models import (TemplateCategory,
                         TransactionTemplate,
                         Transaction,
                         RecurrentTransaction,
                         SpecialTransaction
                         )
from note.models import Note, NoteClub
from activity.models import Guest, GuestTransaction

from member.models import Membership, MembershipTransaction
from ._import_utils import ImportCommand, BulkCreateManager, timed

BDE_PK = 1
KFET_PK = 2
NOTE_SPECIAL_CODE = {
    "espèce": 1,
    "carte": 2,
    "chèque": 3,
    "virement": 4,
}


def get_date_end(date_start):
    date_end = copy.deepcopy(date_start)
    if date_start.month > 8:
        date_end = date_start.replace(year=date_start.year + 1)
    date_end = date_end.replace(month=9, day=30)
    return date_end


class Command(ImportCommand):
    """
    Import command for transaction data (Buttons, Transactions, and Memberships)
    """

    def add_arguments(self, parser):
        parser.add_argument('-b', '--buttons', action='store_true', help="import buttons")
        parser.add_argument('-t', '--transactions', action='store', default=0, help="start id for transaction import")

    @timed
    def import_buttons(self, cur, chunk_size):
        self.categories = dict()
        self.buttons = dict()
        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        cur.execute("SELECT * FROM boutons;")
        n = cur.rowcount
        for idx, row in enumerate(cur):
            self.update_line(idx, n, row["label"])
            if row["categorie"] not in self.categories:
                cat = TemplateCategory.objects.create(name=row["categorie"])
                cat.save()
                self.categories[row["categorie"]] = cat.pk
            obj_dict = {
                "pk": row["id"],
                "name": row["label"],
                "amount": row["montant"],
                "destination_id": self.MAP_IDBDE[row["destinataire"]],
                "category_id": self.categories[row["categorie"]],
                "display": row["affiche"],
                "description": row["description"],
            }
            if row["label"] in self.buttons:
                obj_dict["name"] = f"{obj_dict['name']}_{obj_dict['destination_id']}"
            bulk_mgr.add(TransactionTemplate(**obj_dict))
            self.buttons[obj_dict["name"]] = (row["id"], self.categories[row["categorie"]])
        bulk_mgr.done()

    def _basic_transaction(self, row, obj_dict, child_dict):
        if len(row["description"]) > 255:
            obj_dict["reason"] = obj_dict["reason"][:250] + "...)"
        return obj_dict, None, None

    def _template_transaction(self, row, obj_dict, child_dict):
        if self.categories.get(row["categorie"]):
            child_dict["category_id"] = self.categories[row["categorie"]]
        elif "WEI" in row["description"]:
            return obj_dict, None, None
        elif self.buttons.get(row["description"]):
            child_dict["category_id"] = self.buttons[row["description"]][1]
            child_dict["template_id"] = self.buttons[row["description"]][0]
        else:
            return obj_dict, None, None
        return obj_dict, child_dict, RecurrentTransaction

    def _membership_transaction(self, row, obj_dict, child_dict, pk_membership):
        obj_dict2 = obj_dict.copy()
        child_dict2 = child_dict.copy()
        child_dict2["membership_id"] = pk_membership

        return obj_dict2, child_dict2, MembershipTransaction

    def _special_transaction(self, row, obj_dict, child_dict):
        # Some transactions use BDE (idbde=0) as source or destination,
        # let's fix that.
        field_id = "source_id" if row["type"] == "crédit" else "destination_id"
        if "espèce" in row["description"]:
            obj_dict[field_id] = 1
        elif "carte" in row["description"]:
            obj_dict[field_id] = 2
        elif "cheques" in row["description"]:
            obj_dict[field_id] = 3
        elif "virement" in row["description"]:
            obj_dict[field_id] = 4
        # humans and clubs always have the biggest id
        actor_pk = max(row["destinataire"], row["emetteur"])
        actor = Note.objects.get(id=self.MAP_IDBDE[actor_pk])
        # custom fields of SpecialTransaction
        if actor.__class__.__name__ == "NoteUser":
            child_dict["first_name"] = actor.user.first_name
            child_dict["last_name"] = actor.user.last_name
        else:
            child_dict["first_name"] = actor.club.name
            child_dict["last_name"] = actor.club.name
        return obj_dict, child_dict, SpecialTransaction

    def _guest_transaction(self, row, obj_dict, child_dict):
        # Currently GuestTransaction is related to a Guest.
        # This is not ideal and should be changed to the Entry of this Guest.
        m = re.search(r"Invitation (.*?)(?:\s\()(.*?)\s(.*?)\)", row["description"])
        if m:
            first_name, last_name = m.group(2), m.group(3)
            guest_id = Guest.objects.filter(first_name__iexact=first_name,
                                            last_name__iexact=last_name).first().pk
            child_dict["guest_id"] = guest_id
        else:
            raise Exception(f"Guest not found: transaction {row['id']} ({row['description']})")

        return obj_dict, child_dict, GuestTransaction

    @timed
    @transaction.atomic
    def import_transaction(self, cur, chunk_size, idmin):
        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
        cur.execute(
            f"SELECT t.date AS transac_date, t.type, t.emetteur,\
            t.destinataire,t.quantite, t.montant, t.description,\
            t.valide, t.cantinvalidate, t.categorie, \
            a.idbde, a.annee, a.wei, a.date AS adh_date, a.section\
            FROM transactions AS t \
            LEFT JOIN adhesions AS a ON t.id = a.idtransaction \
            WHERE t.id >= {idmin} \
            ORDER BY t.id;")
        n = cur.rowcount
        pk_membership = 1
        pk_transaction = 1
        for idx, row in enumerate(cur):
            self.update_line(idx, n, row["description"])
            try:
                date = make_aware(row["transac_date"])
            except (pytz.NonExistentTimeError, pytz.AmbiguousTimeError):
                date = make_aware(row["transac_date"] + datetime.timedelta(hours=1))

            # standard transaction object
            obj_dict = {
                "pk": pk_transaction,
                "destination_id": self.MAP_IDBDE[row["destinataire"]],
                "source_id": self.MAP_IDBDE[row["emetteur"]],
                "created_at": date,
                "amount": row["montant"],
                "quantity": row["quantite"],
                "reason": row["description"],
                "valid": row["valide"],
            }
            # for child transaction Models
            child_dict = {"pk": obj_dict["pk"]}
            ttype = row["type"]
            if row["valide"] and (ttype == "adhésion" or row["description"].lower() == "inscription"):
                note = Note.objects.get(pk=obj_dict["source_id"])
                if isinstance(note, NoteClub):
                    child_transaction = None
                else:
                    user_id = note.user_id
                    montant = obj_dict["amount"]
                    obj_dict0, child_dict0, child_transaction = self._membership_transaction(row, obj_dict, child_dict, pk_membership)
                    bde_dict = {
                        "pk": pk_membership,
                        "user_id": user_id,
                        "club_id": KFET_PK,
                        "date_start": date.date(),  # Only date, not time
                        "date_end": get_date_end(date.date()),
                        "fee": min(500, montant)
                    }
                    pk_membership += 1
                    pk_transaction += 1
                    obj_dict, child_dict, child_transaction = self._membership_transaction(row, obj_dict, child_dict, pk_membership)
                    # Kfet membership
                    # BDE membership
                    obj_dict["pk"] = pk_transaction
                    child_dict["pk"] = pk_transaction
                    kfet_dict = {
                        "pk": pk_membership,
                        "user_id": user_id,
                        "club_id": BDE_PK,
                        "date_start": date.date(),  # Only date, not time
                        "date_end": get_date_end(date.date()),
                        "fee": max(montant - 500, 0),
                    }
                    obj_dict0["amount"] = bde_dict["fee"]
                    obj_dict["amount"] = kfet_dict["fee"]
                    # the BDE membership transaction is inserted before the Kfet membership transaction
                    pk_membership += 1
                    pk_transaction += 1
                    bulk_mgr.add(
                        Transaction(**obj_dict0),
                        child_transaction(**child_dict0),
                        Transaction(**obj_dict),
                        child_transaction(**child_dict),
                        Membership(**bde_dict),
                        Membership(**kfet_dict),
                    )
                    continue
            elif ttype == "bouton":
                obj_dict, child_dict, child_transaction = self._template_transaction(row, obj_dict, child_dict)
            elif ttype == "crédit" or ttype == "retrait":
                obj_dict, child_dict, child_transaction = self._special_transaction(row, obj_dict, child_dict)
            elif ttype == "invitation":
                obj_dict, child_dict, child_transaction = self._guest_transaction(row, obj_dict, child_dict)
            elif ttype == "don" or ttype == "transfert":
                obj_dict, child_dict, child_transaction = self._basic_transaction(row, obj_dict, child_dict)
            else:
                child_transaction = None
            # create the base transaction object and the typed one
            bulk_mgr.add(Transaction(**obj_dict))
            if child_transaction is not None:
                bulk_mgr.add(child_transaction(**child_dict))
            pk_transaction += 1
        bulk_mgr.done()

    @timed
    def handle(self, *args, **kwargs):
        # default args, provided by ImportCommand.
        nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
        # connecting to nk15 database
        conn = pg.connect(database=nk15db, user=nk15user)
        cur = conn.cursor(cursor_factory=pge.DictCursor)

        if kwargs["map"]:
            self.load_map(kwargs["map"])
        self.import_buttons(cur, kwargs["chunk"])
        self.import_transaction(cur, kwargs["chunk"], 0)
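A quick sanity check on the get_date_end() helper above: a membership that starts after August is valid until September 30 of the following year, otherwise until September 30 of the same year. A self-contained sketch (the dates are arbitrary examples):

import copy
import datetime

def get_date_end(date_start):  # same logic as in the command above
    date_end = copy.deepcopy(date_start)
    if date_start.month > 8:
        date_end = date_start.replace(year=date_start.year + 1)
    date_end = date_end.replace(month=9, day=30)
    return date_end

assert get_date_end(datetime.date(2019, 10, 5)) == datetime.date(2020, 9, 30)   # starts after August
assert get_date_end(datetime.date(2020, 3, 14)) == datetime.date(2020, 9, 30)   # starts before September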
@@ -1,15 +0,0 @@
# Copyright (C) 2018-2020 by BDE ENS Paris-Saclay
# SPDX-License-Identifier: GPL-3.0-or-later

from django.core.management import BaseCommand
from wei.forms import CurrentSurvey


class Command(BaseCommand):
    help = "Attribute a bus to each first-year member for the WEI"

    def handle(self, *args, **options):
        """
        Run the WEI algorithm to attribute a bus to each first-year member.
        """
        CurrentSurvey.get_algorithm_class()().run_algorithm()
@@ -1,3 +1,7 @@
#!/usr/bin/sh
sudo -u postgres sh -c "dropdb note_db && psql -c 'CREATE DATABASE note_db OWNER note;'";
echo 'reset db';
find apps/ -path "*/migrations/*.py*" -not -name "__init__.py" -delete
./manage.py makemigrations
./manage.py migrate
./manage.py loaddata initial