make alias import work
parent 5fa63f2abe
commit 64dd635039
@@ -3,6 +3,7 @@
 import psycopg2 as pg
 import psycopg2.extras as pge
 import datetime
+import json

 from django.utils.timezone import make_aware, now
 from django.contrib.auth.models import User
@@ -28,6 +29,10 @@ MAP_IDBDE = {
     -1: 3, # Chèque
     0: 5, # BDE
 }

+# some Aliases have been created in the fixtures
+ALIAS_SET = {a[0] for a in Alias.objects.all().values_list("normalized_name")}
+
 note_user_type = ContentType.objects.get(app_label="note", model="noteuser")
 note_club_type = ContentType.objects.get(app_label="note", model="noteclub")
+
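The ALIAS_SET comprehension above unpacks one-element tuples, because values_list() with a single field still returns rows as tuples. For illustration only, Django's flat=True option gives the same set without the a[0] indexing; the import path for Alias below is an assumption, since it is not shown in this diff:

# Import path assumed for illustration; use wherever Alias is actually defined.
from note.models import Alias

# Same dedup set as above: flat=True makes values_list() yield plain strings,
# so no tuple unpacking is needed.
ALIAS_SET = set(Alias.objects.values_list("normalized_name", flat=True))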
@@ -55,16 +60,15 @@ class Command(ImportCommand):
         n = cur.rowcount

         bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
-        pseudo_list = set()
         for idx, row in enumerate(cur):
             pseudo = row["pseudo"]
             pseudo_norm = Alias.normalize(pseudo)
             self.update_line(idx, n, pseudo)
             # clean pseudo (normalized pseudo must be unique)
-            if pseudo_norm in pseudo_list:
+            if pseudo_norm in ALIAS_SET:
                 pseudo = pseudo+str(row["idbde"])
             else:
-                pseudo_list.add(pseudo_norm)
+                ALIAS_SET.add(pseudo_norm)
             # clean date
             note_dict = {
                 "pk": pk_note,
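With this hunk the account import checks each normalized pseudo against the shared ALIAS_SET (pre-seeded from the fixtures) rather than a per-run pseudo_list, and resolves collisions by appending the old nk15 idbde. A standalone sketch of that rule, where the function name is hypothetical and str.casefold merely stands in for Alias.normalize(), whose definition is not part of this diff:

def unique_pseudo(pseudo, idbde, seen, normalize=str.casefold):
    """Return a pseudo whose normalized form does not collide with `seen`.

    Illustrative sketch only: the command uses Alias.normalize() and the
    module-level ALIAS_SET instead of these arguments.
    """
    norm = normalize(pseudo)
    if norm in seen:
        # Collision: keep the account importable by suffixing the old nk15 id.
        return pseudo + str(idbde)
    seen.add(norm)
    return pseudo

seen = set()
assert unique_pseudo("Ada", 42, seen) == "Ada"
assert unique_pseudo("ada", 43, seen) == "ada43"  # normalized clash with "Ada"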
@@ -151,37 +155,42 @@ class Command(ImportCommand):
             # row import completed
             MAP_IDBDE[row["idbde"]] = pk_note
             pk_note += 1
+        bulk_mgr.done()
         self.print_success("comptes table imported")

-    def import_alias(self, cur):
+    def import_alias(self, cur, chunk_size):
         """
         Import Alias from nk15
         We rely on validation of the models, but it is slow.
         """
         cur.execute("SELECT * FROM aliases ORDER by id")
         n = cur.rowcount

+        bulk_mgr = BulkCreateManager(chunk_size=chunk_size)
+        pk_alias = Alias.objects.order_by('-id').first().id + 1
         for idx, row in enumerate(cur):
             self.update_line(idx, n, row["alias"])
             alias_name = row["alias"]
-            alias_name_good = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
-            obj_dict = {
-                "note_id": MAP_IDBDE[row["idbde"]],
-                "name": alias_name_good,
-                "normalized_name": Alias.normalize(alias_name_good),
-            }
-            try:
-                with transaction.atomic():
-                    alias, created = Alias.objects.get_or_create(**obj_dict)
-            except IntegrityError as e:
-                if "unique" in e.args[0]:
+            alias_name = (alias_name[:252] + '...') if len(alias_name) > 255 else alias_name
+            alias_norm = Alias.normalize(alias_name)
+            # clean pseudo (normalized pseudo must be unique)
+            if alias_norm in ALIAS_SET:
                 continue
             else:
-                raise e
-            alias.save()
-        return None
+                print(alias_norm)
+                ALIAS_SET.add(alias_norm)
+            obj_dict = {
+                "pk": pk_alias,
+                "note_id": MAP_IDBDE[row["idbde"]],
+                "name": alias_name,
+                "normalized_name": alias_norm,
+            }
+            pk_alias += 1
+            bulk_mgr.add(Alias(**obj_dict))
+        bulk_mgr.done()
+
+
     def handle(self, *args, **kwargs):
-        global MAP_IDBDE
         # default args, provided by ImportCommand.
         nk15db, nk15user = kwargs['nk15db'], kwargs['nk15user']
         # connecting to nk15 database
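Both import passes now queue their rows through BulkCreateManager: add() buffers objects and flushes them in chunks, and a final done() writes whatever remains, which avoids one INSERT (and the extra SELECT from get_or_create()) per alias. The helper itself is not touched by this commit; assuming it is the usual thin wrapper around Django's bulk_create(), a minimal sketch could look like this:

from collections import defaultdict

from django.apps import apps


class BulkCreateManager:
    """Queue model instances and bulk_create() them in fixed-size chunks.

    Minimal sketch under stated assumptions -- the project's actual
    BulkCreateManager may differ.
    """

    def __init__(self, chunk_size=100):
        self.chunk_size = chunk_size
        self._queues = defaultdict(list)  # one queue per model label

    def add(self, obj):
        label = type(obj)._meta.label
        queue = self._queues[label]
        queue.append(obj)
        if len(queue) >= self.chunk_size:
            type(obj).objects.bulk_create(queue)
            self._queues[label] = []

    def done(self):
        # Flush every queue that still holds pending objects.
        for label, queue in self._queues.items():
            if queue:
                apps.get_model(label).objects.bulk_create(queue)
        self._queues.clear()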
@@ -189,10 +198,11 @@ class Command(ImportCommand):
         cur = conn.cursor(cursor_factory=pge.DictCursor)

         self.import_account(cur,kwargs["chunk"])
+        # Alias Management
+        if kwargs["alias"]:
+            self.import_alias(cur,kwargs["chunk"])
+        #save to disk
         if kwargs["save"]:
             filename = kwargs["save"]
             with open(filename, 'w') as fp:
                 json.dump(MAP_IDBDE, fp, sort_keys=True, indent=2)
-        # Alias Management
-        if kwargs["alias"]:
-            self.impot_alias(cur)
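After this change handle() runs the alias pass right after import_account(), so MAP_IDBDE is already populated, and before the optional JSON dump, instead of calling the misspelled impot_alias() at the end. Purely as an illustration, a call through Django's call_command(); the command name and option names below are assumptions, since they are defined by ImportCommand, which is outside this diff:

from django.core.management import call_command

# Option names mirror the kwargs read in handle(); "import_nk15" is a
# placeholder for the actual management command name.
call_command(
    "import_nk15",
    nk15db="nk15",           # source PostgreSQL database
    nk15user="nk15_user",    # role used to connect to it
    chunk=1000,              # chunk size forwarded to BulkCreateManager
    alias=True,              # also run the new import_alias() pass
    save="map_idbde.json",   # dump MAP_IDBDE to disk when done
)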