Compare commits: 3806feb67f...47dc4dd9e6

No commits in common. "3806feb67fcb1fe822cfdedddbbc4ca7eeef3829" and "47dc4dd9e6742c3fd0f0d4ea654b1e9a0a39f4f7" have entirely different histories.

@@ -55,6 +55,6 @@ class Command(BaseCommand):
            self.stderr.write(err_log)
        if options["mail"]:
            send_mail("[Note Kfet] La base de données n'est pas consistante", err_log,
-                     "NoteKfet2020 <notekfet2020@crans.org>", ["respo-info.bde@lists.crans.org"])
+                     "NoteKfet2020 <notekfet2020@crans.org>", ["respoinfo.bde@lists.crans.org"])

        exit(1 if error else 0)

@@ -63,7 +63,6 @@ class Command(ImportCommand):
                "pk": pk_activity,
                "name": row["titre"],
                "description": row["description"],
-               "location": row["lieu"],
                "activity_type_id": activity_type_id,  # By default Pot
                "creater_id": NoteUser.objects.get(pk=note).user.id,
                "organizer_id": organizer.pk,

@@ -1,179 +0,0 @@
-# Copyright (C) 2018-2020 by BDE ENS Paris-Saclay
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-import os
-
-from bs4 import BeautifulSoup
-from django.core.management import BaseCommand
-from django.urls import reverse
-from django.utils import timezone
-from urllib.parse import urlencode
-from urllib.request import Request, urlopen
-
-from activity.models import Activity
-
-
-class Command(BaseCommand):
-    acl_header = "#acl NoteKfet2015:read,write,admin NoteKfet2020:read,write,admin All:read Default\n"
-
-    warning_header = """## NE PAS ÉDITER CETTE PAGE MANUELLEMENT
-## ELLE EST GÉNÉRÉE AUTOMATIQUEMENT PAR LA NOTE KFET 2020
-## Adapté par [[WikiYnerant|ÿnérant]] du script de by 20-100, largement inspiré de la version de Barbichu.
-"""
-
-    intro_generic = """ * Elle est générée automatiquement par la [[NoteKfet/NoteKfet2020|Note Kfet 2020]]
- * Ne pas éditer cette page manuellement, toute modification sera annulée automatiquement.
- * Pour annoncer un nouvel événement, rendez-vous sur {activities_url}
-
-""".format(activities_url="https://" + os.getenv("NOTE_URL") + reverse("activity:activity_list"))
-
-    @staticmethod
-    def connection(url):
-        """Se logue sur le wiki et renvoie le cookie de session"""
-        parameters = {
-            'action': 'login',
-            'login': 'Connexion',
-            'name': os.getenv("WIKI_USER", "NoteKfet2020"),
-            'password': os.getenv("WIKI_PASSWORD"),
-        }
-        # Il faut encoder ça proprement
-        data = urlencode(parameters).encode("utf-8")
-        request = Request(url, data)
-        # La requête est envoyée en HTTP POST
-        response = urlopen(request)
-        # a priori la page elle-même je m'en carre…
-        response.read(2)
-        # …ce qui m'intéresse, c'est le cookie qu'elle me file
-        cookie = response.headers['set-cookie']
-        return cookie
-
-    @staticmethod
-    def get_edition_ticket(url, cookie):
-        """Récupère le ticket d'édition de la page"""
-        # On crée la requête d'édition…
-        suffix = "?action=edit&editor=text"
-        request = Request(url + suffix)
-        # …avec le cookie
-        request.add_header("Cookie", cookie)
-        # On l'envoie
-        pagecontent = urlopen(request)
-        html = pagecontent.read()
-        soup = BeautifulSoup(html, features="lxml")
-        # On va chercher le formulaire
-        form = soup.find(name="form", attrs={"id": "editor"})
-        # On récupère le ticket dedans
-        ticket = soup.find(name="input", attrs={"name": "ticket"})
-        return ticket["value"]
-
-    @staticmethod
-    def edit_wiki(page, content, comment=''):
-        """Modifie une page du wiki"""
-        url = "https://wiki.crans.org/" + page
-
-        # On se connecte et on récupère le cookie de session
-        cookie = Command.connection(url)
-        # On demande l'édition et on récupère le ticket d'édition de la page
-        ticket = Command.get_edition_ticket(url, cookie)
-        # On construit la requête
-        data = {
-            'button_save': 'Enregistrer les modifications',
-            'category': '',
-            'comment': comment.encode("utf-8"),
-            'savetext': content.encode("utf-8"),
-            'action': 'edit',
-            'ticket': ticket
-        }
-        request = Request(url, urlencode(data).encode("utf-8"))
-        request.add_header("Cookie", cookie)
-        # On la poste
-        urlopen(request)
-
-    @staticmethod
-    def format_activity(act, raw=True):
-        """Wiki-formate une activité, pour le calendrier raw si ``raw``, pour le human-readable sinon."""
-        if raw:
-            return """== {title} ==
- start:: {start}
- end:: {end}
- description:: {description} -- {club}
- location:: {location}
-""".format(
-                title=act.name,
-                start=timezone.localtime(act.date_start).strftime("%Y-%m-%d %H:%M"),
-                end=timezone.localtime(act.date_end).strftime("%Y-%m-%d %H:%M"),
-                description=act.description,
-                club=act.organizer.name,
-                location=act.location,
-            )
-        else:
-            return "|| {start} || {title} || {description} || {club} || {location} ||".format(
-                title=act.name,
-                start=timezone.localtime(act.date_start).strftime("%d/%m/%Y"),
-                description=act.description,
-                club=act.organizer.name,
-                location=act.location,
-            )
-
-    @staticmethod
-    def get_raw_page():
-        page = "VieBde/PlanningSoirees/LeCalendrier"
-        header = Command.acl_header + Command.warning_header
-        header += """= Introduction =
-
- * Cette page a pour but de recenser les activités BDE afin d'être signalées sur le calendrier de la
- [[PageAccueil|page d'accueil]] du wiki.
-"""
-        header += Command.intro_generic
-        body = "\n".join(Command.format_activity(activity) for activity in Activity.objects.filter(valid=True)
-                         .order_by('-date_start').all())
-        footer = "\n----\nCatégorieCalendrierCampus"
-        return page, header + body + footer
-
-    @staticmethod
-    def get_human_readable_page():
-        page = "VieBde/PlanningSoirees"
-        header = Command.acl_header + Command.warning_header
-        header += """= Planning de soirées =
-== Introduction ==
- * Cette page est destinée à accueillir le planning des soirées BDE.
-"""
-        header += Command.intro_generic + "\n"
-        body = """== Planning des activités à venir ==
-||'''Date'''||'''Titre'''||'''Description'''||'''Par''' ||'''Lieu'''||
-"""
-        body += "\n".join(Command.format_activity(activity, False) for activity in Activity.objects
-                          .filter(valid=True, date_end__gte=timezone.now()).order_by('-date_start').all())
-        body += """\n\n== Planning des activités passées ==
-||'''Date'''||'''Titre'''||'''Description'''||'''Par'''||'''Lieu'''||
-"""
-        body += "\n".join(Command.format_activity(activity, False) for activity in Activity.objects
-                          .filter(valid=True, date_end__lt=timezone.now()).order_by('-date_start').all())
-        return page, header + body
-
-    @staticmethod
-    def refresh_raw_wiki_page(comment="refresh", debug=True):
-        page, content = Command.get_raw_page()
-        if debug:
-            print(content)
-        else:
-            Command.edit_wiki(page, content, comment)
-
-    @staticmethod
-    def refresh_human_readable_wiki_page(comment="refresh", debug=True):
-        page, content = Command.get_human_readable_page()
-        if debug:
-            print(content)
-        else:
-            Command.edit_wiki(page, content, comment)
-
-    def add_arguments(self, parser):
-        parser.add_argument("--human", "-H", action="store_true", help="Save human readable page")
-        parser.add_argument("--raw", "-r", action="store_true", help="Save raw page, for the calendar")
-        parser.add_argument("--comment", "-c", action="store", type=str, default="", help="Comment of the modification")
-        parser.add_argument("--debug", "-d", action="store_true", help="Don't commit to the wiki, render in stdout")
-
-    def handle(self, *args, **options):
-        if options["raw"]:
-            Command.refresh_raw_wiki_page(options["comment"], options["debug"])
-        if options["human"]:
-            Command.refresh_human_readable_wiki_page(options["comment"], options["debug"])

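Note on the hunk above: the deleted file is a Django management command that regenerated two pages of the Crans MoinMoin wiki (the raw calendar feed and the human-readable planning page) by replaying the wiki's login and edit-form flow. The compare view does not show file paths, so the command name below is only a placeholder; the flags are the ones declared in the command's own add_arguments. A minimal invocation sketch, assuming the command was registered as refresh_activities_wiki:

# Hypothetical invocation of the removed command; the name "refresh_activities_wiki"
# is an assumption, only the flags come from add_arguments in the deleted file.
# Render both pages to stdout without touching the wiki (dry run):
python manage.py refresh_activities_wiki --raw --human --debug
# Push both pages to the wiki with an edit comment:
python manage.py refresh_activities_wiki --raw --human --comment "refresh"
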
@@ -21,7 +21,7 @@ class Command(BaseCommand):
        activate('fr')
        notes = NoteUser.objects.filter(
            balance__lte=-options["negative_amount"],
-           user__memberships__date_end__gte=timezone.now().date(),
+           user__memberships__date_end__gte=timezone.now(),
        ).order_by('balance').distinct().all()

        if options["spam"]:

@@ -32,5 +32,5 @@ class Command(BaseCommand):
        plain_text = render_to_string("note/mails/negative_notes_report.txt", context=dict(notes=notes))
        html = render_to_string("note/mails/negative_notes_report.html", context=dict(notes=notes))
        send_mail("[Note Kfet] Liste des négatifs", plain_text, "Note Kfet 2020 <notekfet2020@crans.org>",
-                 recipient_list=["respo-info.bde@lists.crans.org", "tresorerie.bde@lists.crans.org"],
+                 recipient_list=["respoinfo.bde@lists.crans.org", "tresorerie.bde@lists.crans.org"],
                  html_message=html)

@@ -1,11 +0,0 @@
-#!/bin/bash
-# Create backups directory
-[[ -d /var/www/note_kfet/backups ]] || (mkdir /var/www/note_kfet/backups && chown www-data:www-data /var/www/note_kfet/backups)
-date=$(date +%Y-%m-%d)
-# Backup database and save it as tar archive
-su postgres -c "pg_dump -F t note_db" | tee "/var/www/note_kfet/backups/$date.tar" > /dev/null
-# Compress backup as gzip
-gzip "/var/www/note_kfet/backups/$date.tar"
-chown www-data:www-data "/var/www/note_kfet/backups/$date.tar.gz"
-# Delete backups that have more than 30 days
-find /var/www/note_kfet/backups -type f -mtime +30 -exec rm {} \;

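Note on the hunk above: the deleted script wrote a daily gzipped tar-format pg_dump of the note_db database under /var/www/note_kfet/backups and pruned archives older than 30 days. A minimal restore sketch, assuming an empty note_db database already exists and using an example archive date not taken from the diff:

# Decompress one daily archive (the date is an example)
gunzip /var/www/note_kfet/backups/2020-01-01.tar.gz
# Restore the tar-format dump into the existing, empty note_db database
su postgres -c "pg_restore -d note_db /var/www/note_kfet/backups/2020-01-01.tar"
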