# Copyright (C) 2018-2020 by BDE ENS Paris-Saclay
# SPDX-License-Identifier: GPL-3.0-or-later

import os

from bs4 import BeautifulSoup
from django.core.management import BaseCommand
from django.urls import reverse
from django.utils import timezone
from urllib.parse import urlencode
from urllib.request import Request, urlopen

from activity.models import Activity
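

# Usage sketch (assumption: this file lives under an app's management/commands/
# directory, so the actual command name is derived from the file name and is
# written here as <command_name>):
#   python manage.py <command_name> --raw --human --comment "weekly refresh"
#   python manage.py <command_name> --raw --debug   # print the page instead of saving it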
class Command(BaseCommand):
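    """
    Regenerate the BDE activity planning pages on the Crans wiki
    ("VieBde/PlanningSoirees" and its raw calendar sub-page) from the
    activities stored by the Note Kfet 2020.

    The NOTE_URL, WIKI_USER and WIKI_PASSWORD environment variables are used
    to build the links and to authenticate against the wiki.
    """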
    acl_header = "#acl NoteKfet2015:read,write,admin NoteKfet2020:read,write,admin All:read Default\n"

    warning_header = """## NE PAS ÉDITER CETTE PAGE MANUELLEMENT
## ELLE EST GÉNÉRÉE AUTOMATIQUEMENT PAR LA NOTE KFET 2020
## Adapté par [[WikiYnerant|ÿnérant]] du script de by 20-100, largement inspiré de la version de Barbichu.
"""

    intro_generic = """ * Elle est générée automatiquement par la [[NoteKfet/NoteKfet2020|Note Kfet 2020]]
 * Ne pas éditer cette page manuellement, toute modification sera annulée automatiquement.
 * Pour annoncer un nouvel événement, rendez-vous sur {activities_url}

""".format(activities_url="https://" + os.getenv("NOTE_URL") + reverse("activity:activity_list"))

    @staticmethod
    def connection(url):
        """Log in to the wiki and return the session cookie"""
        parameters = {
            'action': 'login',
            'login': 'Connexion',
            'name': os.getenv("WIKI_USER", "NoteKfet2020"),
            'password': os.getenv("WIKI_PASSWORD"),
        }
        # The parameters must be properly URL-encoded
        data = urlencode(parameters).encode("utf-8")
        request = Request(url, data)
        # Supplying data makes this an HTTP POST request
        response = urlopen(request)
        # We do not care about the page content itself…
        response.read(2)
        # …what we want is the session cookie it hands us
        cookie = response.headers['set-cookie']
        return cookie

    @staticmethod
    def get_edition_ticket(url, cookie):
        """Retrieve the edit ticket of the page"""
        # Build the edit request…
        suffix = "?action=edit&editor=text"
        request = Request(url + suffix)
        # …with the session cookie attached
        request.add_header("Cookie", cookie)
        # Send it
        pagecontent = urlopen(request)
        html = pagecontent.read()
        soup = BeautifulSoup(html, features="lxml")
        # Look for the editor form
        form = soup.find(name="form", attrs={"id": "editor"})
        # Retrieve the ticket from it
        ticket = form.find(name="input", attrs={"name": "ticket"})
        return ticket["value"]

    @staticmethod
    def edit_wiki(page, content, comment=''):
        """Edit a page of the wiki"""
        url = "https://wiki.crans.org/" + page

        # Log in and retrieve the session cookie
        cookie = Command.connection(url)
        # Request edition and retrieve the page's edit ticket
        ticket = Command.get_edition_ticket(url, cookie)
        # Build the save request
        data = {
            'button_save': 'Enregistrer les modifications',
            'category': '',
            'comment': comment.encode("utf-8"),
            'savetext': content.encode("utf-8"),
            'action': 'edit',
            'ticket': ticket
        }
        request = Request(url, urlencode(data).encode("utf-8"))
        request.add_header("Cookie", cookie)
        # POST it
        urlopen(request)

    @staticmethod
    def format_activity(act, raw=True):
        """Wiki-format an activity: for the raw calendar if ``raw``, for the human-readable planning otherwise."""
        if raw:
            return """== {title} ==
 start:: {start}
 end:: {end}
 description:: {description} -- {club}
 location:: {location}
""".format(
                title=act.name,
                start=timezone.localtime(act.date_start).strftime("%Y-%m-%d %H:%M"),
                end=timezone.localtime(act.date_end).strftime("%Y-%m-%d %H:%M"),
                description=act.description,
                club=act.organizer.name,
                location=act.location,
            )
        else:
            return "|| {start} || {title} || {description} || {club} || {location} ||".format(
                title=act.name,
                start=timezone.localtime(act.date_start).strftime("%d/%m/%Y"),
                description=act.description,
                club=act.organizer.name,
                location=act.location,
            )

    @staticmethod
    def get_raw_page():
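        """Build the raw calendar page: return its wiki path and its full wiki-text content."""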
        page = "VieBde/PlanningSoirees/LeCalendrier"
        header = Command.acl_header + Command.warning_header
        header += """= Introduction =

 * Cette page a pour but de recenser les activités BDE afin d'être signalées sur le calendrier de la
 [[PageAccueil|page d'accueil]] du wiki.
"""
        header += Command.intro_generic
        body = "\n".join(Command.format_activity(activity) for activity in Activity.objects.filter(valid=True)
                         .order_by('-date_start').all())
        footer = "\n----\nCatégorieCalendrierCampus"
        return page, header + body + footer

    @staticmethod
    def get_human_readable_page():
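        """Build the human-readable planning page: return its wiki path and its full wiki-text content."""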
        page = "VieBde/PlanningSoirees"
        header = Command.acl_header + Command.warning_header
        header += """= Planning de soirées =
== Introduction ==
 * Cette page est destinée à accueillir le planning des soirées BDE.
"""
        header += Command.intro_generic + "\n"
        body = """== Planning des activités à venir ==
||'''Date'''||'''Titre'''||'''Description'''||'''Par'''||'''Lieu'''||
"""
        body += "\n".join(Command.format_activity(activity, False) for activity in Activity.objects
                          .filter(valid=True, date_end__gte=timezone.now()).order_by('-date_start').all())
        body += """\n\n== Planning des activités passées ==
||'''Date'''||'''Titre'''||'''Description'''||'''Par'''||'''Lieu'''||
"""
        body += "\n".join(Command.format_activity(activity, False) for activity in Activity.objects
                          .filter(valid=True, date_end__lt=timezone.now()).order_by('-date_start').all())
        return page, header + body

    @staticmethod
    def refresh_raw_wiki_page(comment="refresh", debug=True):
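        """Refresh the raw calendar page, printing it to stdout in debug mode instead of saving it to the wiki."""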
        page, content = Command.get_raw_page()
        if debug:
            print(content)
        else:
            Command.edit_wiki(page, content, comment)

    @staticmethod
    def refresh_human_readable_wiki_page(comment="refresh", debug=True):
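        """Refresh the human-readable planning page, printing it to stdout in debug mode instead of saving it to the wiki."""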
        page, content = Command.get_human_readable_page()
        if debug:
            print(content)
        else:
            Command.edit_wiki(page, content, comment)

    def add_arguments(self, parser):
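        """Declare the command-line options of this management command."""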
        parser.add_argument("--human", "-H", action="store_true", help="Save the human-readable page")
        parser.add_argument("--raw", "-r", action="store_true", help="Save the raw page, for the calendar")
        parser.add_argument("--comment", "-c", action="store", type=str, default="", help="Comment for the modification")
        parser.add_argument("--debug", "-d", action="store_true", help="Don't commit to the wiki, render to stdout")

    def handle(self, *args, **options):
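        """Entry point of the command: refresh the requested page(s) on the wiki."""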
        if options["raw"]:
            Command.refresh_raw_wiki_page(options["comment"], options["debug"])
        if options["human"]:
            Command.refresh_human_readable_wiki_page(options["comment"], options["debug"])