first commit
This commit is contained in:
82
hoarder.py
Normal file
82
hoarder.py
Normal file
@@ -0,0 +1,82 @@
|
||||
import html
import re
from datetime import datetime
from typing import Type

import aiohttp

from maubot import Plugin, MessageEvent
from maubot.handlers import event, command
from mautrix.util.config import BaseProxyConfig, ConfigUpdateHelper
# Plugin configuration proxy: declares which keys this plugin reads
# from its base-config and carries them over on every config update.
class Config(BaseProxyConfig):
    """Proxy config exposing the keys the Hoarder forwarder plugin uses."""

    # Keys copied verbatim from the base config file.
    _KEYS = ("hoarder_url", "hoarder_api_key", "command_prefix")

    def do_update(self, helper: ConfigUpdateHelper) -> None:
        """Copy each known key from the old config into the updated one."""
        for key in self._KEYS:
            helper.copy(key)
||||
class HoarderForwarder(Plugin):
    """Maubot plugin that forwards URLs posted via `!hoarder <url>` to a
    Hoarder instance as link bookmarks."""

    async def start(self) -> None:
        """Start the plugin and load/refresh its configuration."""
        await super().start()
        self.config.load_and_update()

    @classmethod
    def get_config_class(cls) -> Type[BaseProxyConfig]:
        """Tell maubot which config class this plugin uses."""
        return Config

    async def get_page_title(self, url: str) -> str:
        """Fetch the web page title from *url* without using BeautifulSoup.

        Returns one of the placeholder strings "Sans titre" (no <title> tag),
        "Titre introuvable" (non-200 response) or "Erreur" (exception) when
        the title cannot be determined.
        """
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as response:
                    # Guard clause: anything but 200 means we cannot read a title.
                    if response.status != 200:
                        return "Titre introuvable"
                    body = await response.text()
                    # Look for the <title> tag; DOTALL handles titles that
                    # span multiple lines.
                    match = re.search(
                        r"<title>(.*?)</title>", body, re.IGNORECASE | re.DOTALL
                    )
                    if match:
                        # Unescape HTML entities (&amp; -> &) so the stored
                        # title matches what a browser would display.
                        return html.unescape(match.group(1).strip())
                    return "Sans titre"
        except Exception as e:
            # Fixed: was print(), whose output is lost in a maubot deployment;
            # use the plugin logger (also records the traceback).
            self.log.exception(f"Erreur lors de la récupération du titre: {e}")
            return "Erreur"

    @command.new(
        "hoarder",
    )
    @command.argument("url", pass_raw=True, required=True)
    async def send_to_hoarder(self, evt: MessageEvent, url: str) -> None:
        """Handle `!hoarder <url>`: create a link bookmark in Hoarder."""
        await evt.mark_read()

        base_url = self.config["hoarder_url"]
        api_key = self.config["hoarder_api_key"]

        # Fixed: validate the raw config values BEFORE building the endpoint.
        # The original concatenated the path first, so an empty base URL still
        # produced a truthy string and this guard never fired.
        if not base_url or not api_key:
            self.log.warning("L'URL ou la clé API de Hoarder n'est pas configurée.")
            return

        hoarder_url = base_url + '/api/trpc/bookmarks.createBookmark'
        self.log.info(hoarder_url)

        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
            "Accept": "application/json",
        }
        # tRPC-style envelope expected by Hoarder's createBookmark procedure.
        payload = {
            "json": {
                "type": "link",
                "url": url
            }
        }

        try:
            async with aiohttp.ClientSession() as session:
                async with session.post(hoarder_url, json=payload, headers=headers) as response:
                    # NOTE(review): tRPC endpoints commonly reply 200 —
                    # confirm Hoarder actually returns 201 on creation.
                    if response.status == 201:
                        self.log.info(f"URL envoyée avec succès à Hoarder : {url}")
                    else:
                        self.log.error(f"Échec de l'envoi de l'URL à Hoarder : {response.status}")
        except Exception as e:
            self.log.exception(f"Erreur lors de l'envoi de l'URL à Hoarder : {e}")
Reference in New Issue
Block a user